diff --git a/.ci/init.gradle b/.ci/init.gradle index 97b92da3f0c..e5c71eb5881 100644 --- a/.ci/init.gradle +++ b/.ci/init.gradle @@ -70,6 +70,11 @@ projectsLoaded { maven configCache() } } + rootProject { + project.pluginManager.withPlugin('com.gradle.build-scan') { + buildScan.server = 'https://gradle-enterprise.elastic.co' + } + } } final String buildCacheUrl = System.getProperty('org.elasticsearch.build.cache.url') diff --git a/Vagrantfile b/Vagrantfile index 14f6ad00f3a..ed572aba8a8 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -41,6 +41,16 @@ Vagrant.configure(2) do |config| # the elasticsearch project called vagrant.... config.vm.synced_folder '.', '/vagrant', disabled: true config.vm.synced_folder '.', '/elasticsearch' + # TODO: make these syncs work for windows!!! + config.vm.synced_folder "#{Dir.home}/.vagrant/gradle/caches/jars-3", "/root/.gradle/caches/jars-3", + create: true, + owner: "vagrant" + config.vm.synced_folder "#{Dir.home}/.vagrant/gradle/caches/modules-2", "/root/.gradle/caches/modules-2", + create: true, + owner: "vagrant" + config.vm.synced_folder "#{Dir.home}/.gradle/wrapper", "/root/.gradle/wrapper", + create: true, + owner: "vagrant" # Expose project directory. 
Note that VAGRANT_CWD may not be the same as Dir.pwd PROJECT_DIR = ENV['VAGRANT_PROJECT_DIR'] || Dir.pwd @@ -380,10 +390,6 @@ export ZIP=/elasticsearch/distribution/zip/build/distributions export TAR=/elasticsearch/distribution/tar/build/distributions export RPM=/elasticsearch/distribution/rpm/build/distributions export DEB=/elasticsearch/distribution/deb/build/distributions -export BATS=/project/build/bats -export BATS_UTILS=/project/build/packaging/bats/utils -export BATS_TESTS=/project/build/packaging/bats/tests -export PACKAGING_ARCHIVES=/project/build/packaging/archives export PACKAGING_TESTS=/project/build/packaging/tests VARS cat \<\ /etc/sudoers.d/elasticsearch_vars @@ -391,11 +397,10 @@ Defaults env_keep += "ZIP" Defaults env_keep += "TAR" Defaults env_keep += "RPM" Defaults env_keep += "DEB" -Defaults env_keep += "BATS" -Defaults env_keep += "BATS_UTILS" -Defaults env_keep += "BATS_TESTS" Defaults env_keep += "PACKAGING_ARCHIVES" Defaults env_keep += "PACKAGING_TESTS" +Defaults env_keep += "BATS_UTILS" +Defaults env_keep += "BATS_TESTS" Defaults env_keep += "JAVA_HOME" Defaults env_keep += "SYSTEM_JAVA_HOME" SUDOERS_VARS diff --git a/build.gradle b/build.gradle index ba4f241080f..85e502e0154 100644 --- a/build.gradle +++ b/build.gradle @@ -24,12 +24,15 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.BwcVersions import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.plugin.PluginBuildPlugin +import org.elasticsearch.gradle.tool.Boilerplate import org.gradle.util.GradleVersion import org.gradle.util.DistributionLocator import org.gradle.plugins.ide.eclipse.model.SourceFolder +import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure + plugins { - id 'com.gradle.build-scan' version '2.3' + id 'com.gradle.build-scan' version '2.4' id 'base' id 'elasticsearch.global-build-info' } @@ -212,7 +215,7 @@ task branchConsistency { allprojects { // ignore missing javadocs - tasks.withType(Javadoc) { 
Javadoc javadoc -> + tasks.withType(Javadoc).configureEach { Javadoc javadoc -> // the -quiet here is because of a bug in gradle, in that adding a string option // by itself is not added to the options. By adding quiet, both this option and // the "value" -quiet is added, separated by a space. This is ok since the javadoc @@ -329,13 +332,9 @@ allprojects { } } - task cleanIdeaBuildDir(type: Delete) { - delete 'build-idea' + tasks.named('cleanIdea') { + delete 'build-idea' } - cleanIdeaBuildDir.setGroup("ide") - cleanIdeaBuildDir.setDescription("Deletes the IDEA build directory.") - - tasks.cleanIdea.dependsOn(cleanIdeaBuildDir) } idea { @@ -390,29 +389,20 @@ allprojects { String lineSeparator = Os.isFamily(Os.FAMILY_WINDOWS) ? '\\\\r\\\\n' : '\\\\n' String licenseHeader = licenseHeaderFile.getText('UTF-8').replace(System.lineSeparator(), lineSeparator) - task copyEclipseSettings(type: Copy) { + tasks.register('copyEclipseSettings', Copy) { + mustRunAfter 'wipeEclipseSettings' // TODO: "package this up" for external builds from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings') into '.settings' filter{ it.replaceAll('@@LICENSE_HEADER_TEXT@@', licenseHeader)} } // otherwise .settings is not nuked entirely - task wipeEclipseSettings(type: Delete) { + tasks.register('wipeEclipseSettings', Delete) { delete '.settings' } - tasks.cleanEclipse.dependsOn(wipeEclipseSettings) + tasks.named('cleanEclipse') { dependsOn 'wipeEclipseSettings' } // otherwise the eclipse merging is *super confusing* - tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings) - - // work arround https://github.com/gradle/gradle/issues/6582 - tasks.eclipseProject.mustRunAfter tasks.cleanEclipseProject - tasks.matching { it.name == 'eclipseClasspath' }.all { - it.mustRunAfter { tasks.cleanEclipseClasspath } - } - tasks.matching { it.name == 'eclipseJdt' }.all { - it.mustRunAfter { tasks.cleanEclipseJdt } - } - tasks.copyEclipseSettings.mustRunAfter tasks.wipeEclipseSettings + 
tasks.named('eclipse') { dependsOn 'cleanEclipse', 'copyEclipseSettings' } } allprojects { @@ -477,13 +467,11 @@ gradle.projectsEvaluated { * need to publish artifacts for them. */ if (project.name.equals('qa') || project.path.contains(':qa:')) { - Task assemble = project.tasks.findByName('assemble') - if (assemble) { - assemble.enabled = false + maybeConfigure(project.tasks, 'assemble') { + it.enabled = false } - Task dependenciesInfo = project.tasks.findByName('dependenciesInfo') - if (dependenciesInfo) { - dependenciesInfo.enabled = false + maybeConfigure(project.tasks, 'dependenciesInfo') { + it.enabled = false } } } @@ -505,7 +493,7 @@ gradle.projectsEvaluated { } allprojects { - task resolveAllDependencies { + tasks.register('resolveAllDependencies') { dependsOn tasks.matching { it.name == "pullFixture"} doLast { configurations.findAll { it.isCanBeResolved() }.each { it.resolve() } @@ -535,13 +523,13 @@ allprojects { } } - task checkPart1 - task checkPart2 - tasks.matching { it.name == "check" }.all { check -> - if (check.path.startsWith(":x-pack:")) { - checkPart2.dependsOn check - } else { - checkPart1.dependsOn check - } - } + def checkPart1 = tasks.register('checkPart1') + def checkPart2 = tasks.register('checkPart2') + plugins.withId('lifecycle-base') { + if (project.path.startsWith(":x-pack:")) { + checkPart2.configure { dependsOn 'check' } + } else { + checkPart1.configure { dependsOn 'check' } + } + } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 8232ce63f23..951b013e267 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -72,6 +72,7 @@ sourceSets { } allprojects { + apply plugin: 'java' targetCompatibility = 11 sourceCompatibility = 11 } @@ -87,9 +88,6 @@ compileMinimumRuntimeJava { jar { from sourceSets.minimumRuntime.output - into('META-INF') { - from configurations.reaper - } } javadoc { @@ -127,7 +125,6 @@ dependencies { testCompile "junit:junit:${props.getProperty('junit')}" testCompile
"com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}" testCompile 'com.github.tomakehurst:wiremock-jre8-standalone:2.23.2' - reaper project('reaper') minimumRuntimeCompile "junit:junit:${props.getProperty('junit')}" minimumRuntimeCompile localGroovy() minimumRuntimeCompile gradleApi() @@ -143,6 +140,10 @@ if (project == rootProject) { mavenLocal() } } + dependencies { + // add this to the runtime classpath so Gradle will properly track it as a build runtime classpath input + runtimeOnly project('reaper') + } // only run tests as build-tools test.enabled = false } @@ -177,9 +178,11 @@ if (project != rootProject) { configurations { distribution + reaper } dependencies { + reaper project('reaper') distribution project(':distribution:archives:windows-zip') distribution project(':distribution:archives:oss-windows-zip') distribution project(':distribution:archives:darwin-tar') @@ -191,6 +194,9 @@ if (project != rootProject) { // for external projects we want to remove the marker file indicating we are running the Elasticsearch project processResources { exclude 'buildSrc.marker' + into('META-INF') { + from configurations.reaper + } } String localDownloads = "${rootProject.buildDir}/local-downloads" diff --git a/buildSrc/reaper/build.gradle b/buildSrc/reaper/build.gradle index 242dfcafaaa..0d73bbbfcb4 100644 --- a/buildSrc/reaper/build.gradle +++ b/buildSrc/reaper/build.gradle @@ -1,6 +1,5 @@ -apply plugin: 'java' - jar { + archiveName = "${project.name}.jar" manifest { attributes 'Main-Class': 'org.elasticsearch.gradle.reaper.Reaper' } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 1ad4a3f7d93..b303ba5f388 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -18,6 +18,7 @@ */ package org.elasticsearch.gradle
+ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar import groovy.transform.CompileDynamic @@ -66,6 +67,7 @@ import org.gradle.api.publish.maven.plugins.MavenPublishPlugin import org.gradle.api.publish.maven.tasks.GenerateMavenPom import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.SourceSetContainer +import org.gradle.api.tasks.TaskProvider import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.GroovyCompile import org.gradle.api.tasks.compile.JavaCompile @@ -82,10 +84,11 @@ import org.gradle.process.ExecSpec import org.gradle.util.GradleVersion import java.nio.charset.StandardCharsets -import java.time.ZoneOffset -import java.time.ZonedDateTime +import java.nio.file.Files import java.util.regex.Matcher +import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure + /** * Encapsulates build configuration for elasticsearch projects. */ @@ -127,7 +130,7 @@ class BuildPlugin implements Plugin { // apply global test task failure listener project.rootProject.pluginManager.apply(TestFailureReportingPlugin) - project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask) + project.getTasks().register("buildResources", ExportElasticsearchBuildResourcesTask) setupSeed(project) configureRepositories(project) @@ -154,7 +157,7 @@ class BuildPlugin implements Plugin { ExtraPropertiesExtension ext = project.extensions.getByType(ExtraPropertiesExtension) // Common config when running with a FIPS-140 runtime JVM if (ext.has('inFipsJvm') && ext.get('inFipsJvm')) { - project.tasks.withType(Test) { Test task -> + project.tasks.withType(Test).configureEach { Test task -> task.systemProperty 'javax.net.ssl.trustStorePassword', 'password' task.systemProperty 'javax.net.ssl.keyStorePassword', 'password' } @@ -530,7 +533,7 @@ class BuildPlugin implements Plugin { static void configurePomGeneration(Project project) { // Only works with 
`enableFeaturePreview('STABLE_PUBLISHING')` // https://github.com/gradle/gradle/issues/5696#issuecomment-396965185 - project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask -> + project.tasks.withType(GenerateMavenPom.class).configureEach({ GenerateMavenPom generatePOMTask -> // The GenerateMavenPom task is aggressive about setting the destination, instead of fighting it, // just make a copy. ExtraPropertiesExtension ext = generatePOMTask.extensions.getByType(ExtraPropertiesExtension) @@ -546,12 +549,15 @@ class BuildPlugin implements Plugin { } } } - // build poms with assemble (if the assemble task exists) - Task assemble = project.tasks.findByName('assemble') - if (assemble && assemble.enabled) { - assemble.dependsOn(generatePOMTask) + } as Action) + + // build poms with assemble (if the assemble task exists) + maybeConfigure(project.tasks, 'assemble') { assemble -> + if (assemble.enabled) { + assemble.dependsOn(project.tasks.withType(GenerateMavenPom)) } } + project.plugins.withType(MavenPublishPlugin).whenPluginAdded { PublishingExtension publishing = project.extensions.getByType(PublishingExtension) publishing.publications.all { MavenPublication publication -> // we only deal with maven @@ -607,7 +613,7 @@ class BuildPlugin implements Plugin { project.afterEvaluate { File compilerJavaHome = ext.get('compilerJavaHome') as File - project.tasks.withType(JavaCompile) { JavaCompile compileTask -> + project.tasks.withType(JavaCompile).configureEach({ JavaCompile compileTask -> final JavaVersion targetCompatibilityVersion = JavaVersion.toVersion(compileTask.targetCompatibility) // we only fork if the Gradle JDK is not the same as the compiler JDK if (compilerJavaHome.canonicalPath == Jvm.current().javaHome.canonicalPath) { @@ -644,9 +650,9 @@ class BuildPlugin implements Plugin { // TODO: use native Gradle support for --release when available (cf. 
https://github.com/gradle/gradle/issues/2510) compileTask.options.compilerArgs << '--release' << targetCompatibilityVersion.majorVersion - } + } as Action) // also apply release flag to groovy, which is used in build-tools - project.tasks.withType(GroovyCompile) { GroovyCompile compileTask -> + project.tasks.withType(GroovyCompile).configureEach({ GroovyCompile compileTask -> // we only fork if the Gradle JDK is not the same as the compiler JDK if (compilerJavaHome.canonicalPath == Jvm.current().javaHome.canonicalPath) { compileTask.options.fork = false @@ -655,19 +661,23 @@ class BuildPlugin implements Plugin { compileTask.options.forkOptions.javaHome = compilerJavaHome compileTask.options.compilerArgs << '--release' << JavaVersion.toVersion(compileTask.targetCompatibility).majorVersion } - } + } as Action) } } static void configureJavadoc(Project project) { // remove compiled classes from the Javadoc classpath: http://mail.openjdk.java.net/pipermail/javadoc-dev/2018-January/000400.html final List classes = new ArrayList<>() - project.tasks.withType(JavaCompile) { JavaCompile javaCompile -> + project.tasks.withType(JavaCompile).configureEach { JavaCompile javaCompile -> classes.add(javaCompile.destinationDir) } - project.tasks.withType(Javadoc) { Javadoc javadoc -> + project.tasks.withType(Javadoc).configureEach { Javadoc javadoc -> File compilerJavaHome = project.extensions.getByType(ExtraPropertiesExtension).get('compilerJavaHome') as File - javadoc.executable = new File(compilerJavaHome, 'bin/javadoc') + // only explicitly set javadoc executable if compiler JDK is different from Gradle + // this ensures better cacheability as setting this input to an absolute path breaks portability + if (Files.isSameFile(compilerJavaHome.toPath(), Jvm.current().getJavaHome().toPath()) == false) { + javadoc.executable = new File(compilerJavaHome, 'bin/javadoc') + } javadoc.classpath = javadoc.getClasspath().filter { f -> return classes.contains(f) == false } @@ -682,21 +692,27
@@ class BuildPlugin implements Plugin { /** Adds a javadocJar task to generate a jar containing javadocs. */ static void configureJavadocJar(Project project) { - Jar javadocJarTask = project.tasks.create('javadocJar', Jar) - javadocJarTask.classifier = 'javadoc' - javadocJarTask.group = 'build' - javadocJarTask.description = 'Assembles a jar containing javadocs.' - javadocJarTask.from(project.tasks.getByName(JavaPlugin.JAVADOC_TASK_NAME)) - project.tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn(javadocJarTask) + TaskProvider javadocJarTask = project.tasks.register('javadocJar', Jar, { Jar jar -> + jar.archiveClassifier.set('javadoc') + jar.group = 'build' + jar.description = 'Assembles a jar containing javadocs.' + jar.from(project.tasks.named(JavaPlugin.JAVADOC_TASK_NAME)) + } as Action) + maybeConfigure(project.tasks, BasePlugin.ASSEMBLE_TASK_NAME) { Task t -> + t.dependsOn(javadocJarTask) + } } static void configureSourcesJar(Project project) { - Jar sourcesJarTask = project.tasks.create('sourcesJar', Jar) - sourcesJarTask.classifier = 'sources' - sourcesJarTask.group = 'build' - sourcesJarTask.description = 'Assembles a jar containing source files.' - sourcesJarTask.from(project.extensions.getByType(SourceSetContainer).getByName(SourceSet.MAIN_SOURCE_SET_NAME).allSource) - project.tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn(sourcesJarTask) + TaskProvider sourcesJarTask = project.tasks.register('sourcesJar', Jar, { Jar jar -> + jar.archiveClassifier.set('sources') + jar.group = 'build' + jar.description = 'Assembles a jar containing source files.' 
+ jar.from(project.extensions.getByType(SourceSetContainer).getByName(SourceSet.MAIN_SOURCE_SET_NAME).allSource) + } as Action) + maybeConfigure(project.tasks, BasePlugin.ASSEMBLE_TASK_NAME) { Task t -> + t.dependsOn(sourcesJarTask) + } } /** Adds additional manifest info to jars */ @@ -704,7 +720,7 @@ class BuildPlugin implements Plugin { ExtraPropertiesExtension ext = project.extensions.getByType(ExtraPropertiesExtension) ext.set('licenseFile', null) ext.set('noticeFile', null) - project.tasks.withType(Jar) { Jar jarTask -> + project.tasks.withType(Jar).configureEach { Jar jarTask -> // we put all our distributable files under distributions jarTask.destinationDir = new File(project.buildDir, 'distributions') // fixup the jar manifest @@ -720,9 +736,10 @@ class BuildPlugin implements Plugin { 'Build-Date': ext.get('buildDate'), 'Build-Java-Version': compilerJavaVersion) } - - // add license/notice files - project.afterEvaluate { + } + // add license/notice files + project.afterEvaluate { + project.tasks.withType(Jar).configureEach { Jar jarTask -> if (ext.has('licenseFile') == false || ext.get('licenseFile') == null || ext.has('noticeFile') == false || ext.get('noticeFile') == null) { throw new GradleException("Must specify license and notice file for project ${project.path}") } @@ -748,8 +765,8 @@ class BuildPlugin implements Plugin { * normal jar with the shadow jar so we no longer want to run * the jar task. */ - project.tasks.getByName(JavaPlugin.JAR_TASK_NAME).enabled = false - project.tasks.getByName('shadowJar').configure { ShadowJar shadowJar -> + project.tasks.named(JavaPlugin.JAR_TASK_NAME).configure { it.enabled = false } + project.tasks.named('shadowJar').configure { ShadowJar shadowJar -> /* * Replace the default "shadow" classifier with null * which will leave the classifier off of the file name. 
@@ -766,7 +783,9 @@ class BuildPlugin implements Plugin { shadowJar.configurations = [project.configurations.getByName('bundle')] } // Make sure we assemble the shadow jar - project.tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn project.tasks.getByName('shadowJar') + project.tasks.named(BasePlugin.ASSEMBLE_TASK_NAME).configure { + it.dependsOn project.tasks.named('shadowJar') + } project.artifacts.add('apiElements', project.tasks.getByName('shadowJar')) } } @@ -775,7 +794,7 @@ class BuildPlugin implements Plugin { ExtraPropertiesExtension ext = project.extensions.getByType(ExtraPropertiesExtension) // Default test task should run only unit tests - project.tasks.withType(Test).matching { Test task -> task.name == 'test' }.all { Test task -> + maybeConfigure(project.tasks, 'test', Test) { Test task -> task.include '**/*Tests.class' } @@ -783,7 +802,7 @@ class BuildPlugin implements Plugin { if (project.path != ':build-tools') { File heapdumpDir = new File(project.buildDir, 'heapdump') - project.tasks.withType(Test) { Test test -> + project.tasks.withType(Test).configureEach { Test test -> File testOutputDir = new File(test.reports.junitXml.getDestination(), "output") ErrorReportingTestListener listener = new ErrorReportingTestListener(test.testLogging, testOutputDir) @@ -894,30 +913,37 @@ class BuildPlugin implements Plugin { } private static configurePrecommit(Project project) { - Task precommit = PrecommitTasks.create(project, true) - project.tasks.getByName(LifecycleBasePlugin.CHECK_TASK_NAME).dependsOn(precommit) - project.tasks.getByName(JavaPlugin.TEST_TASK_NAME).mustRunAfter(precommit) + TaskProvider precommit = PrecommitTasks.create(project, true) + project.tasks.named(LifecycleBasePlugin.CHECK_TASK_NAME).configure { it.dependsOn(precommit) } + project.tasks.named(JavaPlugin.TEST_TASK_NAME).configure { it.mustRunAfter(precommit) } // only require dependency licenses for non-elasticsearch deps - (project.tasks.getByName('dependencyLicenses') as 
DependencyLicensesTask).dependencies = project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME).fileCollection { Dependency dependency -> - dependency.group.startsWith('org.elasticsearch') == false - } - project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME) - project.plugins.withType(ShadowPlugin).whenPluginAdded { - (project.tasks.getByName('dependencyLicenses') as DependencyLicensesTask).dependencies += project.configurations.getByName('bundle').fileCollection { Dependency dependency -> + project.tasks.withType(DependencyLicensesTask).named('dependencyLicenses').configure { + it.dependencies = project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME).fileCollection { Dependency dependency -> dependency.group.startsWith('org.elasticsearch') == false + } - project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME) + } + project.plugins.withType(ShadowPlugin).whenPluginAdded { + project.tasks.withType(DependencyLicensesTask).named('dependencyLicenses').configure { + it.dependencies += project.configurations.getByName('bundle').fileCollection { Dependency dependency -> + dependency.group.startsWith('org.elasticsearch') == false + } } } } private static configureDependenciesInfo(Project project) { - DependenciesInfoTask deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask) - deps.runtimeConfiguration = project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME) + TaskProvider deps = project.tasks.register("dependenciesInfo", DependenciesInfoTask, { DependenciesInfoTask task -> + task.runtimeConfiguration = project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME) + task.compileOnlyConfiguration = project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME) + task.getConventionMapping().map('mappings') { + (project.tasks.getByName('dependencyLicenses') as DependencyLicensesTask).mappings + } + } as Action) 
project.plugins.withType(ShadowPlugin).whenPluginAdded { - deps.runtimeConfiguration = project.configurations.create('infoDeps') - deps.runtimeConfiguration.extendsFrom(project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME), project.configurations.getByName('bundle')) - } - deps.compileOnlyConfiguration = project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME) - project.afterEvaluate { - deps.mappings = (project.tasks.getByName('dependencyLicenses') as DependencyLicensesTask).mappings + deps.configure { task -> + task.runtimeConfiguration = project.configurations.create('infoDeps') + task.runtimeConfiguration.extendsFrom(project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME), project.configurations.getByName('bundle')) + } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy index 13e457c0317..aa01b1d7dfd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy @@ -25,6 +25,7 @@ import org.gradle.api.artifacts.Configuration import org.gradle.api.artifacts.Dependency import org.gradle.api.artifacts.DependencyResolutionListener import org.gradle.api.artifacts.DependencySet +import org.gradle.api.internal.ConventionTask import org.gradle.api.tasks.Input import org.gradle.api.tasks.InputDirectory import org.gradle.api.tasks.OutputFile @@ -45,7 +46,7 @@ import java.util.regex.Pattern * * */ -public class DependenciesInfoTask extends DefaultTask { +public class DependenciesInfoTask extends ConventionTask { /** Dependencies to gather information from. 
*/ @Input @@ -55,8 +56,7 @@ public class DependenciesInfoTask extends DefaultTask { @Input public Configuration compileOnlyConfiguration - @Input - public LinkedHashMap mappings + private LinkedHashMap mappings /** Directory to read license files */ @InputDirectory @@ -93,7 +93,7 @@ public class DependenciesInfoTask extends DefaultTask { } final String url = createURL(dependency.group, dependency.name, dependency.version) - final String dependencyName = DependencyLicensesTask.getDependencyName(mappings, dependency.name) + final String dependencyName = DependencyLicensesTask.getDependencyName(getMappings(), dependency.name) logger.info("mapped dependency ${dependency.group}:${dependency.name} to ${dependencyName} for license info") final String licenseType = getLicenseType(dependency.group, dependencyName) @@ -103,7 +103,15 @@ public class DependenciesInfoTask extends DefaultTask { outputFile.setText(output.toString(), 'UTF-8') } - /** + @Input + LinkedHashMap getMappings() { + return mappings + } + + void setMappings(LinkedHashMap mappings) { + this.mappings = mappings + } +/** * Create an URL on Maven Central * based on dependency coordinates. 
*/ diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 692181710f4..b832f53b342 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -32,6 +32,7 @@ import org.gradle.api.InvalidUserDataException import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.Task +import org.gradle.api.plugins.BasePlugin import org.gradle.api.publish.maven.MavenPublication import org.gradle.api.publish.maven.plugins.MavenPublishPlugin import org.gradle.api.publish.maven.tasks.GenerateMavenPom @@ -112,7 +113,7 @@ class PluginBuildPlugin implements Plugin { addNoticeGeneration(project, extension) } } - project.testingConventions { + project.tasks.named('testingConventions').configure { naming.clear() naming { Tests { @@ -175,7 +176,7 @@ class PluginBuildPlugin implements Plugin { /** Adds an integTest task which runs rest tests */ private static void createIntegTestTask(Project project) { RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class) - integTest.mustRunAfter(project.precommit, project.test) + integTest.mustRunAfter('precommit', 'test') if (project.plugins.hasPlugin(TestClustersPlugin.class) == false) { // only if not using test clusters project.integTestCluster.distribution = System.getProperty('tests.distribution', 'integ-test-zip') @@ -259,7 +260,9 @@ class PluginBuildPlugin implements Plugin { include 'bin/**' } } - project.assemble.dependsOn(bundle) + project.tasks.named(BasePlugin.ASSEMBLE_TASK_NAME).configure { + dependsOn(bundle) + } // also make the zip available as a configuration (used when depending on this project) project.configurations.create('zip') diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy 
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 7f3aa3edb0d..8127c485e19 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -26,11 +26,10 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.tool.ClasspathUtils import org.gradle.api.JavaVersion import org.gradle.api.Project -import org.gradle.api.Task import org.gradle.api.artifacts.Configuration import org.gradle.api.plugins.JavaBasePlugin -import org.gradle.api.plugins.JavaPluginConvention import org.gradle.api.plugins.quality.Checkstyle +import org.gradle.api.tasks.TaskProvider /** * Validation tasks which should be run before committing. These run before tests. @@ -41,7 +40,7 @@ class PrecommitTasks { public static final String CHECKSTYLE_VERSION = '8.20' - public static Task create(Project project, boolean includeDependencyLicenses) { + public static TaskProvider create(Project project, boolean includeDependencyLicenses) { project.configurations.create("forbiddenApisCliJar") project.dependencies { forbiddenApisCliJar('de.thetaphi:forbiddenapis:2.6') @@ -57,12 +56,12 @@ class PrecommitTasks { } } - List precommitTasks = [ + List precommitTasks = [ configureCheckstyle(project), configureForbiddenApisCli(project), - project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class), - project.tasks.create('licenseHeaders', LicenseHeadersTask.class), - project.tasks.create('filepermissions', FilePermissionsTask.class), + project.tasks.register('forbiddenPatterns', ForbiddenPatternsTask), + project.tasks.register('licenseHeaders', LicenseHeadersTask), + project.tasks.register('filepermissions', FilePermissionsTask), configureJarHell(project, jarHellConfig), configureThirdPartyAudit(project), configureTestingConventions(project) @@ -71,11 +70,12 @@ class PrecommitTasks { // tasks with just tests don't need 
dependency licenses, so this flag makes adding // the task optional if (includeDependencyLicenses) { - DependencyLicensesTask dependencyLicenses = project.tasks.create('dependencyLicenses', DependencyLicensesTask.class) + TaskProvider dependencyLicenses = project.tasks.register('dependencyLicenses', DependencyLicensesTask) precommitTasks.add(dependencyLicenses) // we also create the updateShas helper task that is associated with dependencyLicenses - UpdateShasTask updateShas = project.tasks.create('updateShas', UpdateShasTask.class) - updateShas.parentTask = dependencyLicenses + project.tasks.register('updateShas', UpdateShasTask) { + it.parentTask = dependencyLicenses + } } if (project.path != ':build-tools') { /* @@ -93,35 +93,36 @@ class PrecommitTasks { // We want to get any compilation error before running the pre-commit checks. project.sourceSets.all { sourceSet -> - precommitTasks.each { task -> - task.shouldRunAfter(sourceSet.getClassesTaskName()) + precommitTasks.each { provider -> + provider.configure { + shouldRunAfter(sourceSet.getClassesTaskName()) + } } } - return project.tasks.create([ - name : 'precommit', - group : JavaBasePlugin.VERIFICATION_GROUP, - description: 'Runs all non-test checks.', - dependsOn : precommitTasks - ]) + return project.tasks.register('precommit') { + group = JavaBasePlugin.VERIFICATION_GROUP + description = 'Runs all non-test checks.' 
+ dependsOn = precommitTasks + } } - static Task configureTestingConventions(Project project) { - TestingConventionsTasks task = project.getTasks().create("testingConventions", TestingConventionsTasks.class) - task.naming { - Tests { - baseClass "org.apache.lucene.util.LuceneTestCase" - } - IT { - baseClass "org.elasticsearch.test.ESIntegTestCase" - baseClass 'org.elasticsearch.test.rest.ESRestTestCase' + static TaskProvider configureTestingConventions(Project project) { + return project.getTasks().register("testingConventions", TestingConventionsTasks) { + naming { + Tests { + baseClass "org.apache.lucene.util.LuceneTestCase" + } + IT { + baseClass "org.elasticsearch.test.ESIntegTestCase" + baseClass 'org.elasticsearch.test.rest.ESRestTestCase' + } } } - return task } - private static Task configureJarHell(Project project, Configuration jarHelConfig) { - return project.tasks.create('jarHell', JarHellTask) { task -> + private static TaskProvider configureJarHell(Project project, Configuration jarHelConfig) { + return project.tasks.register('jarHell', JarHellTask) { task -> task.classpath = project.sourceSets.test.runtimeClasspath + jarHelConfig; if (project.plugins.hasPlugin(ShadowPlugin)) { task.classpath += project.configurations.bundle @@ -130,9 +131,9 @@ class PrecommitTasks { } } - private static Task configureThirdPartyAudit(Project project) { + private static TaskProvider configureThirdPartyAudit(Project project) { ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') - return project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class) { task -> + return project.tasks.register('thirdPartyAudit', ThirdPartyAuditTask) { task -> task.dependsOn(buildResources) task.signatureFile = buildResources.copy("forbidden/third-party-audit.txt") task.javaHome = project.runtimeJavaHome @@ -140,10 +141,10 @@ class PrecommitTasks { } } - private static Task configureForbiddenApisCli(Project project) { + private static 
TaskProvider configureForbiddenApisCli(Project project) { project.pluginManager.apply(ForbiddenApisPlugin) ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources') - project.tasks.withType(CheckForbiddenApis) { + project.tasks.withType(CheckForbiddenApis).configureEach { dependsOn(buildResources) doFirst { // we need to defer this configuration since we don't know the runtime java version until execution time @@ -183,12 +184,14 @@ class PrecommitTasks { ) } } - Task forbiddenApis = project.tasks.getByName("forbiddenApis") - forbiddenApis.group = "" + TaskProvider forbiddenApis = project.tasks.named("forbiddenApis") + forbiddenApis.configure { + group = "" + } return forbiddenApis } - private static Task configureCheckstyle(Project project) { + private static TaskProvider configureCheckstyle(Project project) { // Always copy the checkstyle configuration files to 'buildDir/checkstyle' since the resources could be located in a jar // file. If the resources are located in a jar, Gradle will fail when it tries to turn the URL into a file URL checkstyleConfUrl = PrecommitTasks.getResource("/checkstyle.xml") @@ -196,29 +199,39 @@ class PrecommitTasks { File checkstyleDir = new File(project.buildDir, "checkstyle") File checkstyleSuppressions = new File(checkstyleDir, "checkstyle_suppressions.xml") File checkstyleConf = new File(checkstyleDir, "checkstyle.xml"); - Task copyCheckstyleConf = project.tasks.create("copyCheckstyleConf") + TaskProvider copyCheckstyleConf = project.tasks.register("copyCheckstyleConf") // configure inputs and outputs so up to date works properly - copyCheckstyleConf.outputs.files(checkstyleSuppressions, checkstyleConf) + copyCheckstyleConf.configure { + outputs.files(checkstyleSuppressions, checkstyleConf) + } if ("jar".equals(checkstyleConfUrl.getProtocol())) { JarURLConnection jarURLConnection = (JarURLConnection) checkstyleConfUrl.openConnection() - 
copyCheckstyleConf.inputs.file(jarURLConnection.getJarFileURL()) + copyCheckstyleConf.configure { + inputs.file(jarURLConnection.getJarFileURL()) + } } else if ("file".equals(checkstyleConfUrl.getProtocol())) { - copyCheckstyleConf.inputs.files(checkstyleConfUrl.getFile(), checkstyleSuppressionsUrl.getFile()) - } - - copyCheckstyleConf.doLast { - checkstyleDir.mkdirs() - // withStream will close the output stream and IOGroovyMethods#getBytes reads the InputStream fully and closes it - new FileOutputStream(checkstyleConf).withStream { - it.write(checkstyleConfUrl.openStream().getBytes()) - } - new FileOutputStream(checkstyleSuppressions).withStream { - it.write(checkstyleSuppressionsUrl.openStream().getBytes()) + copyCheckstyleConf.configure { + inputs.files(checkstyleConfUrl.getFile(), checkstyleSuppressionsUrl.getFile()) } } - Task checkstyleTask = project.tasks.create('checkstyle') + copyCheckstyleConf.configure { + doLast { + checkstyleDir.mkdirs() + // withStream will close the output stream and IOGroovyMethods#getBytes reads the InputStream fully and closes it + new FileOutputStream(checkstyleConf).withStream { + it.write(checkstyleConfUrl.openStream().getBytes()) + } + new FileOutputStream(checkstyleSuppressions).withStream { + it.write(checkstyleSuppressionsUrl.openStream().getBytes()) + } + } + } + + TaskProvider checkstyleTask = project.tasks.register('checkstyle') { + dependsOn project.tasks.withType(Checkstyle) + } // Apply the checkstyle plugin to create `checkstyleMain` and `checkstyleTest`. It only // creates them if there is main or test code to check and it makes `check` depend // on them. We also want `precommit` to depend on `checkstyle`. 
@@ -231,8 +244,7 @@ class PrecommitTasks { toolVersion = CHECKSTYLE_VERSION } - project.tasks.withType(Checkstyle) { task -> - checkstyleTask.dependsOn(task) + project.tasks.withType(Checkstyle).configureEach { task -> task.dependsOn(copyCheckstyleConf) task.inputs.file(checkstyleSuppressions) task.reports { @@ -243,13 +255,13 @@ class PrecommitTasks { return checkstyleTask } - private static Task configureLoggerUsage(Project project) { + private static TaskProvider configureLoggerUsage(Project project) { Object dependency = ClasspathUtils.isElasticsearchProject() ? project.project(':test:logger-usage') : "org.elasticsearch.test:logger-usage:${VersionProperties.elasticsearch}" project.configurations.create('loggerUsagePlugin') project.dependencies.add('loggerUsagePlugin', dependency) - return project.tasks.create('loggerUsageCheck', LoggerUsageTask.class) { + return project.tasks.register('loggerUsageCheck', LoggerUsageTask) { classpath = project.configurations.loggerUsagePlugin } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/DistroTestPlugin.java new file mode 100644 index 00000000000..d78dba6b47c --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/DistroTestPlugin.java @@ -0,0 +1,300 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.test; + +import org.elasticsearch.gradle.BuildPlugin; +import org.elasticsearch.gradle.BwcVersions; +import org.elasticsearch.gradle.DistributionDownloadPlugin; +import org.elasticsearch.gradle.ElasticsearchDistribution; +import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor; +import org.elasticsearch.gradle.ElasticsearchDistribution.Platform; +import org.elasticsearch.gradle.ElasticsearchDistribution.Type; +import org.elasticsearch.gradle.Jdk; +import org.elasticsearch.gradle.JdkDownloadPlugin; +import org.elasticsearch.gradle.Version; +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.tool.Boilerplate; +import org.elasticsearch.gradle.vagrant.BatsProgressLogger; +import org.elasticsearch.gradle.vagrant.VagrantBasePlugin; +import org.elasticsearch.gradle.vagrant.VagrantExtension; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.file.Directory; +import org.gradle.api.plugins.ExtraPropertiesExtension; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.Copy; +import org.gradle.api.tasks.TaskInputs; +import org.gradle.api.tasks.TaskProvider; +import org.gradle.api.tasks.testing.Test; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import 
java.util.Arrays; +import java.util.List; +import java.util.Random; +import java.util.stream.Collectors; + +import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath; +import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath; + +public class DistroTestPlugin implements Plugin { + + private static final String GRADLE_JDK_VERSION = "12.0.1+12@69cfe15208a647278a19ef0990eea691"; + + // all distributions used by distro tests. this is temporary until tests are per distribution + private static final String PACKAGING_DISTRIBUTION = "packaging"; + private static final String COPY_PACKAGING_TASK = "copyPackagingArchives"; + private static final String IN_VM_SYSPROP = "tests.inVM"; + + private static Version upgradeVersion; + private Provider archivesDir; + private TaskProvider copyPackagingArchives; + private Jdk gradleJdk; + + @Override + public void apply(Project project) { + project.getPluginManager().apply(JdkDownloadPlugin.class); + project.getPluginManager().apply(DistributionDownloadPlugin.class); + project.getPluginManager().apply(VagrantBasePlugin.class); + project.getPluginManager().apply(JavaPlugin.class); + + configureVM(project); + + if (upgradeVersion == null) { + // just read this once, since it is the same for all projects. 
this is safe because gradle configuration is single threaded + upgradeVersion = getUpgradeVersion(project); + } + + // setup task to run inside VM + configureDistributions(project); + configureCopyPackagingTask(project); + configureDistroTest(project); + configureBatsTest(project, "oss"); + configureBatsTest(project, "default"); + } + + private static Jdk createJdk(NamedDomainObjectContainer jdksContainer, String name, String version, String platform) { + Jdk jdk = jdksContainer.create(name); + jdk.setVersion(version); + jdk.setPlatform(platform); + return jdk; + } + + private static Version getUpgradeVersion(Project project) { + String upgradeFromVersionRaw = System.getProperty("tests.packaging.upgradeVersion"); + if (upgradeFromVersionRaw != null) { + return Version.fromString(upgradeFromVersionRaw); + } + + // was not passed in, so randomly choose one from bwc versions + ExtraPropertiesExtension extraProperties = project.getExtensions().getByType(ExtraPropertiesExtension.class); + + if ((boolean) extraProperties.get("bwc_tests_enabled") == false) { + // Upgrade tests will go from current to current when the BWC tests are disabled to skip real BWC tests + return Version.fromString(project.getVersion().toString()); + } + + ExtraPropertiesExtension rootExtraProperties = project.getRootProject().getExtensions().getByType(ExtraPropertiesExtension.class); + String firstPartOfSeed = rootExtraProperties.get("testSeed").toString().split(":")[0]; + final long seed = Long.parseUnsignedLong(firstPartOfSeed, 16); + BwcVersions bwcVersions = (BwcVersions) extraProperties.get("bwcVersions"); + final List indexCompatVersions = bwcVersions.getIndexCompatible(); + return indexCompatVersions.get(new Random(seed).nextInt(indexCompatVersions.size())); + } + + private void configureVM(Project project) { + String box = project.getName(); + + // setup jdks used by the distro tests, and by gradle executing + + NamedDomainObjectContainer jdksContainer = 
JdkDownloadPlugin.getContainer(project); + String platform = box.contains("windows") ? "windows" : "linux"; + this.gradleJdk = createJdk(jdksContainer, "gradle", GRADLE_JDK_VERSION, platform); + + // setup VM used by these tests + VagrantExtension vagrant = project.getExtensions().getByType(VagrantExtension.class); + vagrant.setBox(box); + vagrant.vmEnv("PATH", convertPath(project, vagrant, gradleJdk, "/bin:$PATH", "\\bin;$Env:PATH")); + vagrant.setIsWindowsVM(box.contains("windows")); + } + + private static Object convertPath(Project project, VagrantExtension vagrant, Jdk jdk, + String additionaLinux, String additionalWindows) { + return new Object() { + @Override + public String toString() { + if (vagrant.isWindowsVM()) { + return convertWindowsPath(project, jdk.getPath()) + additionalWindows; + } + return convertLinuxPath(project, jdk.getPath()) + additionaLinux; + } + }; + } + + private void configureCopyPackagingTask(Project project) { + this.archivesDir = project.getParent().getLayout().getBuildDirectory().dir("packaging/archives"); + // temporary, until we have tasks per distribution + this.copyPackagingArchives = Boilerplate.maybeRegister(project.getParent().getTasks(), COPY_PACKAGING_TASK, Copy.class, + t -> { + t.into(archivesDir); + t.from(project.getConfigurations().getByName(PACKAGING_DISTRIBUTION)); + + Path archivesPath = archivesDir.get().getAsFile().toPath(); + + // write bwc version, and append -SNAPSHOT if it is an unreleased version + ExtraPropertiesExtension extraProperties = project.getExtensions().getByType(ExtraPropertiesExtension.class); + BwcVersions bwcVersions = (BwcVersions) extraProperties.get("bwcVersions"); + final String upgradeFromVersion; + if (bwcVersions.unreleasedInfo(upgradeVersion) != null) { + upgradeFromVersion = upgradeVersion.toString() + "-SNAPSHOT"; + } else { + upgradeFromVersion = upgradeVersion.toString(); + } + TaskInputs inputs = t.getInputs(); + inputs.property("version", VersionProperties.getElasticsearch()); + 
inputs.property("upgrade_from_version", upgradeFromVersion); + // TODO: this is serializable, need to think how to represent this as an input + //inputs.property("bwc_versions", bwcVersions); + t.doLast(action -> { + try { + Files.writeString(archivesPath.resolve("version"), VersionProperties.getElasticsearch()); + Files.writeString(archivesPath.resolve("upgrade_from_version"), upgradeFromVersion); + // this is always true, but bats tests rely on it. It is just temporary until bats is removed. + Files.writeString(archivesPath.resolve("upgrade_is_oss"), ""); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); + }); + } + + private void configureDistroTest(Project project) { + BuildPlugin.configureCompile(project); + BuildPlugin.configureRepositories(project); + BuildPlugin.configureTestTasks(project); + BuildPlugin.configureInputNormalization(project); + + TaskProvider destructiveTest = project.getTasks().register("destructiveDistroTest", Test.class, + t -> { + t.setMaxParallelForks(1); + t.setWorkingDir(archivesDir.get()); + if (System.getProperty(IN_VM_SYSPROP) == null) { + t.dependsOn(copyPackagingArchives, gradleJdk); + } + }); + + // setup outer task to run + project.getTasks().register("distroTest", GradleDistroTestTask.class, + t -> { + t.setGroup(JavaBasePlugin.VERIFICATION_GROUP); + t.setDescription("Runs distribution tests within vagrant"); + t.setTaskName(project.getPath() + ":" + destructiveTest.getName()); + t.extraArg("-D'" + IN_VM_SYSPROP + "'"); + t.dependsOn(copyPackagingArchives, gradleJdk); + }); + } + + private void configureBatsTest(Project project, String type) { + + // destructive task to run inside + TaskProvider destructiveTest = project.getTasks().register("destructiveBatsTest." 
+ type, BatsTestTask.class, + t -> { + // this is hacky for shared source, but bats are a temporary thing we are removing, so it is not worth + // the overhead of a real project dependency + Directory batsDir = project.getParent().getLayout().getProjectDirectory().dir("bats"); + t.setTestsDir(batsDir.dir(type)); + t.setUtilsDir(batsDir.dir("utils")); + t.setArchivesDir(archivesDir.get()); + t.setPackageName("elasticsearch" + (type.equals("oss") ? "-oss" : "")); + if (System.getProperty(IN_VM_SYSPROP) == null) { + t.dependsOn(copyPackagingArchives, gradleJdk); + } + }); + + VagrantExtension vagrant = project.getExtensions().getByType(VagrantExtension.class); + // setup outer task to run + project.getTasks().register("batsTest." + type, GradleDistroTestTask.class, + t -> { + t.setGroup(JavaBasePlugin.VERIFICATION_GROUP); + t.setDescription("Runs bats tests within vagrant"); + t.setTaskName(project.getPath() + ":" + destructiveTest.getName()); + t.setProgressHandler(new BatsProgressLogger(project.getLogger())); + t.extraArg("-D'" + IN_VM_SYSPROP + "'"); + t.dependsOn(copyPackagingArchives, gradleJdk); + t.onlyIf(spec -> vagrant.isWindowsVM() == false); // bats doesn't run on windows + }); + } + + private void configureDistributions(Project project) { + NamedDomainObjectContainer distributions = DistributionDownloadPlugin.getContainer(project); + + for (Type type : Arrays.asList(Type.DEB, Type.RPM)) { + for (Flavor flavor : Flavor.values()) { + for (boolean bundledJdk : Arrays.asList(true, false)) { + addDistro(distributions, type, null, flavor, bundledJdk, VersionProperties.getElasticsearch()); + } + } + // upgrade version is always bundled jdk + // NOTE: this is mimicking the old VagrantTestPlugin upgrade behavior. 
It will eventually be replaced + with a dedicated upgrade test from every bwc version like other bwc tests + addDistro(distributions, type, null, Flavor.DEFAULT, true, upgradeVersion.toString()); + if (upgradeVersion.onOrAfter("6.3.0")) { + addDistro(distributions, type, null, Flavor.OSS, true, upgradeVersion.toString()); + } + } + for (Platform platform : Arrays.asList(Platform.LINUX, Platform.WINDOWS)) { + for (Flavor flavor : Flavor.values()) { + for (boolean bundledJdk : Arrays.asList(true, false)) { + addDistro(distributions, Type.ARCHIVE, platform, flavor, bundledJdk, VersionProperties.getElasticsearch()); + } + } + } + + // temporary until distro tests have one test per distro + Configuration packagingConfig = project.getConfigurations().create(PACKAGING_DISTRIBUTION); + List distroConfigs = distributions.stream().map(ElasticsearchDistribution::getConfiguration) + .collect(Collectors.toList()); + packagingConfig.setExtendsFrom(distroConfigs); + } + + private static void addDistro(NamedDomainObjectContainer distributions, + Type type, Platform platform, Flavor flavor, boolean bundledJdk, String version) { + + String name = flavor + "-" + (type == Type.ARCHIVE ? platform + "-" : "") + type + (bundledJdk ?
"" : "-no-jdk") + "-" + version; + if (distributions.findByName(name) != null) { + return; + } + distributions.create(name, d -> { + d.setFlavor(flavor); + d.setType(type); + if (type == Type.ARCHIVE) { + d.setPlatform(platform); + } + d.setBundledJdk(bundledJdk); + d.setVersion(version); + }); + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 13482355572..4617968949c 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -20,6 +20,7 @@ package org.elasticsearch.gradle.test import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testclusters.ElasticsearchCluster +import org.elasticsearch.gradle.testclusters.RestTestRunnerTask import org.elasticsearch.gradle.testclusters.TestClustersPlugin import org.elasticsearch.gradle.tool.ClasspathUtils import org.gradle.api.DefaultTask @@ -49,8 +50,6 @@ class RestIntegTestTask extends DefaultTask { protected Test runner - protected Task clusterInit - /** Info about nodes in the integ test cluster. Note this is *not* available until runtime. 
*/ List nodes @@ -61,8 +60,6 @@ class RestIntegTestTask extends DefaultTask { RestIntegTestTask() { runner = project.tasks.create("${name}Runner", RestTestRunnerTask.class) super.dependsOn(runner) - clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses) - runner.dependsOn(clusterInit) boolean usesTestclusters = project.plugins.hasPlugin(TestClustersPlugin.class) if (usesTestclusters == false) { clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project) @@ -75,8 +72,6 @@ class RestIntegTestTask extends DefaultTask { runner.useCluster project.testClusters."$name" } - // override/add more for rest tests - runner.maxParallelForks = 1 runner.include('**/*IT.class') runner.systemProperty('tests.rest.load_packaged', 'false') @@ -134,7 +129,6 @@ class RestIntegTestTask extends DefaultTask { project.gradle.projectsEvaluated { if (enabled == false) { runner.enabled = false - clusterInit.enabled = false return // no need to add cluster formation tasks if the task won't run! } if (usesTestclusters == false) { @@ -185,11 +179,6 @@ class RestIntegTestTask extends DefaultTask { } } - @Override - public Task mustRunAfter(Object... tasks) { - clusterInit.mustRunAfter(tasks) - } - public void runner(Closure configure) { project.tasks.getByName("${name}Runner").configure(configure) } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/VagrantFixture.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/VagrantFixture.groovy deleted file mode 100644 index fa08a8f9c66..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/VagrantFixture.groovy +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.gradle.test - -import org.elasticsearch.gradle.vagrant.VagrantCommandTask -import org.gradle.api.Task - -/** - * A fixture for integration tests which runs in a virtual machine launched by Vagrant. - */ -class VagrantFixture extends VagrantCommandTask implements Fixture { - - private VagrantCommandTask stopTask - - public VagrantFixture() { - this.stopTask = project.tasks.create(name: "${name}#stop", type: VagrantCommandTask) { - command 'halt' - } - finalizedBy this.stopTask - } - - @Override - void setBoxName(String boxName) { - super.setBoxName(boxName) - this.stopTask.setBoxName(boxName) - } - - @Override - void setEnvironmentVars(Map environmentVars) { - super.setEnvironmentVars(environmentVars) - this.stopTask.setEnvironmentVars(environmentVars) - } - - @Override - public Task getStopTask() { - return this.stopTask - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy deleted file mode 100644 index bcc612c7afa..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.gradle.vagrant - -import org.apache.commons.io.output.TeeOutputStream -import org.elasticsearch.gradle.LoggedExec -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.Optional -import org.gradle.internal.logging.progress.ProgressLoggerFactory - -import javax.inject.Inject - -/** - * Runs a vagrant command. Pretty much like Exec task but with a nicer output - * formatter and defaults to `vagrant` as first part of commandLine. - */ -public class VagrantCommandTask extends LoggedExec { - - @Input - String command - - @Input @Optional - String subcommand - - @Input - String boxName - - @Input - Map environmentVars - - public VagrantCommandTask() { - executable = 'vagrant' - - // We're using afterEvaluate here to slot in some logic that captures configurations and - // modifies the command line right before the main execution happens. The reason that we - // call doFirst instead of just doing the work in the afterEvaluate is that the latter - // restricts how subclasses can extend functionality. Calling afterEvaluate is like having - // all the logic of a task happening at construction time, instead of at execution time - // where a subclass can override or extend the logic. 
- project.afterEvaluate { - doFirst { - if (environmentVars != null) { - environment environmentVars - } - - // Build our command line for vagrant - def vagrantCommand = [executable, command] - if (subcommand != null) { - vagrantCommand = vagrantCommand + subcommand - } - commandLine([*vagrantCommand, boxName, *args]) - - // It'd be nice if --machine-readable were, well, nice - standardOutput = new TeeOutputStream(standardOutput, createLoggerOutputStream()) - } - } - } - - @Inject - ProgressLoggerFactory getProgressLoggerFactory() { - throw new UnsupportedOperationException() - } - - protected OutputStream createLoggerOutputStream() { - return new VagrantLoggerOutputStream(getProgressLoggerFactory().newOperation(boxName + " " + command).setDescription(boxName), - /* Vagrant tends to output a lot of stuff, but most of the important - stuff starts with ==> $box */ - "==> $boxName: ") - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy deleted file mode 100644 index e9b664a5a31..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.gradle.vagrant - -import org.elasticsearch.gradle.Version -import org.gradle.api.tasks.Input - -class VagrantPropertiesExtension { - - @Input - List boxes - - @Input - Version upgradeFromVersion - - @Input - List upgradeFromVersions - - @Input - String batsDir - - @Input - Boolean inheritTests - - @Input - Boolean inheritTestUtils - - @Input - String testClass - - VagrantPropertiesExtension(List availableBoxes) { - this.boxes = availableBoxes - this.batsDir = 'src/test/resources/packaging' - } - - void boxes(String... boxes) { - this.boxes = Arrays.asList(boxes) - } - - void setBatsDir(String batsDir) { - this.batsDir = batsDir - } - - void setInheritTests(Boolean inheritTests) { - this.inheritTests = inheritTests - } - - void setInheritTestUtils(Boolean inheritTestUtils) { - this.inheritTestUtils = inheritTestUtils - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantSupportPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantSupportPlugin.groovy deleted file mode 100644 index 9dfe487e830..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantSupportPlugin.groovy +++ /dev/null @@ -1,127 +0,0 @@ -package org.elasticsearch.gradle.vagrant - -import org.gradle.api.GradleException -import org.gradle.api.InvalidUserDataException -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.process.ExecResult -import org.gradle.process.internal.ExecException - -/** - * Global configuration for if Vagrant tasks are supported in this - * build environment. 
- */ -class VagrantSupportPlugin implements Plugin { - - @Override - void apply(Project project) { - if (project.rootProject.ext.has('vagrantEnvChecksDone') == false) { - Map vagrantInstallation = getVagrantInstallation(project) - Map virtualBoxInstallation = getVirtualBoxInstallation(project) - - project.rootProject.ext.vagrantInstallation = vagrantInstallation - project.rootProject.ext.virtualBoxInstallation = virtualBoxInstallation - project.rootProject.ext.vagrantSupported = vagrantInstallation.supported && virtualBoxInstallation.supported - project.rootProject.ext.vagrantEnvChecksDone = true - - // Finding that HOME needs to be set when performing vagrant updates - String homeLocation = System.getenv("HOME") - if (project.rootProject.ext.vagrantSupported && homeLocation == null) { - throw new GradleException("Could not locate \$HOME environment variable. Vagrant is enabled " + - "and requires \$HOME to be set to function properly.") - } - } - - addVerifyInstallationTasks(project) - } - - private Map getVagrantInstallation(Project project) { - try { - ByteArrayOutputStream pipe = new ByteArrayOutputStream() - ExecResult runResult = project.exec { - commandLine 'vagrant', '--version' - standardOutput pipe - ignoreExitValue true - } - String version = pipe.toString().trim() - if (runResult.exitValue == 0) { - if (version ==~ /Vagrant 1\.(8\.[6-9]|9\.[0-9])+/ || version ==~ /Vagrant 2\.[0-9]+\.[0-9]+/) { - return [ 'supported' : true ] - } else { - return [ 'supported' : false, - 'info' : "Illegal version of vagrant [${version}]. Need [Vagrant 1.8.6+]" ] - } - } else { - return [ 'supported' : false, - 'info' : "Could not read installed vagrant version:\n" + version ] - } - } catch (ExecException e) { - // Exec still throws this if it cannot find the command, regardless if ignoreExitValue is set. - // Swallow error. Vagrant isn't installed. Don't halt the build here. 
- return [ 'supported' : false, 'info' : "Could not find vagrant: " + e.message ] - } - } - - private Map getVirtualBoxInstallation(Project project) { - try { - ByteArrayOutputStream pipe = new ByteArrayOutputStream() - ExecResult runResult = project.exec { - commandLine 'vboxmanage', '--version' - standardOutput = pipe - ignoreExitValue true - } - String version = pipe.toString().trim() - if (runResult.exitValue == 0) { - try { - String[] versions = version.split('\\.') - int major = Integer.parseInt(versions[0]) - int minor = Integer.parseInt(versions[1]) - if ((major < 5) || (major == 5 && minor < 1)) { - return [ 'supported' : false, - 'info' : "Illegal version of virtualbox [${version}]. Need [5.1+]" ] - } else { - return [ 'supported' : true ] - } - } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) { - return [ 'supported' : false, - 'info' : "Unable to parse version of virtualbox [${version}]. Required [5.1+]" ] - } - } else { - return [ 'supported': false, 'info': "Could not read installed virtualbox version:\n" + version ] - } - } catch (ExecException e) { - // Exec still throws this if it cannot find the command, regardless if ignoreExitValue is set. - // Swallow error. VirtualBox isn't installed. Don't halt the build here. 
- return [ 'supported' : false, 'info' : "Could not find virtualbox: " + e.message ] - } - } - - private void addVerifyInstallationTasks(Project project) { - createCheckVagrantVersionTask(project) - createCheckVirtualBoxVersionTask(project) - } - - private void createCheckVagrantVersionTask(Project project) { - project.tasks.create('vagrantCheckVersion') { - description 'Check the Vagrant version' - group 'Verification' - doLast { - if (project.rootProject.vagrantInstallation.supported == false) { - throw new InvalidUserDataException(project.rootProject.vagrantInstallation.info) - } - } - } - } - - private void createCheckVirtualBoxVersionTask(Project project) { - project.tasks.create('virtualboxCheckVersion') { - description 'Check the Virtualbox version' - group 'Verification' - doLast { - if (project.rootProject.virtualBoxInstallation.supported == false) { - throw new InvalidUserDataException(project.rootProject.virtualBoxInstallation.info) - } - } - } - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy deleted file mode 100644 index 3868e0417f4..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ /dev/null @@ -1,658 +0,0 @@ -package org.elasticsearch.gradle.vagrant - -import org.apache.tools.ant.taskdefs.condition.Os -import org.elasticsearch.gradle.BwcVersions -import org.elasticsearch.gradle.FileContentsTask -import org.elasticsearch.gradle.Jdk -import org.elasticsearch.gradle.JdkDownloadPlugin -import org.elasticsearch.gradle.LoggedExec -import org.elasticsearch.gradle.Version -import org.gradle.api.GradleException -import org.gradle.api.InvalidUserDataException -import org.gradle.api.NamedDomainObjectContainer -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.Task -import org.gradle.api.artifacts.dsl.RepositoryHandler -import 
org.gradle.api.execution.TaskExecutionAdapter -import org.gradle.api.internal.artifacts.dependencies.DefaultProjectDependency -import org.gradle.api.tasks.Copy -import org.gradle.api.tasks.Delete -import org.gradle.api.tasks.Exec -import org.gradle.api.tasks.StopExecutionException -import org.gradle.api.tasks.TaskState - -import java.nio.file.Paths - -import static java.util.Collections.unmodifiableList - -class VagrantTestPlugin implements Plugin { - - /** All Linux boxes that we test. These are all always supplied **/ - static final List LINUX_BOXES = unmodifiableList([ - 'centos-6', - 'centos-7', - 'debian-8', - 'debian-9', - 'fedora-28', - 'fedora-29', - 'oel-6', - 'oel-7', - 'opensuse-42', - /* TODO: need a real RHEL license now that it is out of beta 'rhel-8',*/ - 'sles-12', - 'ubuntu-1604', - 'ubuntu-1804' - ]) - - /** All Windows boxes that we test, which may or may not be supplied **/ - static final List WINDOWS_BOXES = unmodifiableList([ - 'windows-2012r2', - 'windows-2016' - ]) - - /** All boxes that we test, some of which may not be supplied **/ - static final List ALL_BOXES = unmodifiableList(LINUX_BOXES + WINDOWS_BOXES) - - /** Boxes used when sampling the tests **/ - static final List SAMPLE = unmodifiableList([ - 'centos-7', - 'ubuntu-1604' - ]) - - /** All distributions to bring into test VM, whether or not they are used **/ - static final List DISTRIBUTIONS = unmodifiableList([ - 'archives:linux-tar', - 'archives:oss-linux-tar', - 'archives:windows-zip', - 'archives:oss-windows-zip', - 'packages:rpm', - 'packages:oss-rpm', - 'packages:deb', - 'packages:oss-deb', - 'archives:no-jdk-linux-tar', - 'archives:oss-no-jdk-linux-tar', - 'archives:no-jdk-windows-zip', - 'archives:oss-no-jdk-windows-zip', - 'packages:no-jdk-rpm', - 'packages:oss-no-jdk-rpm', - 'packages:no-jdk-deb', - 'packages:oss-no-jdk-deb' - ]) - - /** Packages onboarded for upgrade tests **/ - static final List UPGRADE_FROM_ARCHIVES = unmodifiableList(['rpm', 'deb']) - - private static 
final PACKAGING_CONFIGURATION = 'packaging' - private static final PACKAGING_TEST_CONFIGURATION = 'packagingTest' - private static final BATS = 'bats' - private static final String BATS_TEST_COMMAND ="cd \$PACKAGING_ARCHIVES && sudo bats --tap \$BATS_TESTS/*.$BATS" - - /** Boxes that have been supplied and are available for testing **/ - List availableBoxes = [] - - /** extra env vars to pass to vagrant for box configuration **/ - Map vagrantBoxEnvVars = [:] - - private static final String GRADLE_JDK_VERSION = "12.0.1+12@69cfe15208a647278a19ef0990eea691" - private Jdk linuxGradleJdk; - private Jdk windowsGradleJdk; - - @Override - void apply(Project project) { - project.pluginManager.apply(JdkDownloadPlugin.class) - NamedDomainObjectContainer jdksContainer = (NamedDomainObjectContainer) project.getExtensions().getByName("jdks"); - linuxGradleJdk = jdksContainer.create("linux_gradle") { - version = GRADLE_JDK_VERSION - platform = "linux" - } - windowsGradleJdk = jdksContainer.create("windows_gradle") { - version = GRADLE_JDK_VERSION - platform = "windows" - } - - collectAvailableBoxes(project) - - // Creates the Vagrant extension for the project - project.extensions.create('esvagrant', VagrantPropertiesExtension, listSelectedBoxes(project)) - - // Add required repositories for packaging tests - configurePackagingArchiveRepositories(project) - - // Creates custom configurations for Bats testing files (and associated scripts and archives) - createPackagingConfiguration(project) - project.configurations.create(PACKAGING_TEST_CONFIGURATION) - - // Creates all the main Vagrant tasks - createVagrantTasks(project) - - if (project.extensions.esvagrant.boxes == null || project.extensions.esvagrant.boxes.size() == 0) { - throw new InvalidUserDataException('Must specify at least one vagrant box') - } - - for (String box : project.extensions.esvagrant.boxes) { - if (ALL_BOXES.contains(box) == false) { - throw new InvalidUserDataException("Vagrant box [${box}] is unknown to this 
plugin. Valid boxes are ${ALL_BOXES}") - } - - if (availableBoxes.contains(box) == false) { - throw new InvalidUserDataException("Vagrant box [${box}] is not available because an image is not supplied for it. " + - "Available boxes with supplied images are ${availableBoxes}") - } - } - - // Creates all tasks related to the Vagrant boxes - createVagrantBoxesTasks(project) - } - - /** - * Enumerate all the boxes that we know about and could possibly choose to test - */ - private void collectAvailableBoxes(Project project) { - // these images are hardcoded in the Vagrantfile and are always available - availableBoxes.addAll(LINUX_BOXES) - - // these images need to be provided at runtime - String windows_2012r2_box = project.getProperties().get('vagrant.windows-2012r2.id') - if (windows_2012r2_box != null && windows_2012r2_box.isEmpty() == false) { - availableBoxes.add('windows-2012r2') - vagrantBoxEnvVars['VAGRANT_WINDOWS_2012R2_BOX'] = windows_2012r2_box - } - - String windows_2016_box = project.getProperties().get('vagrant.windows-2016.id') - if (windows_2016_box != null && windows_2016_box.isEmpty() == false) { - availableBoxes.add('windows-2016') - vagrantBoxEnvVars['VAGRANT_WINDOWS_2016_BOX'] = windows_2016_box - } - } - - /** - * Enumerate all the boxes that we have chosen to test - */ - private static List listSelectedBoxes(Project project) { - String vagrantBoxes = project.getProperties().get('vagrant.boxes', 'sample') - switch (vagrantBoxes) { - case 'sample': - return SAMPLE - case 'linux-all': - return LINUX_BOXES - case 'windows-all': - return WINDOWS_BOXES - case 'all': - return ALL_BOXES - case '': - return [] - default: - return vagrantBoxes.split(',') - } - } - - private static void configurePackagingArchiveRepositories(Project project) { - RepositoryHandler repos = project.repositories - - repos.jcenter() // will have releases before 5.0.0 - - /* Setup a repository that tries to download from - 
https://artifacts.elastic.co/downloads/elasticsearch/[module]-[revision].[ext] - which should work for 5.0.0+. This isn't a real ivy repository but gradle - is fine with that */ - repos.ivy { - name "elasticsearch" - artifactPattern "https://artifacts.elastic.co/downloads/elasticsearch/[module]-[revision].[ext]" - } - } - - private static void createPackagingConfiguration(Project project) { - project.configurations.create(PACKAGING_CONFIGURATION) - - String upgradeFromVersionRaw = System.getProperty("tests.packaging.upgradeVersion"); - Version upgradeFromVersion - if (upgradeFromVersionRaw == null) { - String firstPartOfSeed = project.rootProject.testSeed.tokenize(':').get(0) - final long seed = Long.parseUnsignedLong(firstPartOfSeed, 16) - final def indexCompatVersions = project.bwcVersions.indexCompatible - upgradeFromVersion = indexCompatVersions[new Random(seed).nextInt(indexCompatVersions.size())] - } else { - upgradeFromVersion = Version.fromString(upgradeFromVersionRaw) - } - - List dependencies = new ArrayList<>() - DISTRIBUTIONS.each { - // Adds a dependency for the current version - dependencies.add(project.dependencies.project(path: ":distribution:${it}", configuration: 'default')) - } - - if (project.ext.bwc_tests_enabled) { - // The version of elasticsearch that we upgrade *from* - // we only add them as dependencies if the bwc tests are enabled, so we don't trigger builds otherwise - BwcVersions.UnreleasedVersionInfo unreleasedInfo = project.bwcVersions.unreleasedInfo(upgradeFromVersion) - if (unreleasedInfo != null) { - // handle snapshots pointing to bwc build - UPGRADE_FROM_ARCHIVES.each { - dependencies.add(project.dependencies.project( - path: "${unreleasedInfo.gradleProjectPath}", configuration: it)) - if (upgradeFromVersion.onOrAfter('6.3.0')) { - dependencies.add(project.dependencies.project( - path: "${unreleasedInfo.gradleProjectPath}", configuration: "oss-${it}")) - } - } - } else { - UPGRADE_FROM_ARCHIVES.each { - // The version of 
elasticsearch that we upgrade *from* - if (upgradeFromVersion.onOrAfter('7.0.0')) { - String arch = it == "rpm" ? "x86_64" : "amd64" - dependencies.add("downloads.${it}:elasticsearch:${upgradeFromVersion}-${arch}@${it}") - dependencies.add("downloads.${it}:elasticsearch-oss:${upgradeFromVersion}-${arch}@${it}") - } else { - dependencies.add("downloads.${it}:elasticsearch:${upgradeFromVersion}@${it}") - if (upgradeFromVersion.onOrAfter('6.3.0')) { - dependencies.add("downloads.${it}:elasticsearch-oss:${upgradeFromVersion}@${it}") - } - } - } - } - } else { - // Upgrade tests will go from current to current when the BWC tests are disabled to skip real BWC tests. - upgradeFromVersion = Version.fromString(project.version) - } - - for (Object dependency : dependencies) { - project.dependencies.add(PACKAGING_CONFIGURATION, dependency) - } - - project.extensions.esvagrant.upgradeFromVersion = upgradeFromVersion - } - - private static void createCleanTask(Project project) { - if (project.tasks.findByName('clean') == null) { - project.tasks.create('clean', Delete.class) { - description 'Clean the project build directory' - group 'Build' - delete project.buildDir - } - } - } - - private static void createStopTask(Project project) { - project.tasks.create('stop') { - description 'Stop any tasks from tests that still may be running' - group 'Verification' - } - } - - private static void createSmokeTestTask(Project project) { - project.tasks.create('vagrantSmokeTest') { - description 'Smoke test the specified vagrant boxes' - group 'Verification' - } - } - - private void createPrepareVagrantTestEnvTask(Project project) { - File packagingDir = new File(project.buildDir, PACKAGING_CONFIGURATION) - - File archivesDir = new File(packagingDir, 'archives') - Copy copyPackagingArchives = project.tasks.create('copyPackagingArchives', Copy) { - into archivesDir - from project.configurations[PACKAGING_CONFIGURATION] - } - - File testsDir = new File(packagingDir, 'tests') - Copy 
copyPackagingTests = project.tasks.create('copyPackagingTests', Copy) { - into testsDir - from project.configurations[PACKAGING_TEST_CONFIGURATION] - } - - Task createLinuxRunnerScript = project.tasks.create('createLinuxRunnerScript', FileContentsTask) { - dependsOn copyPackagingTests, linuxGradleJdk - file "${testsDir}/run-tests.sh" - contents """\ - if [ "\$#" -eq 0 ]; then - test_args=( "${-> project.extensions.esvagrant.testClass}" ) - else - test_args=( "\$@" ) - fi - - "${-> convertLinuxPath(project, linuxGradleJdk.toString()) }"/bin/java -cp "\$PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner "\${test_args[@]}" - """ - } - Task createWindowsRunnerScript = project.tasks.create('createWindowsRunnerScript', FileContentsTask) { - dependsOn copyPackagingTests, windowsGradleJdk - file "${testsDir}/run-tests.ps1" - // the use of $args rather than param() here is deliberate because the syntax for array (multivalued) parameters is likely - // a little trappy for those unfamiliar with powershell - contents """\ - try { - if (\$args.Count -eq 0) { - \$testArgs = @("${-> project.extensions.esvagrant.testClass}") - } else { - \$testArgs = \$args - } - & "${-> convertWindowsPath(project, windowsGradleJdk.toString()) }/bin/java" -cp "\$Env:PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner @testArgs - exit \$LASTEXITCODE - } catch { - # catch if we have a failure to even run the script at all above, equivalent to set -e, sort of - echo "\$_.Exception.Message" - exit 1 - } - """ - } - - Task createVersionFile = project.tasks.create('createVersionFile', FileContentsTask) { - dependsOn copyPackagingArchives - file "${archivesDir}/version" - contents project.version - } - - Task createUpgradeFromFile = project.tasks.create('createUpgradeFromFile', FileContentsTask) { - String version = project.extensions.esvagrant.upgradeFromVersion - if (project.bwcVersions.unreleased.contains(project.extensions.esvagrant.upgradeFromVersion)) { - version += "-SNAPSHOT" - } 
- dependsOn copyPackagingArchives - file "${archivesDir}/upgrade_from_version" - contents version - } - - Task createUpgradeIsOssFile = project.tasks.create('createUpgradeIsOssFile', FileContentsTask) { - dependsOn copyPackagingArchives - doFirst { - project.delete("${archivesDir}/upgrade_is_oss") - if (project.extensions.esvagrant.upgradeFromVersion.before('6.3.0')) { - throw new StopExecutionException("upgrade version is before 6.3.0") - } - } - file "${archivesDir}/upgrade_is_oss" - contents '' - } - - File batsDir = new File(packagingDir, BATS) - Copy copyBatsTests = project.tasks.create('copyBatsTests', Copy) { - into "${batsDir}/tests" - from { - "${project.extensions.esvagrant.batsDir}/tests" - } - } - - Copy copyBatsUtils = project.tasks.create('copyBatsUtils', Copy) { - into "${batsDir}/utils" - from { - "${project.extensions.esvagrant.batsDir}/utils" - } - } - - // Now we iterate over dependencies of the bats configuration. When a project dependency is found, - // we bring back its test files or test utils. 
- project.afterEvaluate { - project.configurations[PACKAGING_CONFIGURATION].dependencies - .findAll {it.targetConfiguration == PACKAGING_CONFIGURATION } - .each { d -> - if (d instanceof DefaultProjectDependency) { - DefaultProjectDependency externalBatsDependency = (DefaultProjectDependency) d - Project externalBatsProject = externalBatsDependency.dependencyProject - String externalBatsDir = externalBatsProject.extensions.esvagrant.batsDir - - if (project.extensions.esvagrant.inheritTests) { - copyBatsTests.from(externalBatsProject.files("${externalBatsDir}/tests")) - } - if (project.extensions.esvagrant.inheritTestUtils) { - copyBatsUtils.from(externalBatsProject.files("${externalBatsDir}/utils")) - } - } - } - } - - Task vagrantSetUpTask = project.tasks.create('setupPackagingTest') - vagrantSetUpTask.dependsOn( - 'vagrantCheckVersion', - copyPackagingArchives, - copyPackagingTests, - createLinuxRunnerScript, - createWindowsRunnerScript, - createVersionFile, - createUpgradeFromFile, - createUpgradeIsOssFile, - copyBatsTests, - copyBatsUtils - ) - } - - private static void createPackagingTestTask(Project project) { - project.tasks.create('packagingTest') { - group 'Verification' - description "Tests distribution installation on different platforms using vagrant. See TESTING.asciidoc for details." 
- dependsOn 'vagrantCheckVersion' - } - } - - private void createBoxListTasks(Project project) { - project.tasks.create('listAllBoxes') { - group 'Verification' - description 'List all vagrant boxes which can be tested by this plugin' - doLast { - println("All vagrant boxes supported by ${project.path}") - for (String box : ALL_BOXES) { - println(box) - } - } - dependsOn 'vagrantCheckVersion' - } - - project.tasks.create('listAvailableBoxes') { - group 'Verification' - description 'List all vagrant boxes which are available for testing' - doLast { - println("All vagrant boxes available to ${project.path}") - for (String box : availableBoxes) { - println(box) - } - } - dependsOn 'vagrantCheckVersion' - } - } - - private void createVagrantTasks(Project project) { - createCleanTask(project) - createStopTask(project) - createSmokeTestTask(project) - createPrepareVagrantTestEnvTask(project) - createPackagingTestTask(project) - createBoxListTasks(project) - } - - private void createVagrantBoxesTasks(Project project) { - assert project.extensions.esvagrant.boxes != null - - assert project.tasks.stop != null - Task stop = project.tasks.stop - - assert project.tasks.vagrantSmokeTest != null - Task vagrantSmokeTest = project.tasks.vagrantSmokeTest - - assert project.tasks.vagrantCheckVersion != null - Task vagrantCheckVersion = project.tasks.vagrantCheckVersion - - assert project.tasks.virtualboxCheckVersion != null - Task virtualboxCheckVersion = project.tasks.virtualboxCheckVersion - - assert project.tasks.setupPackagingTest != null - Task setupPackagingTest = project.tasks.setupPackagingTest - - assert project.tasks.packagingTest != null - Task packagingTest = project.tasks.packagingTest - - /* - * We always use the main project.rootDir as Vagrant's current working directory (VAGRANT_CWD) - * so that boxes are not duplicated for every Gradle project that use this VagrantTestPlugin. 
- */ - def vagrantEnvVars = [ - 'VAGRANT_CWD' : "${project.rootDir.absolutePath}", - 'VAGRANT_VAGRANTFILE' : 'Vagrantfile', - 'VAGRANT_PROJECT_DIR' : "${project.projectDir.absolutePath}" - ] - vagrantEnvVars.putAll(vagrantBoxEnvVars) - - // Each box gets it own set of tasks - for (String box : availableBoxes) { - String boxTask = box.capitalize().replace('-', '') - - // always add a halt task for all boxes, so clean makes sure they are all shutdown - Task halt = project.tasks.create("vagrant${boxTask}#halt", VagrantCommandTask) { - command 'halt' - boxName box - environmentVars vagrantEnvVars - } - stop.dependsOn(halt) - - Task update = project.tasks.create("vagrant${boxTask}#update", VagrantCommandTask) { - command 'box' - subcommand 'update' - boxName box - environmentVars vagrantEnvVars - dependsOn vagrantCheckVersion, virtualboxCheckVersion - } - update.mustRunAfter(setupPackagingTest) - - /* - * Destroying before every execution can be annoying while iterating on tests locally. Therefore, we provide a flag - * vagrant.destroy that defaults to true that can be used to control whether or not to destroy any test boxes before test - * execution. - */ - final String vagrantDestroyProperty = project.getProperties().get('vagrant.destroy', 'true') - boolean vagrantDestroy - if ("true".equals(vagrantDestroyProperty)) { - vagrantDestroy = true - } else if ("false".equals(vagrantDestroyProperty)) { - vagrantDestroy = false - } else { - throw new GradleException("[vagrant.destroy] must be [true] or [false] but was [" + vagrantDestroyProperty + "]") - } - /* - * Some versions of Vagrant will fail destroy if the box does not exist. Therefore we check if the box exists before attempting - * to destroy the box. 
- */ - final Task destroy = project.tasks.create("vagrant${boxTask}#destroy", LoggedExec) { - commandLine "bash", "-c", "vagrant status ${box} | grep -q \"${box}\\s\\+not created\" || vagrant destroy ${box} --force" - workingDir project.rootProject.rootDir - environment vagrantEnvVars - } - destroy.onlyIf { vagrantDestroy } - update.mustRunAfter(destroy) - - Task up = project.tasks.create("vagrant${boxTask}#up", VagrantCommandTask) { - command 'up' - boxName box - environmentVars vagrantEnvVars - /* We lock the provider to virtualbox because the Vagrantfile specifies - lots of boxes that only work properly in virtualbox. Virtualbox is - vagrant's default but its possible to change that default and folks do. - But the boxes that we use are unlikely to work properly with other - virtualization providers. Thus the lock. */ - args '--provision', '--provider', 'virtualbox' - /* It'd be possible to check if the box is already up here and output - SKIPPED but that would require running vagrant status which is slow! 
*/ - dependsOn destroy, update - } - - Task smoke = project.tasks.create("vagrant${boxTask}#smoketest", Exec) { - environment vagrantEnvVars - dependsOn up - finalizedBy halt - } - vagrantSmokeTest.dependsOn(smoke) - if (LINUX_BOXES.contains(box)) { - smoke.commandLine = ['vagrant', 'ssh', box, '--command', - "set -o pipefail && echo 'Hello from ${project.path}' | sed -ue 's/^/ ${box}: /'"] - } else { - smoke.commandLine = ['vagrant', 'winrm', box, '--command', - "Write-Host ' ${box}: Hello from ${project.path}'"] - } - - if (LINUX_BOXES.contains(box)) { - Task batsPackagingTest = project.tasks.create("vagrant${boxTask}#batsPackagingTest", BatsOverVagrantTask) { - remoteCommand BATS_TEST_COMMAND - boxName box - environmentVars vagrantEnvVars - dependsOn up, setupPackagingTest - finalizedBy halt - } - - TaskExecutionAdapter batsPackagingReproListener = createReproListener(project, batsPackagingTest.path) - batsPackagingTest.doFirst { - project.gradle.addListener(batsPackagingReproListener) - } - batsPackagingTest.doLast { - project.gradle.removeListener(batsPackagingReproListener) - } - if (project.extensions.esvagrant.boxes.contains(box)) { - // these tests are temporarily disabled for suse boxes while we debug an issue - // https://github.com/elastic/elasticsearch/issues/30295 - if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { - packagingTest.dependsOn(batsPackagingTest) - } - } - } - - Task javaPackagingTest = project.tasks.create("vagrant${boxTask}#javaPackagingTest", VagrantCommandTask) { - boxName box - environmentVars vagrantEnvVars - dependsOn up, setupPackagingTest - finalizedBy halt - } - - // todo remove this onlyIf after all packaging tests are consolidated - javaPackagingTest.onlyIf { - project.extensions.esvagrant.testClass != null - } - - if (LINUX_BOXES.contains(box)) { - javaPackagingTest.command = 'ssh' - javaPackagingTest.args = ['--command', 'sudo bash "$PACKAGING_TESTS/run-tests.sh"'] - } else { - // powershell 
sessions run over winrm always run as administrator, whether --elevated is passed or not. however - // remote sessions have some restrictions on what they can do, such as impersonating another user (or the same user - // without administrator elevation), which we need to do for these tests. passing --elevated runs the session - // as a scheduled job locally on the vm as a true administrator to get around this limitation - // - // https://github.com/hashicorp/vagrant/blob/9c299a2a357fcf87f356bb9d56e18a037a53d138/plugins/communicators/winrm/communicator.rb#L195-L225 - // https://devops-collective-inc.gitbooks.io/secrets-of-powershell-remoting/content/manuscript/accessing-remote-computers.html - javaPackagingTest.command = 'winrm' - javaPackagingTest.args = ['--elevated', '--command', '& "$Env:PACKAGING_TESTS/run-tests.ps1"; exit $LASTEXITCODE'] - } - - TaskExecutionAdapter javaPackagingReproListener = createReproListener(project, javaPackagingTest.path) - javaPackagingTest.doFirst { - project.gradle.addListener(javaPackagingReproListener) - } - javaPackagingTest.doLast { - project.gradle.removeListener(javaPackagingReproListener) - } - if (project.extensions.esvagrant.boxes.contains(box)) { - // these tests are temporarily disabled for suse boxes while we debug an issue - // https://github.com/elastic/elasticsearch/issues/30295 - if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { - packagingTest.dependsOn(javaPackagingTest) - } - } - } - } - - private static TaskExecutionAdapter createReproListener(Project project, String reproTaskPath) { - return new TaskExecutionAdapter() { - @Override - void afterExecute(Task task, TaskState state) { - final String gradlew = Os.isFamily(Os.FAMILY_WINDOWS) ? 
"gradlew" : "./gradlew" - if (state.failure != null) { - println "REPRODUCE WITH: ${gradlew} \"${reproTaskPath}\" -Dtests.seed=${project.testSeed} " - } - } - } - } - - // convert the given path from an elasticsearch repo path to a VM path - private String convertLinuxPath(Project project, String path) { - return "/elasticsearch/" + project.rootDir.toPath().relativize(Paths.get(path)); - } - private String convertWindowsPath(Project project, String path) { - return "C:\\elasticsearch\\" + project.rootDir.toPath().relativize(Paths.get(path)).toString().replace('/', '\\'); - } -} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 5a3a4a277dd..d8c693b77d2 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -203,12 +203,16 @@ public class DistributionDownloadPlugin implements Plugin { String extension = distribution.getType().toString(); String classifier = "x86_64"; - if (distribution.getType() == Type.ARCHIVE) { + if (distribution.getVersion().before("7.0.0")) { + classifier = null; // no platform specific distros before 7.0 + } else if (distribution.getType() == Type.ARCHIVE) { extension = distribution.getPlatform() == Platform.WINDOWS ? "zip" : "tar.gz"; classifier = distribution.getPlatform() + "-" + classifier; + } else if (distribution.getType() == Type.DEB) { + classifier = "amd64"; } return FAKE_IVY_GROUP + ":elasticsearch" + (distribution.getFlavor() == Flavor.OSS ? "-oss:" : ":") - + distribution.getVersion() + ":" + classifier + "@" + extension; + + distribution.getVersion() + (classifier == null ? 
"" : ":" + classifier) + "@" + extension; } private static Dependency projectDependency(Project project, String projectPath, String projectConfig) { diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java index 53089f9b3d7..815da77a154 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java @@ -20,9 +20,7 @@ package org.elasticsearch.gradle; import org.gradle.api.Buildable; -import org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; -import org.gradle.api.file.FileTree; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; import org.gradle.api.tasks.TaskDependency; @@ -30,9 +28,8 @@ import org.gradle.api.tasks.TaskDependency; import java.io.File; import java.util.Iterator; import java.util.Locale; -import java.util.concurrent.Callable; -public class ElasticsearchDistribution implements Buildable { +public class ElasticsearchDistribution implements Buildable, Iterable { public enum Platform { LINUX, @@ -93,10 +90,6 @@ public class ElasticsearchDistribution implements Buildable { return configuration.getBuildDependencies(); } - public FileTree getFileTree(Project project) { - return project.fileTree((Callable) configuration::getSingleFile); - } - @Override public String toString() { return configuration.getSingleFile().toString(); @@ -190,6 +183,16 @@ public class ElasticsearchDistribution implements Buildable { return configuration.getBuildDependencies(); } + @Override + public Iterator iterator() { + return configuration.iterator(); + } + + // TODO: remove this when distro tests are per distribution + public Configuration getConfiguration() { + return configuration; + } + // internal, make this distribution's configuration unmodifiable void finalizeValues() { diff --git 
a/buildSrc/src/main/java/org/elasticsearch/gradle/Jdk.java b/buildSrc/src/main/java/org/elasticsearch/gradle/Jdk.java index aa26f398e8b..91516e26af9 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/Jdk.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/Jdk.java @@ -83,9 +83,13 @@ public class Jdk implements Buildable, Iterable { return configuration; } + public String getPath() { + return configuration.getSingleFile().toString(); + } + @Override public String toString() { - return configuration.getSingleFile().toString(); + return getPath(); } @Override diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java index d4f0d9941da..7c57af701fe 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java @@ -48,13 +48,14 @@ import java.util.regex.Matcher; public class JdkDownloadPlugin implements Plugin { private static final String REPO_NAME_PREFIX = "jdk_repo_"; + private static final String CONTAINER_NAME = "jdks"; @Override public void apply(Project project) { NamedDomainObjectContainer jdksContainer = project.container(Jdk.class, name -> new Jdk(name, project) ); - project.getExtensions().add("jdks", jdksContainer); + project.getExtensions().add(CONTAINER_NAME, jdksContainer); project.afterEvaluate(p -> { for (Jdk jdk : jdksContainer) { @@ -82,6 +83,11 @@ public class JdkDownloadPlugin implements Plugin { }); } + @SuppressWarnings("unchecked") + public static NamedDomainObjectContainer getContainer(Project project) { + return (NamedDomainObjectContainer) project.getExtensions().getByName(CONTAINER_NAME); + } + private static void setupRootJdkDownload(Project rootProject, String platform, String version) { String extractTaskName = "extract" + capitalize(platform) + "Jdk" + version; // NOTE: this is *horrendous*, but seems to be the only way to check 
for the existence of a registered task diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperService.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperService.java index 436e7d2331f..596b1ca12db 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperService.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperService.java @@ -19,6 +19,7 @@ package org.elasticsearch.gradle; +import org.elasticsearch.gradle.tool.ClasspathUtils; import org.gradle.api.GradleException; import org.gradle.api.logging.Logger; import org.gradle.internal.jvm.Jvm; @@ -27,11 +28,16 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UncheckedIOException; +import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; +import java.util.regex.Matcher; +import java.util.regex.Pattern; public class ReaperService { + private static final String REAPER_CLASS = "org/elasticsearch/gradle/reaper/Reaper.class"; + private static final Pattern REAPER_JAR_PATH_PATTERN = Pattern.compile("file:(.*)!/" + REAPER_CLASS); private Logger logger; private Path buildDir; private Path inputDir; @@ -103,13 +109,7 @@ public class ReaperService { private synchronized void ensureReaperStarted() { if (reaperProcess == null) { try { - // copy the reaper jar - Path jarPath = buildDir.resolve("reaper").resolve("reaper.jar"); - Files.createDirectories(jarPath.getParent()); - InputStream jarInput = ReaperPlugin.class.getResourceAsStream("/META-INF/reaper.jar"); - try (OutputStream out = Files.newOutputStream(jarPath)) { - jarInput.transferTo(out); - } + Path jarPath = locateReaperJar(); // ensure the input directory exists Files.createDirectories(inputDir); @@ -134,6 +134,47 @@ public class ReaperService { } } + private Path locateReaperJar() { + if (ClasspathUtils.isElasticsearchProject()) { + // when running inside the Elasticsearch build just pull find the jar in the runtime classpath + URL main = 
this.getClass().getClassLoader().getResource(REAPER_CLASS); + String mainPath = main.getFile(); + Matcher matcher = REAPER_JAR_PATH_PATTERN.matcher(mainPath); + + if (matcher.matches()) { + String path = matcher.group(1); + return Path.of( + OS.conditional() + .onWindows(() -> path.substring(1)) + .onUnix(() -> path) + .supply() + ); + } else { + throw new RuntimeException("Unable to locate " + REAPER_CLASS + " on build classpath."); + } + } else { + // copy the reaper jar + Path jarPath = buildDir.resolve("reaper").resolve("reaper.jar"); + try { + Files.createDirectories(jarPath.getParent()); + } catch (IOException e) { + throw new UncheckedIOException("Unable to create reaper JAR output directory " + jarPath.getParent(), e); + } + + try ( + OutputStream out = Files.newOutputStream(jarPath); + InputStream jarInput = this.getClass().getResourceAsStream("/META-INF/reaper.jar"); + ) { + logger.info("Copying reaper.jar..."); + jarInput.transferTo(out); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + return jarPath; + } + } + private void ensureReaperAlive() { if (reaperProcess.isAlive() == false) { throw new IllegalStateException("Reaper process died unexpectedly! Check the log at " + logFile.toString()); diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy b/buildSrc/src/main/java/org/elasticsearch/gradle/Util.java similarity index 52% rename from buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy rename to buildSrc/src/main/java/org/elasticsearch/gradle/Util.java index 1d85d8584bb..ffc5ce353d2 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/Util.java @@ -16,30 +16,24 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.gradle.vagrant -import org.gradle.api.tasks.Input +package org.elasticsearch.gradle; -/** - * Runs bats over vagrant. Pretty much like running it using Exec but with a - * nicer output formatter. - */ -public class BatsOverVagrantTask extends VagrantCommandTask { +import org.gradle.api.GradleException; - @Input - Object remoteCommand +public class Util { - BatsOverVagrantTask() { - command = 'ssh' - } - - void setRemoteCommand(Object remoteCommand) { - this.remoteCommand = Objects.requireNonNull(remoteCommand) - setArgs((Iterable) ['--command', remoteCommand]) - } - - @Override - protected OutputStream createLoggerOutputStream() { - return new TapLoggerOutputStream(logger, getProgressLoggerFactory().newOperation(boxName).setDescription(boxName)); + public static boolean getBooleanProperty(String property, boolean defaultValue) { + String propertyValue = System.getProperty(property); + if (propertyValue == null) { + return defaultValue; + } + if ("true".equals(propertyValue)) { + return true; + } else if ("false".equals(propertyValue)) { + return false; + } else { + throw new GradleException("Sysprop [" + property + "] must be [true] or [false] but was [" + propertyValue + "]"); + } } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java index 1b6880277fe..f5178d529bb 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java @@ -23,6 +23,7 @@ import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static java.nio.charset.StandardCharsets.UTF_8; @@ -45,6 +46,8 @@ public class GlobalBuildInfoPlugin implements Plugin { File compilerJavaHome = findCompilerJavaHome(); 
File runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome); + Object gitRevisionResolver = createGitRevisionResolver(project); + final List javaVersions = new ArrayList<>(); for (int version = 8; version <= Integer.parseInt(minimumCompilerVersion.getMajorVersion()); version++) { if (System.getenv(getJavaHomeEnvVarName(Integer.toString(version))) != null) { @@ -77,7 +80,7 @@ public class GlobalBuildInfoPlugin implements Plugin { project.allprojects(p -> { // Make sure than any task execution generates and prints build info - p.getTasks().all(task -> { + p.getTasks().configureEach(task -> { if (task != generateTask && task != printTask) { task.dependsOn(printTask); } @@ -92,7 +95,7 @@ public class GlobalBuildInfoPlugin implements Plugin { ext.set("minimumCompilerVersion", minimumCompilerVersion); ext.set("minimumRuntimeVersion", minimumRuntimeVersion); ext.set("gradleJavaVersion", Jvm.current().getJavaVersion()); - ext.set("gitRevision", gitRevision(project)); + ext.set("gitRevision", gitRevisionResolver); ext.set("buildDate", ZonedDateTime.now(ZoneOffset.UTC)); }); } @@ -203,21 +206,35 @@ public class GlobalBuildInfoPlugin implements Plugin { return _defaultParallel; } - private String gitRevision(final Project project) { - final ByteArrayOutputStream stdout = new ByteArrayOutputStream(); - final ByteArrayOutputStream stderr = new ByteArrayOutputStream(); - final ExecResult result = project.exec(spec -> { - spec.setExecutable("git"); - spec.setArgs(Arrays.asList("rev-parse", "HEAD")); - spec.setStandardOutput(stdout); - spec.setErrorOutput(stderr); - spec.setIgnoreExitValue(true); - }); + private Object createGitRevisionResolver(final Project project) { + return new Object() { + private final AtomicReference gitRevision = new AtomicReference<>(); + + @Override + public String toString() { + if (gitRevision.get() == null) { + final ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + final ByteArrayOutputStream stderr = new ByteArrayOutputStream(); + final 
ExecResult result = project.exec(spec -> { + spec.setExecutable("git"); + spec.setArgs(Arrays.asList("rev-parse", "HEAD")); + spec.setStandardOutput(stdout); + spec.setErrorOutput(stderr); + spec.setIgnoreExitValue(true); + }); + + final String revision; + if (result.getExitValue() != 0) { + revision = "unknown"; + } else { + revision = stdout.toString(UTF_8).trim(); + } + this.gitRevision.compareAndSet(null, revision); + } + return gitRevision.get(); + } + }; - if (result.getExitValue() != 0) { - return "unknown"; - } - return stdout.toString(UTF_8).trim(); } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/UpdateShasTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/UpdateShasTask.java index db3148da696..c4dd1feb745 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/UpdateShasTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/UpdateShasTask.java @@ -23,6 +23,7 @@ import org.gradle.api.DefaultTask; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.TaskProvider; import java.io.File; import java.io.IOException; @@ -40,20 +41,20 @@ public class UpdateShasTask extends DefaultTask { private final Logger logger = Logging.getLogger(getClass()); /** The parent dependency licenses task to use configuration from */ - private DependencyLicensesTask parentTask; + private TaskProvider parentTask; public UpdateShasTask() { setDescription("Updates the sha files for the dependencyLicenses check"); - setOnlyIf(element -> parentTask.getLicensesDir() != null); + setOnlyIf(element -> parentTask.get().getLicensesDir() != null); } @TaskAction public void updateShas() throws NoSuchAlgorithmException, IOException { - Set shaFiles = parentTask.getShaFiles(); + Set shaFiles = parentTask.get().getShaFiles(); - for (File dependency : parentTask.getDependencies()) { + for (File dependency : 
parentTask.get().getDependencies()) { String jarName = dependency.getName(); - File shaFile = parentTask.getShaFile(jarName); + File shaFile = parentTask.get().getShaFile(jarName); if (shaFile.exists() == false) { createSha(dependency, jarName, shaFile); @@ -71,16 +72,16 @@ public class UpdateShasTask extends DefaultTask { private void createSha(File dependency, String jarName, File shaFile) throws IOException, NoSuchAlgorithmException { logger.lifecycle("Adding sha for " + jarName); - String sha = parentTask.getSha1(dependency); + String sha = parentTask.get().getSha1(dependency); Files.write(shaFile.toPath(), sha.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE); } public DependencyLicensesTask getParentTask() { - return parentTask; + return parentTask.get(); } - public void setParentTask(DependencyLicensesTask parentTask) { + public void setParentTask(TaskProvider parentTask) { this.parentTask = parentTask; } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/BatsTestTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/test/BatsTestTask.java new file mode 100644 index 00000000000..c3d79f44ae3 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/BatsTestTask.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.test; + +import org.gradle.api.DefaultTask; +import org.gradle.api.file.Directory; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputDirectory; +import org.gradle.api.tasks.TaskAction; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +public class BatsTestTask extends DefaultTask { + + private Directory testsDir; + private Directory utilsDir; + private Directory archivesDir; + private String packageName; + + @InputDirectory + public Directory getTestsDir() { + return testsDir; + } + + public void setTestsDir(Directory testsDir) { + this.testsDir = testsDir; + } + + @InputDirectory + public Directory getUtilsDir() { + return utilsDir; + } + + public void setUtilsDir(Directory utilsDir) { + this.utilsDir = utilsDir; + } + + @InputDirectory + public Directory getArchivesDir() { + return archivesDir; + } + + public void setArchivesDir(Directory archivesDir) { + this.archivesDir = archivesDir; + } + + @Input + public String getPackageName() { + return packageName; + } + + public void setPackageName(String packageName) { + this.packageName = packageName; + } + + @TaskAction + public void runBats() { + List command = new ArrayList<>(); + command.add("bats"); + command.add("--tap"); + command.addAll(testsDir.getAsFileTree().getFiles().stream() + .filter(f -> f.getName().endsWith(".bats")) + .sorted().collect(Collectors.toList())); + getProject().exec(spec -> { + spec.setWorkingDir(archivesDir.getAsFile()); + spec.environment(System.getenv()); + spec.environment("BATS_TESTS", testsDir.getAsFile().toString()); + spec.environment("BATS_UTILS", utilsDir.getAsFile().toString()); + spec.environment("PACKAGE_NAME", packageName); + spec.setCommandLine(command); + }); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/GradleDistroTestTask.java 
b/buildSrc/src/main/java/org/elasticsearch/gradle/test/GradleDistroTestTask.java new file mode 100644 index 00000000000..cfb960e4e36 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/GradleDistroTestTask.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.test; + +import org.elasticsearch.gradle.vagrant.VagrantShellTask; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.options.Option; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath; +import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath; + +/** + * Run a gradle task of the current build, within the configured vagrant VM. 
+ */ +public class GradleDistroTestTask extends VagrantShellTask { + + private String taskName; + private String testClass; + private List extraArgs = new ArrayList<>(); + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + @Input + public String getTaskName() { + return taskName; + } + + @Option(option = "tests", description = "Sets test class or method name to be included, '*' is supported.") + public void setTestClass(String testClass) { + this.testClass = testClass; + } + + @Input + public List getExtraArgs() { + return extraArgs; + } + + public void extraArg(String arg) { + this.extraArgs.add(arg); + } + + @Override + protected List getWindowsScript() { + return getScript(true); + } + + @Override + protected List getLinuxScript() { + return getScript(false); + } + + private List getScript(boolean isWindows) { + String cacheDir = getProject().getBuildDir() + "/gradle-cache"; + StringBuilder line = new StringBuilder(); + line.append(isWindows ? "& .\\gradlew " : "./gradlew "); + line.append(taskName); + line.append(" --project-cache-dir "); + line.append(isWindows ? 
convertWindowsPath(getProject(), cacheDir) : convertLinuxPath(getProject(), cacheDir)); + line.append(" -S"); + line.append(" -D'org.gradle.logging.level'=" + getProject().getGradle().getStartParameter().getLogLevel()); + if (testClass != null) { + line.append(" --tests="); + line.append(testClass); + } + extraArgs.stream().map(s -> " " + s).forEach(line::append); + return Collections.singletonList(line.toString()); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/DefaultTestClustersTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/DefaultTestClustersTask.java new file mode 100644 index 00000000000..d03651591fd --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/DefaultTestClustersTask.java @@ -0,0 +1,17 @@ +package org.elasticsearch.gradle.testclusters; + +import org.gradle.api.DefaultTask; + +import java.util.Collection; +import java.util.HashSet; + +public class DefaultTestClustersTask extends DefaultTask implements TestClustersAware { + + private Collection clusters = new HashSet<>(); + + @Override + public Collection getClusters() { + return clusters; + } + +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java index b436c800f1d..c20f0128f23 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java @@ -117,6 +117,10 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { return clusterName; } + public String getPath() { + return path; + } + @Override public void setVersion(String version) { nodes.all(each -> each.setVersion(version)); diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java 
b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index a13e747f784..900a0f99ed9 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -216,6 +216,9 @@ public class ElasticsearchNode implements TestClusterConfiguration { public void plugin(URI plugin) { requireNonNull(plugin, "Plugin name can't be null"); checkFrozen(); + if (plugins.contains(plugin)) { + throw new TestClustersException("Plugin already configured for installation " + plugin); + } this.plugins.add(plugin); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/RestTestRunnerTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/RestTestRunnerTask.java similarity index 73% rename from buildSrc/src/main/java/org/elasticsearch/gradle/test/RestTestRunnerTask.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/RestTestRunnerTask.java index 95040af9809..833ce019d1d 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/test/RestTestRunnerTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/RestTestRunnerTask.java @@ -1,12 +1,11 @@ -package org.elasticsearch.gradle.test; +package org.elasticsearch.gradle.testclusters; -import org.elasticsearch.gradle.testclusters.ElasticsearchCluster; import org.gradle.api.tasks.CacheableTask; import org.gradle.api.tasks.Nested; import org.gradle.api.tasks.testing.Test; -import java.util.ArrayList; import java.util.Collection; +import java.util.HashSet; import static org.elasticsearch.gradle.testclusters.TestDistribution.INTEG_TEST; @@ -16,9 +15,9 @@ import static org.elasticsearch.gradle.testclusters.TestDistribution.INTEG_TEST; * {@link Nested} inputs. 
*/ @CacheableTask -public class RestTestRunnerTask extends Test { +public class RestTestRunnerTask extends Test implements TestClustersAware { - private Collection clusters = new ArrayList<>(); + private Collection clusters = new HashSet<>(); public RestTestRunnerTask() { super(); @@ -26,12 +25,15 @@ public class RestTestRunnerTask extends Test { task -> clusters.stream().flatMap(c -> c.getNodes().stream()).anyMatch(n -> n.getTestDistribution() != INTEG_TEST)); } + @Override + public int getMaxParallelForks() { + return 1; + } + @Nested + @Override public Collection getClusters() { return clusters; } - public void testCluster(ElasticsearchCluster cluster) { - this.clusters.add(cluster); - } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java new file mode 100644 index 00000000000..72703399c95 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java @@ -0,0 +1,26 @@ +package org.elasticsearch.gradle.testclusters; + +import org.gradle.api.Task; +import org.gradle.api.tasks.Nested; + +import java.util.Collection; + +interface TestClustersAware extends Task { + + @Nested + Collection getClusters(); + + default void useCluster(ElasticsearchCluster cluster) { + if (cluster.getPath().equals(getProject().getPath()) == false) { + throw new TestClustersException( + "Task " + getPath() + " can't use test cluster from" + + " another project " + cluster + ); + } + + for (ElasticsearchNode node : cluster.getNodes()) { + this.dependsOn(node.getDistribution().getExtracted()); + } + getClusters().add(cluster); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index cb17f7a4026..b6c8c39e2ed 100644 --- 
a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -18,32 +18,22 @@ */ package org.elasticsearch.gradle.testclusters; -import groovy.lang.Closure; import org.elasticsearch.gradle.DistributionDownloadPlugin; import org.elasticsearch.gradle.ElasticsearchDistribution; import org.elasticsearch.gradle.ReaperPlugin; import org.elasticsearch.gradle.ReaperService; -import org.elasticsearch.gradle.test.RestTestRunnerTask; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.execution.TaskActionListener; import org.gradle.api.execution.TaskExecutionListener; +import org.gradle.api.invocation.Gradle; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; -import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.tasks.TaskState; import java.io.File; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; public class TestClustersPlugin implements Plugin { @@ -51,12 +41,6 @@ public class TestClustersPlugin implements Plugin { public static final String EXTENSION_NAME = "testClusters"; private static final Logger logger = Logging.getLogger(TestClustersPlugin.class); - private static final String TESTCLUSTERS_INSPECT_FAILURE = "testclusters.inspect.failure"; - - private final Map> usedClusters = new HashMap<>(); - private final Map claimsInventory = new HashMap<>(); - private final Set runningClusters = new HashSet<>(); - private final Boolean allowClusterToSurvive = Boolean.valueOf(System.getProperty(TESTCLUSTERS_INSPECT_FAILURE, "false")); private ReaperService reaper; @@ -73,20 +57,22 @@ public class TestClustersPlugin implements Plugin { 
// provide a task to be able to list defined clusters. createListClustersTask(project, container); - // create DSL for tasks to mark clusters these use - createUseClusterTaskExtension(project, container); + if (project.getRootProject().getExtensions().findByType(TestClustersRegistry.class) == null) { + TestClustersRegistry registry = project.getRootProject().getExtensions() + .create("testClusters", TestClustersRegistry.class); - // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters - // that are defined in the build script and the ones that will actually be used in this invocation of gradle - // we use this information to determine when the last task that required the cluster executed so that we can - // terminate the cluster right away and free up resources. - configureClaimClustersHook(project); + // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters + // that are defined in the build script and the ones that will actually be used in this invocation of gradle + // we use this information to determine when the last task that required the cluster executed so that we can + // terminate the cluster right away and free up resources. + configureClaimClustersHook(project.getGradle(), registry); - // Before each task, we determine if a cluster needs to be started for that task. - configureStartClustersHook(project); + // Before each task, we determine if a cluster needs to be started for that task. + configureStartClustersHook(project.getGradle(), registry); - // After each task we determine if there are clusters that are no longer needed. - configureStopClustersHook(project); + // After each task we determine if there are clusters that are no longer needed. 
+ configureStopClustersHook(project.getGradle(), registry); + } } private NamedDomainObjectContainer createTestClustersContainerExtension(Project project) { @@ -120,78 +106,28 @@ public class TestClustersPlugin implements Plugin { ); } - private void createUseClusterTaskExtension(Project project, NamedDomainObjectContainer container) { - // register an extension for all current and future tasks, so that any task can declare that it wants to use a - // specific cluster. - project.getTasks().all((Task task) -> - task.getExtensions().findByType(ExtraPropertiesExtension.class) - .set( - "useCluster", - new Closure(project, task) { - public void doCall(ElasticsearchCluster cluster) { - if (container.contains(cluster) == false) { - throw new TestClustersException( - "Task " + task.getPath() + " can't use test cluster from" + - " another project " + cluster - ); - } - Object thisObject = this.getThisObject(); - if (thisObject instanceof Task == false) { - throw new AssertionError("Expected " + thisObject + " to be an instance of " + - "Task, but got: " + thisObject.getClass()); - } - usedClusters.computeIfAbsent(task, k -> new ArrayList<>()).add(cluster); - for (ElasticsearchNode node : cluster.getNodes()) { - ((Task) thisObject).dependsOn(node.getDistribution().getExtracted()); - } - if (thisObject instanceof RestTestRunnerTask) { - ((RestTestRunnerTask) thisObject).testCluster(cluster); - } - } - }) - ); - } - - private void configureClaimClustersHook(Project project) { + private static void configureClaimClustersHook(Gradle gradle, TestClustersRegistry registry) { // Once we know all the tasks that need to execute, we claim all the clusters that belong to those and count the // claims so we'll know when it's safe to stop them. 
- project.getGradle().getTaskGraph().whenReady(taskExecutionGraph -> { - Set forExecution = taskExecutionGraph.getAllTasks().stream() - .map(Task::getPath) - .collect(Collectors.toSet()); - - usedClusters.forEach((task, listOfClusters) -> - listOfClusters.forEach(elasticsearchCluster -> { - if (forExecution.contains(task.getPath())) { - elasticsearchCluster.freeze(); - claimsInventory.put(elasticsearchCluster, claimsInventory.getOrDefault(elasticsearchCluster, 0) + 1); - } - })); - if (claimsInventory.isEmpty() == false) { - logger.info("Claims inventory: {}", claimsInventory); - } + gradle.getTaskGraph().whenReady(taskExecutionGraph -> { + taskExecutionGraph.getAllTasks().stream() + .filter(task -> task instanceof TestClustersAware) + .map(task -> (TestClustersAware) task) + .flatMap(task -> task.getClusters().stream()) + .forEach(registry::claimCluster); }); } - private void configureStartClustersHook(Project project) { - project.getGradle().addListener( + private static void configureStartClustersHook(Gradle gradle, TestClustersRegistry registry) { + gradle.addListener( new TaskActionListener() { @Override public void beforeActions(Task task) { + if (task instanceof TestClustersAware == false) { + return; + } // we only start the cluster before the actions, so we'll not start it if the task is up-to-date - List neededButNotRunning = usedClusters.getOrDefault( - task, - Collections.emptyList() - ) - .stream() - .filter(cluster -> runningClusters.contains(cluster) == false) - .collect(Collectors.toList()); - - neededButNotRunning - .forEach(elasticsearchCluster -> { - elasticsearchCluster.start(); - runningClusters.add(elasticsearchCluster); - }); + ((TestClustersAware) task).getClusters().forEach(registry::maybeStartCluster); } @Override public void afterActions(Task task) {} @@ -199,43 +135,18 @@ public class TestClustersPlugin implements Plugin { ); } - private void configureStopClustersHook(Project project) { - project.getGradle().addListener( + private static 
void configureStopClustersHook(Gradle gradle, TestClustersRegistry registry) { + gradle.addListener( new TaskExecutionListener() { @Override public void afterExecute(Task task, TaskState state) { - // always unclaim the cluster, even if _this_ task is up-to-date, as others might not have been - // and caused the cluster to start. - List clustersUsedByTask = usedClusters.getOrDefault( - task, - Collections.emptyList() - ); - if (clustersUsedByTask.isEmpty()) { + if (task instanceof TestClustersAware == false) { return; } - logger.info("Clusters were used, stopping and releasing permits"); - final int permitsToRelease; - if (state.getFailure() != null) { - // If the task fails, and other tasks use this cluster, the other task will likely never be - // executed at all, so we will never be called again to un-claim and terminate it. - clustersUsedByTask.forEach(cluster -> stopCluster(cluster, true)); - permitsToRelease = clustersUsedByTask.stream() - .map(cluster -> cluster.getNumberOfNodes()) - .reduce(Integer::sum).get(); - } else { - clustersUsedByTask.forEach( - cluster -> claimsInventory.put(cluster, claimsInventory.getOrDefault(cluster, 0) - 1) - ); - List stoppingClusers = claimsInventory.entrySet().stream() - .filter(entry -> entry.getValue() == 0) - .filter(entry -> runningClusters.contains(entry.getKey())) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); - stoppingClusers.forEach(cluster -> { - stopCluster(cluster, false); - runningClusters.remove(cluster); - }); - } + // always unclaim the cluster, even if _this_ task is up-to-date, as others might not have been + // and caused the cluster to start. 
+ ((TestClustersAware) task).getClusters() + .forEach(cluster -> registry.stopCluster(cluster, state.getFailure() != null)); } @Override public void beforeExecute(Task task) {} @@ -243,25 +154,5 @@ public class TestClustersPlugin implements Plugin { ); } - private void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { - if (allowClusterToSurvive) { - logger.info("Not stopping clusters, disabled by property"); - if (taskFailed) { - // task failed or this is the last one to stop - for (int i=1 ; ; i += i) { - logger.lifecycle( - "No more test clusters left to run, going to sleep because {} was set," + - " interrupt (^C) to stop clusters.", TESTCLUSTERS_INSPECT_FAILURE - ); - try { - Thread.sleep(1000 * i); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - return; - } - } - } - } - cluster.stop(taskFailed); - } + } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java new file mode 100644 index 00000000000..35f6fffc39b --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java @@ -0,0 +1,66 @@ +package org.elasticsearch.gradle.testclusters; + +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +public class TestClustersRegistry { + private static final Logger logger = Logging.getLogger(TestClustersRegistry.class); + private static final String TESTCLUSTERS_INSPECT_FAILURE = "testclusters.inspect.failure"; + private final Boolean allowClusterToSurvive = Boolean.valueOf(System.getProperty(TESTCLUSTERS_INSPECT_FAILURE, "false")); + private final Map claimsInventory = new HashMap<>(); + private final Set runningClusters = new HashSet<>(); + + public void claimCluster(ElasticsearchCluster cluster) { + cluster.freeze(); + 
claimsInventory.put(cluster, claimsInventory.getOrDefault(cluster, 0) + 1); + } + + public void maybeStartCluster(ElasticsearchCluster cluster) { + if (runningClusters.contains(cluster)) { + return; + } + runningClusters.add(cluster); + cluster.start(); + } + + public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { + if (taskFailed) { + // If the task fails, and other tasks use this cluster, the other task will likely never be + // executed at all, so we will never be called again to un-claim and terminate it. + if (allowClusterToSurvive) { + logger.info("Not stopping clusters, disabled by property"); + // task failed or this is the last one to stop + for (int i = 1; ; i += i) { + logger.lifecycle( + "No more test clusters left to run, going to sleep because {} was set," + + " interrupt (^C) to stop clusters.", TESTCLUSTERS_INSPECT_FAILURE + ); + try { + Thread.sleep(1000 * i); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return; + } + } + } else { + cluster.stop(false); + runningClusters.remove(cluster); + } + } else { + int currentClaims = claimsInventory.getOrDefault(cluster, 0) - 1; + claimsInventory.put(cluster, currentClaims); + + if (currentClaims <= 0 && runningClusters.contains(cluster)) { + cluster.stop(false); + runningClusters.remove(cluster); + } + } + } + + +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java index 29b0b5def20..760e5f60f1c 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java @@ -21,8 +21,12 @@ package org.elasticsearch.gradle.tool; import org.gradle.api.Action; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.UnknownTaskException; import org.gradle.api.plugins.JavaPluginConvention; import 
org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.TaskContainer; +import org.gradle.api.tasks.TaskProvider; import java.util.Optional; @@ -37,6 +41,7 @@ public abstract class Boilerplate { .orElse(collection.create(name)); } + public static T maybeCreate(NamedDomainObjectContainer collection, String name, Action action) { return Optional.ofNullable(collection.findByName(name)) .orElseGet(() -> { @@ -47,4 +52,45 @@ public abstract class Boilerplate { } + public static TaskProvider maybeRegister(TaskContainer tasks, String name, Class clazz, Action action) { + try { + return tasks.named(name, clazz); + } catch (UnknownTaskException e) { + return tasks.register(name, clazz, action); + } + } + + public static void maybeConfigure(TaskContainer tasks, String name, Action config) { + TaskProvider task; + try { + task = tasks.named(name); + } catch (UnknownTaskException e) { + return; + } + + task.configure(config); + } + + public static void maybeConfigure( + TaskContainer tasks, String name, + Class type, + Action config + ) { + tasks.withType(type).configureEach(task -> { + if (task.getName().equals(name)) { + config.execute(task); + } + }); + } + + public static TaskProvider findByName(TaskContainer tasks, String name) { + TaskProvider task; + try { + task = tasks.named(name); + } catch (UnknownTaskException e) { + return null; + } + + return task; + } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.java b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/BatsProgressLogger.java similarity index 69% rename from buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/BatsProgressLogger.java index 353b2687ad1..8db4e704fb4 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/BatsProgressLogger.java @@ -19,12 
+19,10 @@ package org.elasticsearch.gradle.vagrant; -import org.elasticsearch.gradle.LoggingOutputStream; -import org.gradle.api.GradleScriptException; import org.gradle.api.logging.Logger; -import org.gradle.internal.logging.progress.ProgressLogger; import java.util.Formatter; +import java.util.function.UnaryOperator; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -41,49 +39,43 @@ import java.util.regex.Pattern; * There is a Tap4j project but we can't use it because it wants to parse the * entire TAP stream at once and won't parse it stream-wise. */ -public class TapLoggerOutputStream extends LoggingOutputStream { +public class BatsProgressLogger implements UnaryOperator { private static final Pattern lineRegex = Pattern.compile("(?ok|not ok) \\d+(? # skip (?\\(.+\\))?)? \\[(?.+)\\] (?.+)"); + private static final Pattern startRegex = Pattern.compile("1..(\\d+)"); private final Logger logger; - private final ProgressLogger progressLogger; - private boolean isStarted = false; private int testsCompleted = 0; private int testsFailed = 0; private int testsSkipped = 0; private Integer testCount; private String countsFormat; - TapLoggerOutputStream(Logger logger, ProgressLogger progressLogger) { + public BatsProgressLogger(Logger logger) { this.logger = logger; - this.progressLogger = progressLogger; } @Override - public void logLine(String line) { - if (isStarted == false) { - progressLogger.started("started"); - isStarted = true; - } + public String apply(String line) { if (testCount == null) { - try { - int lastDot = line.lastIndexOf('.'); - testCount = Integer.parseInt(line.substring(lastDot + 1)); - int length = String.valueOf(testCount).length(); - String count = "%0" + length + "d"; - countsFormat = "[" + count +"|" + count + "|" + count + "/" + count + "]"; - return; - } catch (Exception e) { - throw new GradleScriptException("Error parsing first line of TAP stream!!", e); + Matcher m = startRegex.matcher(line); + if (m.matches() == false) { 
+ // haven't reached start of bats test yet, pass through whatever we see + return line; } + testCount = Integer.parseInt(m.group(1)); + int length = String.valueOf(testCount).length(); + String count = "%0" + length + "d"; + countsFormat = "[" + count +"|" + count + "|" + count + "/" + count + "]"; + return null; } Matcher m = lineRegex.matcher(line); if (m.matches() == false) { /* These might be failure report lines or comments or whatever. Its hard to tell and it doesn't matter. */ logger.warn(line); - return; + return null; } boolean skipped = m.group("skip") != null; boolean success = skipped == false && m.group("status").equals("ok"); @@ -104,15 +96,9 @@ public class TapLoggerOutputStream extends LoggingOutputStream { } String counts = new Formatter().format(countsFormat, testsCompleted, testsFailed, testsSkipped, testCount).out().toString(); - progressLogger.progress("BATS " + counts + ", " + status + " [" + suiteName + "] " + testName); if (success == false) { logger.warn(line); } - } - - @Override - public void close() { - flush(); - progressLogger.completed(); + return "BATS " + counts + ", " + status + " [" + suiteName + "] " + testName; } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantBasePlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantBasePlugin.java new file mode 100644 index 00000000000..f77fe982f74 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantBasePlugin.java @@ -0,0 +1,147 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.vagrant; + +import org.elasticsearch.gradle.ReaperPlugin; +import org.elasticsearch.gradle.ReaperService; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.execution.TaskActionListener; +import org.gradle.api.execution.TaskExecutionListener; +import org.gradle.api.tasks.TaskState; + +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.function.Consumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class VagrantBasePlugin implements Plugin { + + @Override + public void apply(Project project) { + project.getRootProject().getPluginManager().apply(VagrantSetupCheckerPlugin.class); + project.getRootProject().getPluginManager().apply(VagrantManagerPlugin.class); + project.getRootProject().getPluginManager().apply(ReaperPlugin.class); + + ReaperService reaper = project.getRootProject().getExtensions().getByType(ReaperService.class); + VagrantExtension extension = project.getExtensions().create("vagrant", VagrantExtension.class, project); + VagrantMachine service = project.getExtensions().create("vagrantService", VagrantMachine.class, project, extension, reaper); + + project.getGradle().getTaskGraph().whenReady(graph -> + service.refs = graph.getAllTasks().stream() + .filter(t -> t instanceof VagrantShellTask) + .filter(t -> t.getProject() == project) + .count()); + } + + + /** + 
* Check vagrant and virtualbox versions, if any vagrant test tasks will be run. + */ + static class VagrantSetupCheckerPlugin implements Plugin { + + private static final Pattern VAGRANT_VERSION = Pattern.compile("Vagrant (\\d+\\.\\d+\\.\\d+)"); + private static final Pattern VIRTUAL_BOX_VERSION = Pattern.compile("(\\d+\\.\\d+)"); + + @Override + public void apply(Project project) { + if (project != project.getRootProject()) { + throw new IllegalArgumentException("VagrantSetupCheckerPlugin can only be applied to the root project of a build"); + } + + project.getGradle().getTaskGraph().whenReady(graph -> { + boolean needsVagrant = graph.getAllTasks().stream().anyMatch(t -> t instanceof VagrantShellTask); + if (needsVagrant) { + checkVersion(project, "vagrant", VAGRANT_VERSION, 1, 8, 6); + checkVersion(project, "vboxmanage", VIRTUAL_BOX_VERSION, 5, 1); + } + }); + } + + void checkVersion(Project project, String tool, Pattern versionRegex, int... minVersion) { + ByteArrayOutputStream pipe = new ByteArrayOutputStream(); + project.exec(spec -> { + spec.setCommandLine(tool, "--version"); + spec.setStandardOutput(pipe); + }); + String output = pipe.toString(StandardCharsets.UTF_8).trim(); + Matcher matcher = versionRegex.matcher(output); + if (matcher.find() == false) { + throw new IllegalStateException(tool + + " version output [" + output + "] did not match regex [" + versionRegex.pattern() + "]"); + } + + String version = matcher.group(1); + List versionParts = Stream.of(version.split("\\.")).map(Integer::parseInt).collect(Collectors.toList()); + for (int i = 0; i < minVersion.length; ++i) { + int found = versionParts.get(i); + if (found > minVersion[i]) { + break; // most significant version is good + } else if (found < minVersion[i]) { + throw new IllegalStateException("Unsupported version of " + tool + ". 
Found [" + version + "], expected [" + + Stream.of(minVersion).map(String::valueOf).collect(Collectors.joining(".")) + "+"); + } // else equal, so check next element + } + } + } + + /** + * Adds global hooks to manage destroying, starting and updating VMs. + */ + static class VagrantManagerPlugin implements Plugin, TaskActionListener, TaskExecutionListener { + + @Override + public void apply(Project project) { + if (project != project.getRootProject()) { + throw new IllegalArgumentException("VagrantManagerPlugin can only be applied to the root project of a build"); + } + project.getGradle().addListener(this); + } + + private void callIfVagrantTask(Task task, Consumer method) { + if (task instanceof VagrantShellTask) { + VagrantMachine service = task.getProject().getExtensions().getByType(VagrantMachine.class); + method.accept(service); + } + } + + @Override + public void beforeExecute(Task task) { /* nothing to do */} + + @Override + public void afterActions(Task task) { /* nothing to do */ } + + @Override + public void beforeActions(Task task) { + callIfVagrantTask(task, VagrantMachine::maybeStartVM); + } + + @Override + public void afterExecute(Task task, TaskState state) { + callIfVagrantTask(task, service -> service.maybeStopVM(state.getFailure() != null)); + } + } + +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantExtension.java b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantExtension.java new file mode 100644 index 00000000000..10ec03f7f10 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantExtension.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.vagrant; + +import org.gradle.api.Project; +import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.provider.MapProperty; +import org.gradle.api.provider.Property; +import org.gradle.api.tasks.Input; + +import java.io.File; +import java.util.Map; + +public class VagrantExtension { + + private final Property box; + private final MapProperty hostEnv; + private final MapProperty vmEnv; + private final RegularFileProperty vagrantfile; + private boolean isWindowsVM; + + public VagrantExtension(Project project) { + this.box = project.getObjects().property(String.class); + this.hostEnv = project.getObjects().mapProperty(String.class, Object.class); + this.vmEnv = project.getObjects().mapProperty(String.class, Object.class); + this.vagrantfile = project.getObjects().fileProperty(); + this.vagrantfile.convention(project.getRootProject().getLayout().getProjectDirectory().file("Vagrantfile")); + this.isWindowsVM = false; + } + + @Input + public String getBox() { + return box.get(); + } + + public void setBox(String box) { + // TODO: should verify this against the Vagrantfile, but would need to do so in afterEvaluate once vagrantfile is unmodifiable + this.box.set(box); + } + + @Input + public Map getHostEnv() { + return hostEnv.get(); + } + + public void hostEnv(String name, Object value) { + hostEnv.put(name, value); + } + + @Input + 
public Map getVmEnv() { + return vmEnv.get(); + } + + public void vmEnv(String name, Object value) { + vmEnv.put(name, value); + } + + @Input + public boolean isWindowsVM() { + return isWindowsVM; + } + + public void setIsWindowsVM(boolean isWindowsVM) { + this.isWindowsVM = isWindowsVM; + } + + @Input + public File getVagrantfile() { + return this.vagrantfile.get().getAsFile(); + } + + public void setVagrantfile(File file) { + vagrantfile.set(file); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantMachine.java b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantMachine.java new file mode 100644 index 00000000000..aa89658d9a9 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantMachine.java @@ -0,0 +1,210 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.gradle.vagrant; + +import org.apache.commons.io.output.TeeOutputStream; +import org.elasticsearch.gradle.LoggedExec; +import org.elasticsearch.gradle.LoggingOutputStream; +import org.elasticsearch.gradle.ReaperService; +import org.elasticsearch.gradle.Util; +import org.gradle.api.Action; +import org.gradle.api.Project; +import org.gradle.internal.logging.progress.ProgressLogger; +import org.gradle.internal.logging.progress.ProgressLoggerFactory; + +import javax.inject.Inject; +import java.io.File; +import java.io.OutputStream; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Objects; +import java.util.function.UnaryOperator; + +/** + * An helper to manage a vagrant box. + * + * This is created alongside a {@link VagrantExtension} for a project to manage starting and + * stopping a single vagrant box. + */ +public class VagrantMachine { + + private final Project project; + private final VagrantExtension extension; + private final ReaperService reaper; + // pkg private so plugin can set this after construction + long refs; + private boolean isVMStarted = false; + + public VagrantMachine(Project project, VagrantExtension extension, ReaperService reaper) { + this.project = project; + this.extension = extension; + this.reaper = reaper; + } + + @Inject + protected ProgressLoggerFactory getProgressLoggerFactory() { + throw new UnsupportedOperationException(); + } + + public void execute(Action action) { + VagrantExecSpec vagrantSpec = new VagrantExecSpec(); + action.execute(vagrantSpec); + + Objects.requireNonNull(vagrantSpec.command); + + LoggedExec.exec(project, execSpec -> { + execSpec.setExecutable("vagrant"); + File vagrantfile = extension.getVagrantfile(); + execSpec.setEnvironment(System.getenv()); // pass through env + execSpec.environment("VAGRANT_CWD", vagrantfile.getParentFile().toString()); + execSpec.environment("VAGRANT_VAGRANTFILE", vagrantfile.getName()); + execSpec.environment("VAGRANT_LOG", 
"debug"); + extension.getHostEnv().forEach(execSpec::environment); + + execSpec.args(vagrantSpec.command); + if (vagrantSpec.subcommand != null) { + execSpec.args(vagrantSpec.subcommand); + } + execSpec.args(extension.getBox()); + if (vagrantSpec.args != null) { + execSpec.args(Arrays.asList(vagrantSpec.args)); + } + + UnaryOperator progressHandler = vagrantSpec.progressHandler; + if (progressHandler == null) { + progressHandler = new VagrantProgressLogger("==> " + extension.getBox() + ": "); + } + OutputStream output = execSpec.getStandardOutput(); + // output from vagrant needs to be manually curated because --machine-readable isn't actually "readable" + OutputStream progressStream = new ProgressOutputStream(vagrantSpec.command, progressHandler); + execSpec.setStandardOutput(new TeeOutputStream(output, progressStream)); + }); + } + + // start the configuration VM if it hasn't been started yet + void maybeStartVM() { + if (isVMStarted) { + return; + } + + execute(spec -> { + spec.setCommand("box"); + spec.setSubcommand("update"); + }); + + // Destroying before every execution can be annoying while iterating on tests locally. Therefore, we provide a flag that defaults + // to true that can be used to control whether or not to destroy any test boxes before test execution. + boolean destroyVM = Util.getBooleanProperty("vagrant.destroy", true); + if (destroyVM) { + execute(spec -> { + spec.setCommand("destroy"); + spec.setArgs("--force"); + }); + } + + // register box to be shutdown if gradle dies + reaper.registerCommand(extension.getBox(), "vagrant", "halt", "-f", extension.getBox()); + + // We lock the provider to virtualbox because the Vagrantfile specifies lots of boxes that only work + // properly in virtualbox. Virtualbox is vagrant's default but its possible to change that default and folks do. 
+ execute(spec -> { + spec.setCommand("up"); + spec.setArgs("--provision", "--provider", "virtualbox"); + }); + isVMStarted = true; + } + + // stops the VM if refs are down to 0, or force was called + void maybeStopVM(boolean force) { + assert refs >= 1; + this.refs--; + if ((refs == 0 || force) && isVMStarted) { + execute(spec -> spec.setCommand("halt")); + reaper.unregister(extension.getBox()); + } + } + + // convert the given path from an elasticsearch repo path to a VM path + public static String convertLinuxPath(Project project, String path) { + return "/elasticsearch/" + project.getRootDir().toPath().relativize(Paths.get(path)); + } + + public static String convertWindowsPath(Project project, String path) { + return "C:\\elasticsearch\\" + project.getRootDir().toPath().relativize(Paths.get(path)).toString().replace('/', '\\'); + } + + public static class VagrantExecSpec { + private String command; + private String subcommand; + private String[] args; + private UnaryOperator progressHandler; + + private VagrantExecSpec() {} + + public void setCommand(String command) { + this.command = command; + } + + public void setSubcommand(String subcommand) { + this.subcommand = subcommand; + } + + public void setArgs(String... args) { + this.args = args; + } + + /** + * A function to translate output from the vagrant command execution to the progress line. + * + * The function takes the current line of output from vagrant, and returns a new + * progress line, or {@code null} if there is no update. 
+ */ + public void setProgressHandler(UnaryOperator progressHandler) { + this.progressHandler = progressHandler; + } + } + + private class ProgressOutputStream extends LoggingOutputStream { + + private ProgressLogger progressLogger; + private UnaryOperator progressHandler; + + ProgressOutputStream(String command, UnaryOperator progressHandler) { + this.progressHandler = progressHandler; + this.progressLogger = getProgressLoggerFactory().newOperation("vagrant"); + progressLogger.start(extension.getBox() + "> " + command, "hello"); + } + + @Override + protected void logLine(String line) { + String progress = progressHandler.apply(line); + if (progress != null) { + progressLogger.progress(progress); + } + System.out.println(line); + } + + @Override + public void close() { + progressLogger.completed(); + } + } + +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.java b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantProgressLogger.java similarity index 73% rename from buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.java rename to buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantProgressLogger.java index 2e4a6123556..d041ebbda92 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantProgressLogger.java @@ -19,30 +19,23 @@ package org.elasticsearch.gradle.vagrant; -import org.elasticsearch.gradle.LoggingOutputStream; -import org.gradle.internal.logging.progress.ProgressLogger; +import java.util.function.UnaryOperator; + +public class VagrantProgressLogger implements UnaryOperator { -public class VagrantLoggerOutputStream extends LoggingOutputStream { private static final String HEADING_PREFIX = "==> "; - private final ProgressLogger progressLogger; private final String squashedPrefix; - private boolean isStarted = false; private String lastLine = ""; 
- private boolean inProgressReport = false; private String heading = ""; + private boolean inProgressReport = false; - VagrantLoggerOutputStream(ProgressLogger progressLogger, String squashedPrefix) { - this.progressLogger = progressLogger; + public VagrantProgressLogger(String squashedPrefix) { this.squashedPrefix = squashedPrefix; } @Override - protected void logLine(String line) { - if (isStarted == false) { - progressLogger.started("started"); - isStarted = true; - } + public String apply(String line) { if (line.startsWith("\r\u001b")) { /* We don't want to try to be a full terminal emulator but we want to keep the escape sequences from leaking and catch _some_ of the @@ -51,7 +44,7 @@ public class VagrantLoggerOutputStream extends LoggingOutputStream { if ("[K".equals(line)) { inProgressReport = true; } - return; + return null; } if (line.startsWith(squashedPrefix)) { line = line.substring(squashedPrefix.length()); @@ -67,14 +60,8 @@ public class VagrantLoggerOutputStream extends LoggingOutputStream { inProgressReport = false; line = lastLine + line; } else { - return; + return null; } - progressLogger.progress(line); - } - - @Override - public void close() { - flush(); - progressLogger.completed(); + return line; } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantShellTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantShellTask.java new file mode 100644 index 00000000000..12561712ccc --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/vagrant/VagrantShellTask.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.vagrant; + +import org.gradle.api.DefaultTask; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.TaskAction; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.UnaryOperator; +import java.util.stream.Collectors; + +import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath; +import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath; + +/** + * A shell script to run within a vagrant VM. + * + * The script is run as root within the VM. 
+ */ +public abstract class VagrantShellTask extends DefaultTask { + + private final VagrantExtension extension; + private final VagrantMachine service; + private UnaryOperator progressHandler = UnaryOperator.identity(); + + public VagrantShellTask() { + extension = getProject().getExtensions().findByType(VagrantExtension.class); + if (extension == null) { + throw new IllegalStateException("elasticsearch.vagrant-base must be applied to create " + getClass().getName()); + } + service = getProject().getExtensions().getByType(VagrantMachine.class); + } + + @Input + protected abstract List getWindowsScript(); + + @Input + protected abstract List getLinuxScript(); + + @Input + public UnaryOperator getProgressHandler() { + return progressHandler; + } + + public void setProgressHandler(UnaryOperator progressHandler) { + this.progressHandler = progressHandler; + } + + @TaskAction + public void runScript() { + String rootDir = getProject().getRootDir().toString(); + if (extension.isWindowsVM()) { + service.execute(spec -> { + spec.setCommand("winrm"); + + List script = new ArrayList<>(); + script.add("try {"); + script.add("cd " + convertWindowsPath(getProject(), rootDir)); + extension.getVmEnv().forEach((k, v) -> script.add("$Env:" + k + " = \"" + v + "\"")); + script.addAll(getWindowsScript().stream().map(s -> " " + s).collect(Collectors.toList())); + script.addAll(Arrays.asList( + " exit $LASTEXITCODE", + "} catch {", + // catch if we have a failure to even run the script at all above, equivalent to set -e, sort of + " echo $_.Exception.Message", + " exit 1", + "}")); + spec.setArgs("--elevated", "--command", String.join("\n", script)); + spec.setProgressHandler(progressHandler); + }); + } else { + service.execute(spec -> { + spec.setCommand("ssh"); + + List script = new ArrayList<>(); + script.add("sudo bash -c '"); // start inline bash script + script.add("pwd"); + script.add("cd " + convertLinuxPath(getProject(), rootDir)); + extension.getVmEnv().forEach((k, v) -> 
script.add("export " + k + "=" + v)); + script.addAll(getLinuxScript()); + script.add("'"); // end inline bash script + spec.setArgs("--command", String.join("\n", script)); + spec.setProgressHandler(progressHandler); + }); + } + } + + +} diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.distro-test.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.distro-test.properties new file mode 100644 index 00000000000..90b2914e452 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.distro-test.properties @@ -0,0 +1,20 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +implementation-class=org.elasticsearch.gradle.test.DistroTestPlugin diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.reaper.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.reaper.properties new file mode 100644 index 00000000000..46d0f45ac5e --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.reaper.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.ReaperPlugin \ No newline at end of file diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrant.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrant.properties deleted file mode 100644 index 844310fa9d7..00000000000 --- a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrant.properties +++ /dev/null @@ -1 +0,0 @@ -implementation-class=org.elasticsearch.gradle.vagrant.VagrantTestPlugin diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrantsupport.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrantsupport.properties deleted file mode 100644 index 73a3f412349..00000000000 --- a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrantsupport.properties +++ /dev/null @@ -1 +0,0 @@ -implementation-class=org.elasticsearch.gradle.vagrant.VagrantSupportPlugin \ No newline at end of file diff --git a/buildSrc/src/minimumRuntime/java/org/elasticsearch/gradle/LoggedExec.java b/buildSrc/src/minimumRuntime/java/org/elasticsearch/gradle/LoggedExec.java index 89031916160..343ead0bdf6 100644 --- a/buildSrc/src/minimumRuntime/java/org/elasticsearch/gradle/LoggedExec.java +++ b/buildSrc/src/minimumRuntime/java/org/elasticsearch/gradle/LoggedExec.java @@ -16,10 +16,13 @@ import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.OutputStream; +import java.io.UncheckedIOException; import 
java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.function.Consumer; import java.util.function.Function; +import java.util.regex.Pattern; /** * A wrapper around gradle's Exec task to capture output and log on error. @@ -28,7 +31,7 @@ import java.util.function.Function; public class LoggedExec extends Exec { private Consumer outputLogger; - + public LoggedExec() { if (getLogger().isInfoEnabled() == false) { @@ -98,6 +101,8 @@ public class LoggedExec extends Exec { return genericExec(project, project::javaexec, action); } + private static final Pattern NEWLINE = Pattern.compile(System.lineSeparator()); + private static ExecResult genericExec( Project project, Function,ExecResult> function, @@ -107,19 +112,20 @@ public class LoggedExec extends Exec { return function.apply(action); } ByteArrayOutputStream output = new ByteArrayOutputStream(); - ByteArrayOutputStream error = new ByteArrayOutputStream(); try { return function.apply(spec -> { spec.setStandardOutput(output); - spec.setErrorOutput(error); + spec.setErrorOutput(output); action.execute(spec); + try { + output.write(("Output for " + spec.getExecutable() + ":").getBytes(StandardCharsets.UTF_8)); + } catch (IOException e) { + throw new UncheckedIOException(e); + } }); } catch (Exception e) { try { - project.getLogger().error("Standard output:"); - project.getLogger().error(output.toString("UTF-8")); - project.getLogger().error("Standard error:"); - project.getLogger().error(error.toString("UTF-8")); + NEWLINE.splitAsStream(output.toString("UTF-8")).forEach(s -> project.getLogger().error("| " + s)); } catch (UnsupportedEncodingException ue) { throw new GradleException("Failed to read exec output", ue); } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginIT.java index d83de5f2173..5f728392313 100644 --- 
a/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginIT.java @@ -63,12 +63,12 @@ public class DistributionDownloadPluginIT extends GradleIntegrationTestCase { Files.newInputStream(Paths.get("src/testKit/distribution-download/distribution/files/fake_elasticsearch.zip"))) { filebytes = stream.readAllBytes(); } - String urlPath = "/downloads/elasticsearch/elasticsearch-1.0.0-windows-x86_64.zip"; + String urlPath = "/downloads/elasticsearch/elasticsearch-7.0.0-windows-x86_64.zip"; wireMock.stubFor(head(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200))); wireMock.stubFor(get(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200).withBody(filebytes))); wireMock.start(); - assertExtractedDistro("1.0.0", "archive", "windows", null, null, + assertExtractedDistro("7.0.0", "archive", "windows", null, null, "tests.download_service", wireMock.baseUrl()); } catch (Exception e) { // for debugging diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/ReaperPluginIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/ReaperPluginIT.java new file mode 100644 index 00000000000..fc22f85d12d --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/ReaperPluginIT.java @@ -0,0 +1,21 @@ +package org.elasticsearch.gradle; + +import org.elasticsearch.gradle.test.GradleIntegrationTestCase; +import org.gradle.testkit.runner.BuildResult; +import org.gradle.testkit.runner.GradleRunner; +import org.junit.Before; + +public class ReaperPluginIT extends GradleIntegrationTestCase { + private GradleRunner runner; + + @Before + public void setup() { + runner = getGradleRunner("reaper"); + } + + public void testCanLaunchReaper() { + BuildResult result = runner.withArguments(":launchReaper", "-S", "--info").build(); + assertTaskSuccessful(result, ":launchReaper"); + assertOutputContains(result.getOutput(), "Copying reaper.jar..."); + } +} diff --git 
a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/DependencyLicensesTaskTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/DependencyLicensesTaskTests.java index 397c5938fba..5492a7cfa96 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/DependencyLicensesTaskTests.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/DependencyLicensesTaskTests.java @@ -1,11 +1,13 @@ package org.elasticsearch.gradle.precommit; import org.elasticsearch.gradle.test.GradleUnitTestCase; +import org.gradle.api.Action; import org.gradle.api.GradleException; import org.gradle.api.Project; import org.gradle.api.artifacts.Dependency; import org.gradle.api.file.FileCollection; import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.tasks.TaskProvider; import org.gradle.testfixtures.ProjectBuilder; import org.junit.Before; import org.junit.Rule; @@ -31,7 +33,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { private UpdateShasTask updateShas; - private DependencyLicensesTask task; + private TaskProvider task; private Project project; @@ -51,7 +53,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { expectedException.expectMessage(containsString("exists, but there are no dependencies")); getLicensesDir(project).mkdir(); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -60,12 +62,12 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { expectedException.expectMessage(containsString("does not exist, but there are dependencies")); project.getDependencies().add("compile", dependency); - task.checkDependencies(); + task.get().checkDependencies(); } @Test public void givenProjectWithoutLicensesDirNorDependenciesThenShouldReturnSilently() throws Exception { - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -78,7 +80,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { createFileIn(licensesDir, 
"groovy-all-NOTICE.txt", ""); project.getDependencies().add("compile", project.getDependencies().localGroovy()); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -90,7 +92,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { getLicensesDir(project).mkdir(); updateShas.updateShas(); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -103,7 +105,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { createFileIn(getLicensesDir(project), "groovy-all-LICENSE.txt", ""); updateShas.updateShas(); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -113,7 +115,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { File licensesDir = getLicensesDir(project); createAllDefaultDependencyFiles(licensesDir, "groovy-all"); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -127,7 +129,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { createAllDefaultDependencyFiles(licensesDir, "groovy-all"); createFileIn(licensesDir, "non-declared-LICENSE.txt", ""); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -141,7 +143,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { createAllDefaultDependencyFiles(licensesDir, "groovy-all"); createFileIn(licensesDir, "non-declared-NOTICE.txt", ""); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -155,7 +157,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { createAllDefaultDependencyFiles(licensesDir, "groovy-all"); createFileIn(licensesDir, "non-declared.sha1", ""); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -175,7 +177,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { Files.write(groovySha, new byte[] { 1 }, StandardOpenOption.CREATE); - task.checkDependencies(); + task.get().checkDependencies(); } @Test @@ -189,8 +191,8 @@ 
public class DependencyLicensesTaskTests extends GradleUnitTestCase { mappings.put("from", "groovy-all"); mappings.put("to", "groovy"); - task.mapping(mappings); - task.checkDependencies(); + task.get().mapping(mappings); + task.get().checkDependencies(); } @Test @@ -201,8 +203,8 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { createFileIn(licensesDir, "groovy-all-LICENSE.txt", ""); createFileIn(licensesDir, "groovy-all-NOTICE.txt", ""); - task.ignoreSha("groovy-all"); - task.checkDependencies(); + task.get().ignoreSha("groovy-all"); + task.get().checkDependencies(); } @Test @@ -210,7 +212,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { expectedException.expect(GradleException.class); expectedException.expectMessage(containsString("isn't a valid directory")); - task.getShaFiles(); + task.get().getShaFiles(); } private Project createProject() { @@ -244,7 +246,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { Files.write(file, content.getBytes(StandardCharsets.UTF_8)); } - private UpdateShasTask createUpdateShasTask(Project project, DependencyLicensesTask dependencyLicensesTask) { + private UpdateShasTask createUpdateShasTask(Project project, TaskProvider dependencyLicensesTask) { UpdateShasTask task = project.getTasks() .register("updateShas", UpdateShasTask.class) .get(); @@ -253,12 +255,15 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase { return task; } - private DependencyLicensesTask createDependencyLicensesTask(Project project) { - DependencyLicensesTask task = project.getTasks() - .register("dependencyLicenses", DependencyLicensesTask.class) - .get(); + private TaskProvider createDependencyLicensesTask(Project project) { + TaskProvider task = project.getTasks() + .register("dependencyLicenses", DependencyLicensesTask.class, new Action() { + @Override + public void execute(DependencyLicensesTask dependencyLicensesTask) { + 
dependencyLicensesTask.setDependencies(getDependencies(project)); + } + }); - task.setDependencies(getDependencies(project)); return task; } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/UpdateShasTaskTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/UpdateShasTaskTests.java index 62ac9600a83..1cf523e2e62 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/UpdateShasTaskTests.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/UpdateShasTaskTests.java @@ -2,11 +2,13 @@ package org.elasticsearch.gradle.precommit; import org.apache.commons.io.FileUtils; import org.elasticsearch.gradle.test.GradleUnitTestCase; +import org.gradle.api.Action; import org.gradle.api.GradleException; import org.gradle.api.Project; import org.gradle.api.artifacts.Dependency; import org.gradle.api.file.FileCollection; import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.tasks.TaskProvider; import org.gradle.testfixtures.ProjectBuilder; import org.junit.Before; import org.junit.Rule; @@ -125,12 +127,15 @@ public class UpdateShasTaskTests extends GradleUnitTestCase { return task; } - private DependencyLicensesTask createDependencyLicensesTask(Project project) { - DependencyLicensesTask task = project.getTasks() - .register("dependencyLicenses", DependencyLicensesTask.class) - .get(); + private TaskProvider createDependencyLicensesTask(Project project) { + TaskProvider task = project.getTasks() + .register("dependencyLicenses", DependencyLicensesTask.class, new Action() { + @Override + public void execute(DependencyLicensesTask dependencyLicensesTask) { + dependencyLicensesTask.setDependencies(getDependencies(project)); + } + }); - task.setDependencies(getDependencies(project)); return task; } diff --git a/buildSrc/src/testKit/reaper/build.gradle b/buildSrc/src/testKit/reaper/build.gradle new file mode 100644 index 00000000000..64e9f48545c --- /dev/null +++ b/buildSrc/src/testKit/reaper/build.gradle @@ 
-0,0 +1,11 @@ +plugins { + id 'elasticsearch.reaper' +} + +task launchReaper { + doLast { + def reaper = project.extensions.getByName('reaper') + reaper.registerCommand('test', 'true') + reaper.unregister('test') + } +} \ No newline at end of file diff --git a/buildSrc/src/testKit/reaper/settings.gradle b/buildSrc/src/testKit/reaper/settings.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/buildSrc/version.properties b/buildSrc/version.properties index b71fca759b6..9f0b9967697 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -20,7 +20,7 @@ slf4j = 1.6.2 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 4.5.1 -netty = 4.1.36.Final +netty = 4.1.38.Final joda = 2.10.2 # when updating this version, you need to ensure compatibility with: diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java index e8ed27715c1..0eceed67c4f 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java @@ -55,7 +55,7 @@ public class NoopPlugin extends Plugin implements ActionPlugin { IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { return Arrays.asList( - new RestNoopBulkAction(settings, restController), - new RestNoopSearchAction(settings, restController)); + new RestNoopBulkAction(restController), + new RestNoopSearchAction(restController)); } } diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java 
b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java index a8317fec83a..a6609d7af42 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Requests; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.BaseRestHandler; @@ -45,9 +44,8 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; import static org.elasticsearch.rest.RestStatus.OK; public class RestNoopBulkAction extends BaseRestHandler { - public RestNoopBulkAction(Settings settings, RestController controller) { - super(settings); + public RestNoopBulkAction(RestController controller) { controller.registerHandler(POST, "/_noop_bulk", this); controller.registerHandler(PUT, "/_noop_bulk", this); controller.registerHandler(POST, "/{index}/_noop_bulk", this); diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/RestNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/RestNoopSearchAction.java index 39c9510b8a2..bc9fbe00257 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/RestNoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/RestNoopSearchAction.java @@ -20,20 +20,17 @@ package org.elasticsearch.plugin.noop.action.search; import 
org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestNoopSearchAction extends BaseRestHandler { - public RestNoopSearchAction(Settings settings, RestController controller) { - super(settings); + + public RestNoopSearchAction(RestController controller) { controller.registerHandler(GET, "/_noop_search", this); controller.registerHandler(POST, "/_noop_search", this); controller.registerHandler(GET, "/{index}/_noop_search", this); @@ -48,7 +45,7 @@ public class RestNoopSearchAction extends BaseRestHandler { } @Override - public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) { SearchRequest searchRequest = new SearchRequest(); return channel -> client.execute(NoopSearchAction.INSTANCE, searchRequest, new RestStatusToXContentListener<>(channel)); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformCheckpointStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformCheckpointStats.java index 2239fe9f46c..7c8d853768b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformCheckpointStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformCheckpointStats.java @@ -19,10 +19,8 @@ package 
org.elasticsearch.client.dataframe.transforms; -import org.elasticsearch.client.core.IndexerState; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -33,16 +31,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona public class DataFrameTransformCheckpointStats { public static final ParseField CHECKPOINT = new ParseField("checkpoint"); - public static final ParseField INDEXER_STATE = new ParseField("indexer_state"); public static final ParseField POSITION = new ParseField("position"); public static final ParseField CHECKPOINT_PROGRESS = new ParseField("checkpoint_progress"); public static final ParseField TIMESTAMP_MILLIS = new ParseField("timestamp_millis"); public static final ParseField TIME_UPPER_BOUND_MILLIS = new ParseField("time_upper_bound_millis"); - public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, null, 0L, 0L); + public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, 0L, 0L); private final long checkpoint; - private final IndexerState indexerState; private final DataFrameIndexerPosition position; private final DataFrameTransformProgress checkpointProgress; private final long timestampMillis; @@ -51,19 +47,16 @@ public class DataFrameTransformCheckpointStats { public static final ConstructingObjectParser LENIENT_PARSER = new ConstructingObjectParser<>( "data_frame_transform_checkpoint_stats", true, args -> { long checkpoint = args[0] == null ? 
0L : (Long) args[0]; - IndexerState indexerState = (IndexerState) args[1]; - DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[2]; - DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[3]; - long timestamp = args[4] == null ? 0L : (Long) args[4]; - long timeUpperBound = args[5] == null ? 0L : (Long) args[5]; + DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[1]; + DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[2]; + long timestamp = args[3] == null ? 0L : (Long) args[3]; + long timeUpperBound = args[4] == null ? 0L : (Long) args[4]; - return new DataFrameTransformCheckpointStats(checkpoint, indexerState, position, checkpointProgress, timestamp, timeUpperBound); + return new DataFrameTransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound); }); static { LENIENT_PARSER.declareLong(optionalConstructorArg(), CHECKPOINT); - LENIENT_PARSER.declareField(optionalConstructorArg(), p -> IndexerState.fromString(p.text()), INDEXER_STATE, - ObjectParser.ValueType.STRING); LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameIndexerPosition.PARSER, POSITION); LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameTransformProgress.PARSER, CHECKPOINT_PROGRESS); LENIENT_PARSER.declareLong(optionalConstructorArg(), TIMESTAMP_MILLIS); @@ -74,11 +67,10 @@ public class DataFrameTransformCheckpointStats { return LENIENT_PARSER.parse(parser, null); } - public DataFrameTransformCheckpointStats(final long checkpoint, final IndexerState indexerState, - final DataFrameIndexerPosition position, final DataFrameTransformProgress checkpointProgress, - final long timestampMillis, final long timeUpperBoundMillis) { + public DataFrameTransformCheckpointStats(final long checkpoint, final DataFrameIndexerPosition position, + final DataFrameTransformProgress checkpointProgress, final long timestampMillis, + final long timeUpperBoundMillis) { 
this.checkpoint = checkpoint; - this.indexerState = indexerState; this.position = position; this.checkpointProgress = checkpointProgress; this.timestampMillis = timestampMillis; @@ -89,10 +81,6 @@ public class DataFrameTransformCheckpointStats { return checkpoint; } - public IndexerState getIndexerState() { - return indexerState; - } - public DataFrameIndexerPosition getPosition() { return position; } @@ -111,7 +99,7 @@ public class DataFrameTransformCheckpointStats { @Override public int hashCode() { - return Objects.hash(checkpoint, indexerState, position, checkpointProgress, timestampMillis, timeUpperBoundMillis); + return Objects.hash(checkpoint, position, checkpointProgress, timestampMillis, timeUpperBoundMillis); } @Override @@ -127,7 +115,6 @@ public class DataFrameTransformCheckpointStats { DataFrameTransformCheckpointStats that = (DataFrameTransformCheckpointStats) other; return this.checkpoint == that.checkpoint - && Objects.equals(this.indexerState, that.indexerState) && Objects.equals(this.position, that.position) && Objects.equals(this.checkpointProgress, that.checkpointProgress) && this.timestampMillis == that.timestampMillis diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStats.java index 4d83d36c109..578bed0d37f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStats.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Locale; import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -33,20 +34,20 @@ import 
static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona public class DataFrameTransformStats { public static final ParseField ID = new ParseField("id"); - public static final ParseField TASK_STATE_FIELD = new ParseField("task_state"); + public static final ParseField STATE_FIELD = new ParseField("state"); public static final ParseField REASON_FIELD = new ParseField("reason"); public static final ParseField NODE_FIELD = new ParseField("node"); public static final ParseField STATS_FIELD = new ParseField("stats"); public static final ParseField CHECKPOINTING_INFO_FIELD = new ParseField("checkpointing"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_frame_transform_state_and_stats_info", true, - a -> new DataFrameTransformStats((String) a[0], (DataFrameTransformTaskState) a[1], (String) a[2], - (NodeAttributes) a[3], (DataFrameIndexerTransformStats) a[4], (DataFrameTransformCheckpointingInfo) a[5])); + "data_frame_transform_state_and_stats_info", true, + a -> new DataFrameTransformStats((String) a[0], (State) a[1], (String) a[2], + (NodeAttributes) a[3], (DataFrameIndexerTransformStats) a[4], (DataFrameTransformCheckpointingInfo) a[5])); static { PARSER.declareString(constructorArg(), ID); - PARSER.declareField(optionalConstructorArg(), p -> DataFrameTransformTaskState.fromString(p.text()), TASK_STATE_FIELD, + PARSER.declareField(optionalConstructorArg(), p -> State.fromString(p.text()), STATE_FIELD, ObjectParser.ValueType.STRING); PARSER.declareString(optionalConstructorArg(), REASON_FIELD); PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE_FIELD, ObjectParser.ValueType.OBJECT); @@ -61,16 +62,15 @@ public class DataFrameTransformStats { private final String id; private final String reason; - private final DataFrameTransformTaskState taskState; + private final State state; private final NodeAttributes node; private final DataFrameIndexerTransformStats indexerStats; private 
final DataFrameTransformCheckpointingInfo checkpointingInfo; - public DataFrameTransformStats(String id, DataFrameTransformTaskState taskState, String reason, NodeAttributes node, - DataFrameIndexerTransformStats stats, + public DataFrameTransformStats(String id, State state, String reason, NodeAttributes node, DataFrameIndexerTransformStats stats, DataFrameTransformCheckpointingInfo checkpointingInfo) { this.id = id; - this.taskState = taskState; + this.state = state; this.reason = reason; this.node = node; this.indexerStats = stats; @@ -81,8 +81,8 @@ public class DataFrameTransformStats { return id; } - public DataFrameTransformTaskState getTaskState() { - return taskState; + public State getState() { + return state; } public String getReason() { @@ -103,7 +103,7 @@ public class DataFrameTransformStats { @Override public int hashCode() { - return Objects.hash(id, taskState, reason, node, indexerStats, checkpointingInfo); + return Objects.hash(id, state, reason, node, indexerStats, checkpointingInfo); } @Override @@ -119,10 +119,23 @@ public class DataFrameTransformStats { DataFrameTransformStats that = (DataFrameTransformStats) other; return Objects.equals(this.id, that.id) - && Objects.equals(this.taskState, that.taskState) + && Objects.equals(this.state, that.state) && Objects.equals(this.reason, that.reason) && Objects.equals(this.node, that.node) && Objects.equals(this.indexerStats, that.indexerStats) && Objects.equals(this.checkpointingInfo, that.checkpointingInfo); } + + public enum State { + + STARTED, INDEXING, ABORTING, STOPPING, STOPPED, FAILED; + + public static State fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } + + public String value() { + return name().toLowerCase(Locale.ROOT); + } + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java index a6899930086..2e0175f4164 
100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java @@ -44,7 +44,6 @@ import org.elasticsearch.client.dataframe.transforms.DataFrameIndexerTransformSt import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfigUpdate; import org.elasticsearch.client.dataframe.transforms.DataFrameTransformStats; -import org.elasticsearch.client.dataframe.transforms.DataFrameTransformTaskState; import org.elasticsearch.client.dataframe.transforms.DestConfig; import org.elasticsearch.client.dataframe.transforms.SourceConfig; import org.elasticsearch.client.dataframe.transforms.TimeSyncConfig; @@ -306,10 +305,11 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id), client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync); assertThat(statsResponse.getTransformsStats(), hasSize(1)); - DataFrameTransformTaskState taskState = statsResponse.getTransformsStats().get(0).getTaskState(); + DataFrameTransformStats.State taskState = statsResponse.getTransformsStats().get(0).getState(); // Since we are non-continuous, the transform could auto-stop between being started earlier and us gathering the statistics - assertThat(taskState, is(oneOf(DataFrameTransformTaskState.STARTED, DataFrameTransformTaskState.STOPPED))); + assertThat(taskState, oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING, + DataFrameTransformStats.State.STOPPING, DataFrameTransformStats.State.STOPPED)); StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null); StopDataFrameTransformResponse stopResponse = @@ -321,8 +321,8 @@ public class DataFrameTransformIT extends 
ESRestHighLevelClientTestCase { // Calling stop with wait_for_completion assures that we will be in the `STOPPED` state for the transform task statsResponse = execute(new GetDataFrameTransformStatsRequest(id), client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync); - taskState = statsResponse.getTransformsStats().get(0).getTaskState(); - assertThat(taskState, is(DataFrameTransformTaskState.STOPPED)); + taskState = statsResponse.getTransformsStats().get(0).getState(); + assertThat(taskState, is(DataFrameTransformStats.State.STOPPED)); } @SuppressWarnings("unchecked") @@ -405,7 +405,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { assertEquals(1, statsResponse.getTransformsStats().size()); DataFrameTransformStats stats = statsResponse.getTransformsStats().get(0); - assertEquals(DataFrameTransformTaskState.STOPPED, stats.getTaskState()); + assertEquals(DataFrameTransformStats.State.STOPPED, stats.getState()); DataFrameIndexerTransformStats zeroIndexerStats = new DataFrameIndexerTransformStats(0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L); assertEquals(zeroIndexerStats, stats.getIndexerStats()); @@ -420,8 +420,8 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync); DataFrameTransformStats stateAndStats = response.getTransformsStats().get(0); assertNotEquals(zeroIndexerStats, stateAndStats.getIndexerStats()); - assertThat(stateAndStats.getTaskState(), - is(oneOf(DataFrameTransformTaskState.STARTED, DataFrameTransformTaskState.STOPPED))); + assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING, + DataFrameTransformStats.State.STOPPING, DataFrameTransformStats.State.STOPPED)); assertThat(stateAndStats.getReason(), is(nullValue())); }); } diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformCheckpointStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformCheckpointStatsTests.java index 4d4ba5967e7..ec7e8b6422e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformCheckpointStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformCheckpointStatsTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.client.dataframe.transforms; -import org.elasticsearch.client.core.IndexerState; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; @@ -41,7 +40,6 @@ public class DataFrameTransformCheckpointStatsTests extends ESTestCase { public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() { return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000), - randomBoolean() ? null : randomFrom(IndexerState.values()), randomBoolean() ? null : DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(), randomBoolean() ? 
null : DataFrameTransformProgressTests.randomInstance(), randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000)); @@ -50,9 +48,6 @@ public class DataFrameTransformCheckpointStatsTests extends ESTestCase { public static void toXContent(DataFrameTransformCheckpointStats stats, XContentBuilder builder) throws IOException { builder.startObject(); builder.field(DataFrameTransformCheckpointStats.CHECKPOINT.getPreferredName(), stats.getCheckpoint()); - if (stats.getIndexerState() != null) { - builder.field(DataFrameTransformCheckpointStats.INDEXER_STATE.getPreferredName(), stats.getIndexerState().value()); - } if (stats.getPosition() != null) { builder.field(DataFrameTransformCheckpointStats.POSITION.getPreferredName()); DataFrameIndexerPositionTests.toXContent(stats.getPosition(), builder); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStatsTests.java index af3bf53704a..ae252069c61 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStatsTests.java @@ -41,7 +41,7 @@ public class DataFrameTransformStatsTests extends ESTestCase { public static DataFrameTransformStats randomInstance() { return new DataFrameTransformStats(randomAlphaOfLength(10), - randomBoolean() ? null : randomFrom(DataFrameTransformTaskState.values()), + randomBoolean() ? null : randomFrom(DataFrameTransformStats.State.values()), randomBoolean() ? null : randomAlphaOfLength(100), randomBoolean() ? 
null : NodeAttributesTests.createRandom(), DataFrameIndexerTransformStatsTests.randomStats(), @@ -51,9 +51,9 @@ public class DataFrameTransformStatsTests extends ESTestCase { public static void toXContent(DataFrameTransformStats stats, XContentBuilder builder) throws IOException { builder.startObject(); builder.field(DataFrameTransformStats.ID.getPreferredName(), stats.getId()); - if (stats.getTaskState() != null) { - builder.field(DataFrameTransformStats.TASK_STATE_FIELD.getPreferredName(), - stats.getTaskState().value()); + if (stats.getState() != null) { + builder.field(DataFrameTransformStats.STATE_FIELD.getPreferredName(), + stats.getState().value()); } if (stats.getReason() != null) { builder.field(DataFrameTransformStats.REASON_FIELD.getPreferredName(), stats.getReason()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java index 0a41cfc85e9..e0976f040de 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.client.dataframe.transforms.hlrc; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.client.AbstractHlrcXContentTestCase; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointStats; -import org.elasticsearch.xpack.core.indexing.IndexerState; import java.io.IOException; import java.util.function.Predicate; @@ -34,7 +33,6 @@ public class DataFrameTransformCheckpointStatsTests extends AbstractHlrcXContent public static DataFrameTransformCheckpointStats fromHlrc( 
org.elasticsearch.client.dataframe.transforms.DataFrameTransformCheckpointStats instance) { return new DataFrameTransformCheckpointStats(instance.getCheckpoint(), - (instance.getIndexerState() != null) ? IndexerState.fromString(instance.getIndexerState().value()) : null, DataFrameIndexerPositionTests.fromHlrc(instance.getPosition()), DataFrameTransformProgressTests.fromHlrc(instance.getCheckpointProgress()), instance.getTimestampMillis(), @@ -55,7 +53,6 @@ public class DataFrameTransformCheckpointStatsTests extends AbstractHlrcXContent public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() { return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000), - randomBoolean() ? null : randomFrom(IndexerState.values()), DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(), randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(), randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/hlrc/DataFrameTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/hlrc/DataFrameTransformStatsTests.java index e5dd37fcd9d..e65ecf10d96 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/hlrc/DataFrameTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/hlrc/DataFrameTransformStatsTests.java @@ -26,9 +26,7 @@ import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheck import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointingInfo; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformProgress; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStats; -import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState; 
import org.elasticsearch.xpack.core.dataframe.transforms.NodeAttributes; -import org.elasticsearch.xpack.core.indexing.IndexerState; import java.io.IOException; import java.util.HashMap; @@ -50,7 +48,7 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase - DataFrameTransformTaskState taskState = - stateAndStatsInfo.getTaskState(); // <2> - IndexerState indexerState = - stateAndStatsInfo.getCheckpointingInfo() - .getNext().getIndexerState(); // <3> - DataFrameIndexerTransformStats transformStats = - stateAndStatsInfo.getIndexerStats(); // <4> + DataFrameTransformStats.State state = + stats.getState(); // <2> + DataFrameIndexerTransformStats indexerStats = + stats.getIndexerStats(); // <3> DataFrameTransformProgress progress = - stateAndStatsInfo.getCheckpointingInfo() - .getNext().getCheckpointProgress(); // <5> + stats.getCheckpointingInfo() + .getNext().getCheckpointProgress(); // <4> NodeAttributes node = - stateAndStatsInfo.getNode(); // <6> + stats.getNode(); // <5> // end::get-data-frame-transform-stats-response - assertEquals(DataFrameTransformTaskState.STOPPED, taskState); - assertNotNull(transformStats); + assertEquals(DataFrameTransformStats.State.STOPPED, state); + assertNotNull(indexerStats); assertNull(progress); } { diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 16dc03dc83e..7bf973e7edc 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -1,8 +1,7 @@ import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.LoggedExec -import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin +import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.test.fixtures' @@ -58,7 +57,7 @@ project.ext { } 
from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) { - MavenFilteringHack.filter(it, expansions(oss, local)) + expand(expansions(oss, local)) } } } @@ -66,7 +65,9 @@ project.ext { void addCopyDockerContextTask(final boolean oss) { task(taskName("copy", oss, "DockerContext"), type: Sync) { - inputs.properties(expansions(oss, true)) + expansions(oss, true).each { k, v -> + inputs.property(k, { v.toString() }) + } into files(oss) with dockerBuildContext(oss, true) diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index 827471a7a26..82188eac69d 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -13,7 +13,7 @@ FROM centos:7 AS builder -ENV PATH /usr/share/elasticsearch/bin:$PATH +ENV PATH /usr/share/elasticsearch/bin:\$PATH RUN groupadd -g 1000 elasticsearch && \ adduser -u 1000 -g 1000 -d /usr/share/elasticsearch elasticsearch @@ -41,8 +41,8 @@ ENV ELASTIC_CONTAINER true RUN for iter in {1..10}; do yum update -y && \ yum install -y nc && \ - yum clean all && exit_code=0 && break || exit_code=$? && echo "yum error: retry $iter in 10s" && sleep 10; done; \ - (exit $exit_code) + yum clean all && exit_code=0 && break || exit_code=\$? 
&& echo "yum error: retry \$iter in 10s" && sleep 10; done; \ + (exit \$exit_code) RUN groupadd -g 1000 elasticsearch && \ adduser -u 1000 -g 1000 -G 0 -d /usr/share/elasticsearch elasticsearch && \ @@ -57,7 +57,7 @@ COPY --from=builder --chown=1000:0 /usr/share/elasticsearch /usr/share/elasticse # REF: https://github.com/elastic/elasticsearch-docker/issues/171 RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts -ENV PATH /usr/share/elasticsearch/bin:$PATH +ENV PATH /usr/share/elasticsearch/bin:\$PATH COPY --chown=1000:0 bin/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java index d18ac681d75..c0b57c62dc0 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java @@ -64,6 +64,9 @@ final class JvmErgonomics { ergonomicChoices.add("-Dio.netty.allocator.type=pooled"); } } + if (systemProperties.containsKey("io.netty.allocator.numDirectArenas") == false) { + ergonomicChoices.add("-Dio.netty.allocator.numDirectArenas=0"); + } final long maxDirectMemorySize = extractMaxDirectMemorySize(finalJvmOptions); if (maxDirectMemorySize == 0) { ergonomicChoices.add("-XX:MaxDirectMemorySize=" + heapSize / 2); diff --git a/docs/java-rest/high-level/dataframe/get_data_frame_stats.asciidoc b/docs/java-rest/high-level/dataframe/get_data_frame_stats.asciidoc index 578ea808b9e..76223e61c1d 100644 --- a/docs/java-rest/high-level/dataframe/get_data_frame_stats.asciidoc +++ b/docs/java-rest/high-level/dataframe/get_data_frame_stats.asciidoc @@ -48,9 +48,8 @@ The returned +{response}+ contains the requested {dataframe-transform} statistic 
include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- <1> The response contains a list of `DataFrameTransformStats` objects -<2> The running state of the transform task e.g `started` -<3> The running state of the transform indexer e.g `started`, `indexing`, etc. -<4> The overall transform statistics recording the number of documents indexed etc. -<5> The progress of the current run in the transform. Supplies the number of docs left until the next checkpoint +<2> The running state of the transform, for example `started`, `indexing`, etc. +<3> The overall transform statistics recording the number of documents indexed etc. +<4> The progress of the current run in the transform. Supplies the number of docs left until the next checkpoint and the total number of docs expected. -<6> The assigned node information if the task is currently assigned to a node and running. +<5> The assigned node information if the task is currently assigned to a node and running. diff --git a/docs/reference/cat/plugins.asciidoc b/docs/reference/cat/plugins.asciidoc index 6e9f4571698..17a3a1ae6d4 100644 --- a/docs/reference/cat/plugins.asciidoc +++ b/docs/reference/cat/plugins.asciidoc @@ -1,7 +1,35 @@ [[cat-plugins]] === cat plugins -The `plugins` command provides a view per node of running plugins. This information *spans nodes*. +Returns a list of plugins running on each node of a cluster. 
+ + +[[cat-plugins-tasks-api-request]] +==== {api-request-title} + +`GET /_cat/plugins` + + +[[cat-plugins-tasks-api-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=local] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + + +[[cat-plugins-api-example]] +==== {api-examples-title} [source,js] ------------------------------------------------------------------------------ @@ -9,7 +37,7 @@ GET /_cat/plugins?v&s=component&h=name,component,version,description ------------------------------------------------------------------------------ // CONSOLE -Might look like: +The API returns the following response: ["source","txt",subs="attributes,callouts"] ------------------------------------------------------------------------------ @@ -31,6 +59,4 @@ U7321H6 mapper-size {version_qualified} The Mapper Size plugin allow U7321H6 store-smb {version_qualified} The Store SMB plugin adds support for SMB stores. U7321H6 transport-nio {version_qualified} The nio transport. ------------------------------------------------------------------------------ -// TESTRESPONSE[s/([.()])/\\$1/ s/U7321H6/.+/ non_json] - -We can tell quickly how many plugins per node we have and which versions. 
+// TESTRESPONSE[s/([.()])/\\$1/ s/U7321H6/.+/ non_json] \ No newline at end of file diff --git a/docs/reference/cat/recovery.asciidoc b/docs/reference/cat/recovery.asciidoc index 4f7f153a155..ab7f6fbe7f1 100644 --- a/docs/reference/cat/recovery.asciidoc +++ b/docs/reference/cat/recovery.asciidoc @@ -1,16 +1,61 @@ [[cat-recovery]] === cat recovery -The `recovery` command is a view of index shard recoveries, both on-going and previously -completed. It is a more compact view of the JSON <> API. +Returns information about ongoing and completed index shard recoveries, similar +to the <> API. -A recovery event occurs anytime an index shard moves to a different node in the cluster. -This can happen during a snapshot recovery, a change in replication level, node failure, or -on node startup. This last type is called a local store recovery and is the normal -way for shards to be loaded from disk when a node starts up. -As an example, here is what the recovery state of a cluster may look like when there -are no shards in transit from one node to another: +[[cat-recovery-api-request]] +==== {api-request-title} + +`GET /_cat/recovery/{index}` + + +[[cat-recovery-api-desc]] +==== {api-description-title} + +The cat recovery API returns information about index shard recoveries, both +ongoing and completed. It is a more compact view of the JSON +<> API. + +A recovery event occurs anytime an index shard moves to a different node in the +cluster. This can happen during a snapshot recovery, a change in replication +level, node failure, or on node startup. This last type is called a local store +recovery and is the normal way for shards to be loaded from disk when a node +starts up. 
+ + +[[cat-recovery-path-params]] +==== {api-path-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=index] + + +[[cat-recovery-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=bytes] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=local] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + + +[[cat-recovery-api-example]] +==== {api-examples-title} + +[[cat-recovery-api-ex-dead]] +===== Example with no ongoing recoveries [source,js] ---------------------------------------------------------------------------- @@ -19,7 +64,7 @@ GET _cat/recovery?v // CONSOLE // TEST[setup:twitter] -The response of this request will be something like: +The API returns the following response: [source,txt] --------------------------------------------------------------------------- @@ -32,12 +77,15 @@ twitter 0 13ms store done n/a n/a 127.0.0.1 node-0 n // TESTRESPONSE[s/13ms/[0-9.]+m?s/] // TESTRESPONSE[s/13/\\d+/ non_json] -In the above case, the source and target nodes are the same because the recovery -type was store, i.e. they were read from local storage on node start. +In this example response, the source and target nodes are the same because the +recovery type is `store`, meaning they were read from local storage on node +start. -Now let's see what a live recovery looks like. By increasing the replica count -of our index and bringing another node online to host the replicas, we can see -what a live shard recovery looks like. 
+[[cat-recovery-api-ex-live]] +===== Example with a live shard recovery + +By increasing the replica count of an index and bringing another node online to +host the replicas, you can retrieve information about an ongoing recovery. [source,js] ---------------------------------------------------------------------------- @@ -46,7 +94,7 @@ GET _cat/recovery?v&h=i,s,t,ty,st,shost,thost,f,fp,b,bp // CONSOLE // TEST[setup:twitter] -This will return a line like: +The API returns the following response: [source,txt] ---------------------------------------------------------------------------- @@ -59,13 +107,16 @@ twitter 0 1252ms peer done 192.168.1.1 192.168.1.2 0 100.0% 0 100.0% // TESTRESPONSE[s/100.0%/0.0%/] // TESTRESPONSE[s/1252ms/[0-9.]+m?s/ non_json] -We can see in the above listing that our thw twitter shard was recovered from another node. -Notice that the recovery type is shown as `peer`. The files and bytes copied are -real-time measurements. +In this example response, the recovery type is `peer`, meaning the shard +recovered from another node. The returned files and bytes are real-time +measurements. -Finally, let's see what a snapshot recovery looks like. Assuming I have previously -made a backup of my index, I can restore it using the <> -API. +[[cat-recovery-api-ex-snapshot]] +===== Example with a snapshot recovery + +You can restore backups of an index using the <> API. You can use the cat recovery API to retrieve information about a +snapshot recovery.
[source,js] -------------------------------------------------------------------------------- @@ -74,11 +125,11 @@ GET _cat/recovery?v&h=i,s,t,ty,st,rep,snap,f,fp,b,bp // CONSOLE // TEST[skip:no need to execute snapshot/restore here] -This will show a recovery of type snapshot in the response +The API returns the following response with a recovery type of `snapshot`: [source,txt] -------------------------------------------------------------------------------- i s t ty st rep snap f fp b bp twitter 0 1978ms snapshot done twitter snap_1 79 8.0% 12086 9.0% -------------------------------------------------------------------------------- -// TESTRESPONSE[non_json] +// TESTRESPONSE[non_json] \ No newline at end of file diff --git a/docs/reference/cat/repositories.asciidoc b/docs/reference/cat/repositories.asciidoc index a0a4263aa47..1f4ed6a0d49 100644 --- a/docs/reference/cat/repositories.asciidoc +++ b/docs/reference/cat/repositories.asciidoc @@ -1,8 +1,35 @@ [[cat-repositories]] === cat repositories -The `repositories` command shows the snapshot repositories registered in the -cluster. For example: +Returns the <> for a cluster. 
+ + +[[cat-repositories-api-request]] +==== {api-request-title} + +`GET /_cat/repositories` + + +[[cat-repositories-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=local] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + + +[[cat-repositories-api-example]] +==== {api-examples-title} [source,js] -------------------------------------------------- @@ -11,7 +38,7 @@ GET /_cat/repositories?v // CONSOLE // TEST[s/^/PUT \/_snapshot\/repo1\n{"type": "fs", "settings": {"location": "repo\/1"}}\n/] -might looks like: +The API returns the following response: [source,txt] -------------------------------------------------- @@ -20,5 +47,3 @@ repo1 fs repo2 s3 -------------------------------------------------- // TESTRESPONSE[s/\nrepo2 s3// non_json] - -We can quickly see which repositories are registered and their type. diff --git a/docs/reference/cat/segments.asciidoc b/docs/reference/cat/segments.asciidoc index bab89d6a67c..577c781eeb9 100644 --- a/docs/reference/cat/segments.asciidoc +++ b/docs/reference/cat/segments.asciidoc @@ -1,9 +1,109 @@ [[cat-segments]] === cat segments -The `segments` command provides low level information about the segments -in the shards of an index. It provides information similar to the -link:indices-segments.html[_segments] endpoint. For example: +Returns low-level information about the https://lucene.apache.org/core/[Lucene] +segments in index shards, similar to the <> +API. 
+ +[[cat-segments-api-request]] +==== {api-request-title} + +`GET /_cat/segments/{index}` + + +[[cat-segments-path-params]] +==== {api-path-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=index] + + +[[cat-segments-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=bytes] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] ++ +-- +If you do not specify which columns to include, the API returns the default +columns in the order listed below. If you explicitly specify one or more +columns, it only returns the specified columns. + +Valid columns are: + +`index`, `i`, `idx`:: +(Default) Name of the index, such as `twitter`. + +`shard`, `s`, `sh`:: +(Default) Name of the shard. + +`prirep`, `p`, `pr`, `primaryOrReplica`:: +(Default) Shard type. Returned values are `primary` or `replica`. + +`ip`:: +(Default) IP address of the segment's shard, such as `127.0.1.1`. + +`segment`:: +(Default) Name of the segment, such as `_0`. The segment name is derived from +the segment generation and used internally to create file names in the directory +of the shard. + +`generation`:: +(Default) Generation number, such as `0`. {es} increments this generation number +for each segment written. {es} then uses this number to derive the segment name. + +`docs.count`:: +(Default) Number of non-deleted documents in the segment, such as `25`. This +number is based on Lucene documents and may include documents from +<> fields. + +`docs.deleted`:: +(Default) Number of deleted documents in the segment, such as `0`. This number +is based on Lucene documents. {es} reclaims the disk space of deleted Lucene +documents when a segment is merged. + +`size`:: +(Default) Disk space used by the segment, such as `50kb`. + +`size.memory`:: +(Default) Bytes of segment data stored in memory for efficient search, such as +`1264`. 
+ +`committed`:: +(Default) If `true`, the segment is committed to disk. Segments committed to +disk would survive a hard reboot. ++ +If `false`, the data from uncommitted segments is also stored in the transaction +log. {es} replays those changes on the next start. + +`searchable`:: +(Default) If `true`, the segment is searchable. ++ +If `false`, likely means the segment is written to disk but has not been +<>. + +`version`:: +(Default) Version of Lucene used to write the segment. + +`compound`:: +(Default) If `true`, the segment is stored in a compound file. This means Lucene +merged all files from the segment in a single file to save file descriptors. + +`id`:: +ID of the node, such as `k0zy`. +-- + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + + +[[cat-shards-api-example]] +==== {api-examples-title} [source,js] -------------------------------------------------- @@ -12,7 +112,7 @@ GET /_cat/segments?v // CONSOLE // TEST[s/^/PUT \/test\/test\/1?refresh\n{"test":"test"}\nPUT \/test1\/test\/1?refresh\n{"test":"test"}\n/] -might look like: +The API returns the following response: ["source","txt",subs="attributes,callouts"] -------------------------------------------------- @@ -21,53 +121,3 @@ test 0 p 127.0.0.1 _0 0 1 0 3kb test1 0 p 127.0.0.1 _0 0 1 0 3kb 2042 false true {lucene_version} true -------------------------------------------------- // TESTRESPONSE[s/3kb/\\d+(\\.\\d+)?[mk]?b/ s/2042/\\d+/ non_json] - -The output shows information about index names and shard numbers in the first -two columns. - -If you only want to get information about segments in one particular index, -you can add the index name in the URL, for example `/_cat/segments/test`. 
Also, -several indexes can be queried like `/_cat/segments/test,test1` - - -The following columns provide additional monitoring information: - -prirep:: Whether this segment belongs to a primary or replica shard. - -ip:: The ip address of the segment's shard. - -segment:: A segment name, derived from the segment generation. The name - is internally used to generate the file names in the directory - of the shard this segment belongs to. - -generation:: The generation number is incremented with each segment that is written. - The name of the segment is derived from this generation number. - -docs.count:: The number of non-deleted documents that are stored in this segment. - Note that these are Lucene documents, so the count will include hidden - documents (e.g. from nested types). - -docs.deleted:: The number of deleted documents that are stored in this segment. - It is perfectly fine if this number is greater than 0, space is - going to be reclaimed when this segment gets merged. - -size:: The amount of disk space that this segment uses. - -size.memory:: Segments store some data into memory in order to be searchable efficiently. - This column shows the number of bytes in memory that are used. - -committed:: Whether the segment has been sync'ed on disk. Segments that are - committed would survive a hard reboot. No need to worry in case - of false, the data from uncommitted segments is also stored in - the transaction log so that Elasticsearch is able to replay - changes on the next start. - -searchable:: True if the segment is searchable. A value of false would most - likely mean that the segment has been written to disk but no - refresh occurred since then to make it searchable. - -version:: The version of Lucene that has been used to write this segment. - -compound:: Whether the segment is stored in a compound file. When true, this - means that Lucene merged all files from the segment in a single - one in order to save file descriptors. 
diff --git a/docs/reference/cat/snapshots.asciidoc b/docs/reference/cat/snapshots.asciidoc index af960e3604e..b7c6b0a35a6 100644 --- a/docs/reference/cat/snapshots.asciidoc +++ b/docs/reference/cat/snapshots.asciidoc @@ -1,9 +1,110 @@ [[cat-snapshots]] === cat snapshots -The `snapshots` command shows all snapshots that belong to a specific repository. -To find a list of available repositories to query, the command `/_cat/repositories` can be used. -Querying the snapshots of a repository named `repo1` then looks as follows. +Returns information about the <> stored in one or +more repositories. A snapshot is a backup of an index or running {es} cluster. + + +[[cat-snapshots-api-request]] +==== {api-request-title} + +`GET /_cat/snapshots/{repository}` + + +[[cat-snapshots-path-params]] +==== {api-path-parms-title} + +`{repository}`:: ++ +-- +(Optional, string) Comma-separated list of snapshot repositories used to limit +the request. Accepts wildcard expressions. `_all` returns all repositories. + +If any repository fails during the request, {es} returns an error. +-- + + +[[cat-snapshots-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] ++ +-- +If you do not specify which columns to include, the API returns the default +columns in the order listed below. If you explicitly specify one or more +columns, it only returns the specified columns. + +Valid columns are: + +`id`, `snapshot`:: +(Default) ID of the snapshot, such as `snap1`. + +`repository`, `re`, `repo`:: +(Default) Name of the repository, such as `repo1`. + +`status`, `s`:: +(Default) State of the snapshot process. Returned values are: ++ +* `FAILED`: The snapshot process failed. +* `INCOMPATIBLE`: The snapshot process is incompatible with the current cluster +version. +* `IN_PROGRESS`: The snapshot process started but has not completed. 
+* `PARTIAL`: The snapshot process completed with a partial success. +* `SUCCESS`: The snapshot process completed with a full success. + +`start_epoch`, `ste`, `startEpoch`:: +(Default) https://en.wikipedia.org/wiki/Unix_time[Unix `epoch` time] at which +the snapshot process started. + +`start_time`, `sti`, `startTime`:: +(Default) `HH:MM:SS` time at which the snapshot process started. + +`end_epoch`, `ete`, `endEpoch`:: +(Default) https://en.wikipedia.org/wiki/Unix_time[Unix `epoch` time] at which +the snapshot process ended. + +`end_time`, `eti`, `endTime`:: +(Default) `HH:MM:SS` time at which the snapshot process ended. + +`duration`, `dur`:: +(Default) Time it took the snapshot process to complete in <>. + +`indices`, `i`:: +(Default) Number of indices in the snapshot. + +`successful_shards`, `ss`:: +(Default) Number of successful shards in the snapshot. + +`failed_shards`, `fs`:: +(Default) Number of failed shards in the snapshot. + +`total_shards`, `ts`:: +(Default) Total number of shards in the snapshot. + +`reason`, `r`:: +Reason for any snapshot failures. +-- + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=local] + +`ignore_unavailable`:: +(Optional, boolean) If `true`, the response does not include information from +unavailable snapshots. Defaults to `false`.
+ +include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + + +[[cat-snapshots-api-example]] +==== {api-examples-title} [source,js] -------------------------------------------------- @@ -14,7 +115,7 @@ GET /_cat/snapshots/repo1?v&s=id // TEST[s/^/PUT \/_snapshot\/repo1\/snap2?wait_for_completion=true\n/] // TEST[s/^/PUT \/_snapshot\/repo1\n{"type": "fs", "settings": {"location": "repo\/1"}}\n/] -Which looks like: +The API returns the following response: [source,txt] -------------------------------------------------- @@ -28,7 +129,3 @@ snap2 SUCCESS 1445634298 23:04:58 1445634672 23:11:12 6.2m 2 // TESTRESPONSE[s/2 10 0 10/\\d+ \\d+ \\d+ \\d+/] // TESTRESPONSE[non_json] -Each snapshot contains information about when it was started and stopped. -Start and stop timestamps are available in two formats. -The `HH:MM:SS` output is simply for quick human consumption. -The epoch time retains more information, including date, and is machine sortable if the snapshot process spans days. diff --git a/docs/reference/cat/templates.asciidoc b/docs/reference/cat/templates.asciidoc index 6a6a810c404..700edb08dd3 100644 --- a/docs/reference/cat/templates.asciidoc +++ b/docs/reference/cat/templates.asciidoc @@ -1,7 +1,45 @@ [[cat-templates]] === cat templates -The `templates` command provides information about existing templates. +Returns information about <> in a cluster. +You can use index templates to apply <> +and <> to new indices at creation. + + +[[cat-templates-api-request]] +==== {api-request-title} + +`GET /_cat/templates/{template_name}` + + +[[cat-templates-path-params]] +==== {api-path-parms-title} + +`{template_name}`:: +(Optional, string) Comma-separated list of index template names used to limit +the request. Accepts wildcard expressions. 
+ + +[[cat-templates-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=local] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + + +[[cat-templates-api-example]] +==== {api-examples-title} [source,js] -------------------------------------------------- @@ -19,7 +57,7 @@ GET /_cat/templates?v&s=name // templates. // 2. Create some templates to expect in the response. -which looks like +The API returns the following response: [source,txt] -------------------------------------------------- @@ -29,10 +67,3 @@ template1 [tea*] 1 template2 [teak*] 2 7 -------------------------------------------------- // TESTRESPONSE[s/\*/\\*/ s/\[/\\[/ s/\]/\\]/ non_json] - -The output shows that there are three existing templates, -with template2 having a version value. - -The endpoint also supports giving a template name or pattern in the url -to filter the results, for example `/_cat/templates/template*` or -`/_cat/templates/template0`. diff --git a/docs/reference/cat/thread_pool.asciidoc b/docs/reference/cat/thread_pool.asciidoc index 5440bc4e3ac..cea2b5e4d49 100644 --- a/docs/reference/cat/thread_pool.asciidoc +++ b/docs/reference/cat/thread_pool.asciidoc @@ -1,8 +1,119 @@ [[cat-thread-pool]] === cat thread pool -The `thread_pool` command shows cluster wide thread pool statistics per node. By default the active, queue and rejected -statistics are returned for all thread pools. +Returns thread pool statistics for each node in a cluster. Returned information +includes all <> and custom thread +pools. 
+ + +[[cat-thread-pool-api-request]] +==== {api-request-title} + +`GET /_cat/thread_pool/{thread_pool}` + +[[cat-thread-pool-path-params]] +==== {api-path-parms-title} + +`{thread_pool}`:: +(Optional, string) Comma-separated list of thread pool names used to limit the +request. Accepts wildcard expressions. + + +[[cat-thread-pool-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] ++ +-- +If you do not specify which columns to include, the API returns the default +columns in the order listed below. If you explicitly specify one or more +columns, it only returns the specified columns. + +Valid columns are: + +`node_name`:: +(Default) Node name, such as `I8hydUG`. + +`name`:: +(Default) Name of the thread pool, such as `analyze` or `generic`. + +`active`, `a`:: +(Default) Number of active threads in the current thread pool. + +`queue`,`q`:: +(Default) Number of tasks in the queue for the current thread pool. + +`rejected`, `r`:: +(Default) Number of tasks rejected by the thread pool executor. + +`completed`, `c`:: +Number of tasks completed by the thread pool executor. + +`core`, `cr`:: +Configured core number of active threads allowed in the current thread pool. + +`ephemeral_id`,`eid`:: +Ephemeral node ID. + +`host`, `h`:: +Hostname for the current node. + +`ip`, `i`:: +IP address for the current node. + +`keep_alive`, `k`:: +Configured keep alive time for threads. + +`largest`, `l`:: +Highest number of active threads in the current thread pool. + +`max`, `mx`:: +Configured maximum number of active threads allowed in the current thread pool. + +`node_id`, `id`:: +ID of the node, such as `k0zy`. + +`pid`, `p`:: +Process ID of the running node. + +`pool_size`, `psz`:: +Number of threads in the current thread pool. + +`port`, `po`:: +Bound transport port for the current node. 
+ +`queue_size`, `qs`:: +Maximum number of tasks permitted in the queue for the current thread pool. + +`size`, `sz`:: +Configured fixed number of active threads allowed in the current thread pool. + +`type`, `t`:: +Type of thread pool. Returned values are `fixed` or `scaling`. + +-- + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=local] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +`size`:: +(Optional, <>) Multiplier used to display quantities. + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + + +[[cat-thread-pool-api-example]] +==== {api-examples-title} + +[[cat-thread-pool-api-ex-default]] +===== Example with default columns [source,js] -------------------------------------------------- @@ -10,7 +121,7 @@ GET /_cat/thread_pool -------------------------------------------------- // CONSOLE -Which looks like: +The API returns the following response: [source,txt] -------------------------------------------------- @@ -33,66 +144,13 @@ node-0 write 0 0 0 // know how many there will be and we just want to assert that there are // numbers in the response, not *which* numbers are there. 
-The first column is the node name -[source,txt] --------------------------------------------------- -node_name -node-0 --------------------------------------------------- +[[cat-thread-pool-api-ex-headings]] +===== Example with explicit columns -The second column is the thread pool name -[source,txt] --------------------------------------------------- -name -analyze -ccr (default distro only) -fetch_shard_started -fetch_shard_store -flush -force_merge -generic -get -listener -management -ml_autodetect (default distro only) -ml_datafeed (default distro only) -ml_utility (default distro only) -refresh -rollup_indexing (default distro only) -search -security-token-key (default distro only) -snapshot -warmer -watcher (default distro only) -write --------------------------------------------------- - - -The next three columns show the active, queue, and rejected statistics for each thread pool - -[source,txt] --------------------------------------------------- -active queue rejected - 0 0 0 - 0 0 0 - 0 0 0 - 0 0 0 - 0 0 0 - 0 0 0 - 0 0 0 - 0 0 0 - 0 0 0 - 0 0 0 - 1 0 0 - 0 0 0 - 0 0 0 - 0 0 0 - 0 0 0 --------------------------------------------------- - -The cat thread pool API accepts a `thread_pool_patterns` URL parameter for specifying a -comma-separated list of regular expressions to match thread pool names. +The following API request returns the `id`, `name`, `active`, `rejected`, and +`completed` columns. The request limits returned information to the `generic` +thread pool. 
[source,js] -------------------------------------------------- @@ -100,7 +158,7 @@ GET /_cat/thread_pool/generic?v&h=id,name,active,rejected,completed -------------------------------------------------- // CONSOLE -which looks like: +The API returns the following response: [source,txt] -------------------------------------------------- @@ -109,46 +167,3 @@ id name active rejected completed -------------------------------------------------- // TESTRESPONSE[s/0EWUhXeBQtaVGlexUeVwMg/[\\w-]+/ s/\d+/\\d+/ non_json] -Here the host columns and the active, rejected and completed suggest thread pool statistics are displayed. - -All <> and custom thread pools are available. -[float] -===== Thread Pool Fields - -For each thread pool, you can load details about it by using the field names -in the table below. - -[cols="<,<,<",options="header"] -|======================================================================= -|Field Name |Alias |Description -|`type` |`t` |The current (*) type of thread pool (`fixed` or `scaling`) -|`active` |`a` |The number of active threads in the current thread pool -|`pool_size` |`psz` |The number of threads in the current thread pool -|`queue` |`q` |The number of tasks in the queue for the current thread pool -|`queue_size` |`qs` |The maximum number of tasks permitted in the queue for the current thread pool -|`rejected` |`r` |The number of tasks rejected by the thread pool executor -|`largest` |`l` |The highest number of active threads in the current thread pool -|`completed` |`c` |The number of tasks completed by the thread pool executor -|`core` |`cr` |The configured core number of active threads allowed in the current thread pool -|`max` |`mx` |The configured maximum number of active threads allowed in the current thread pool -|`size` |`sz` |The configured fixed number of active threads allowed in the current thread pool -|`keep_alive` |`k` |The configured keep alive time for threads 
-|======================================================================= - -[float] -==== Other Fields - -In addition to details about each thread pool, it is also convenient to get an -understanding of where those thread pools reside. As such, you can request -other details like the `ip` of the responding node(s). - -[cols="<,<,<",options="header"] -|======================================================================= -|Field Name |Alias |Description -|`node_id` |`id` |The unique node ID -|`ephemeral_id`|`eid` |The ephemeral node ID -|`pid` |`p` |The process ID of the running node -|`host` |`h` |The hostname for the current node -|`ip` |`i` |The IP address for the current node -|`port` |`po` |The bound transport port for the current node -|======================================================================= diff --git a/docs/reference/data-frames/apis/get-transform-stats.asciidoc b/docs/reference/data-frames/apis/get-transform-stats.asciidoc index 88536b93f9a..9235e189575 100644 --- a/docs/reference/data-frames/apis/get-transform-stats.asciidoc +++ b/docs/reference/data-frames/apis/get-transform-stats.asciidoc @@ -126,7 +126,7 @@ The API returns the following results: "transforms" : [ { "id" : "ecommerce_transform", - "task_state" : "started", + "state" : "indexing", "stats" : { "pages_processed" : 2, "documents_processed" : 1220, @@ -147,7 +147,6 @@ The API returns the following results: }, "next" : { "checkpoint" : 101, - "indexer_state" : "started", "position" : { "indexer_position" : { "hashtag" : "abcd1234" diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc index a885834ca3c..d0b552f4365 100644 --- a/docs/reference/modules/snapshots.asciidoc +++ b/docs/reference/modules/snapshots.asciidoc @@ -20,6 +20,9 @@ name of the restored index as well as some of its settings. There is a great deal of flexibility in how the snapshot and restore functionality can be used. 
// end::restore-intro[] +You can automate your snapshot backup and restore process by using +<>. + // tag::backup-warning[] WARNING: You cannot back up an Elasticsearch cluster by simply taking a copy of the data directories of all of its nodes. Elasticsearch may be making changes to @@ -124,7 +127,7 @@ which returns: ----------------------------------- // TESTRESPONSE -To retrieve information about multiple repositories, specify a comma-delimited +To retrieve information about multiple repositories, specify a comma-delimited list of repositories. You can also use the * wildcard when specifying repository names. For example, the following request retrieves information about all of the snapshot repositories that start with `repo` or diff --git a/docs/reference/query-dsl/intervals-query.asciidoc b/docs/reference/query-dsl/intervals-query.asciidoc index 6581f3eff3a..18723e87579 100644 --- a/docs/reference/query-dsl/intervals-query.asciidoc +++ b/docs/reference/query-dsl/intervals-query.asciidoc @@ -4,17 +4,25 @@ Intervals ++++ -An `intervals` query allows fine-grained control over the order and proximity of -matching terms. Matching rules are constructed from a small set of definitions, -and the rules are then applied to terms from a particular `field`. +Returns documents based on the order and proximity of matching terms. + +The `intervals` query uses *matching rules*, constructed from a small set of +definitions. Theses rules are then applied to terms from a specified `field`. The definitions produce sequences of minimal intervals that span terms in a -body of text. These intervals can be further combined and filtered by +body of text. These intervals can be further combined and filtered by parent sources. 
-The example below will search for the phrase `my favourite food` appearing -before the terms `hot` and `water` or `cold` and `porridge` in any order, in -the field `my_text` + +[[intervals-query-ex-request]] +==== Example request + +The following `intervals` search returns documents containing `my +favorite food` immediately followed by `hot water` or `cold porridge` in the +`my_text` field. + +This search would match a `my_text` value of `my favorite food is cold +porridge` but not `when it's cold my favorite food is porridge`. [source,js] -------------------------------------------------- @@ -28,7 +36,7 @@ POST _search "intervals" : [ { "match" : { - "query" : "my favourite food", + "query" : "my favorite food", "max_gaps" : 0, "ordered" : true } @@ -42,8 +50,7 @@ POST _search } } ] - }, - "_name" : "favourite_food" + } } } } @@ -51,69 +58,103 @@ POST _search -------------------------------------------------- // CONSOLE -In the above example, the text `my favourite food is cold porridge` would -match because the two intervals matching `my favourite food` and `cold -porridge` appear in the correct order, but the text `when it's cold my -favourite food is porridge` would not match, because the interval matching -`cold porridge` starts before the interval matching `my favourite food`. +[[intervals-top-level-params]] +==== Top-level parameters for `intervals` +[[intervals-rules]] +``:: ++ +-- +(Required, rule object) Field you wish to search. + +The value of this parameter is a rule object used to match documents +based on matching terms, order, and proximity. + +Valid rules include: + +* <> +* <> +* <> +* <> +* <> +* <> +-- [[intervals-match]] -==== `match` +==== `match` rule parameters -The `match` rule matches analyzed text, and takes the following parameters: +The `match` rule matches analyzed text. -[horizontal] `query`:: -The text to match. +(Required, string) Text you wish to find in the provided ``. 
+ `max_gaps`:: -Specify a maximum number of gaps between the terms in the text. Terms that -appear further apart than this will not match. If unspecified, or set to -1, -then there is no width restriction on the match. If set to 0 then the terms -must appear next to each other. ++ +-- +(Optional, integer) Maximum number of positions between the matching terms. +Terms further apart than this are not considered matches. Defaults to +`-1`. + +If unspecified or set to `-1`, there is no width restriction on the match. If +set to `0`, the terms must appear next to each other. +-- + `ordered`:: -Whether or not the terms must appear in their specified order. Defaults to -`false` +(Optional, boolean) +If `true`, matching terms must appear in their specified order. Defaults to +`false`. + `analyzer`:: -Which analyzer should be used to analyze terms in the `query`. By -default, the search analyzer of the top-level field will be used. +(Optional, string) <> used to analyze terms in the `query`. +Defaults to the top-level ``'s analyzer. + `filter`:: -An optional <> +(Optional, <> rule object) An optional interval +filter. + `use_field`:: -If specified, then match intervals from this field rather than the top-level field. -Terms will be analyzed using the search analyzer from this field. This allows you -to search across multiple fields as if they were all the same field; for example, -you could index the same text into stemmed and unstemmed fields, and search for -stemmed tokens near unstemmed ones. +(Optional, string) If specified, then match intervals from this +field rather than the top-level ``. Terms are analyzed using the +search analyzer from this field. This allows you to search across multiple +fields as if they were all the same field; for example, you could index the same +text into stemmed and unstemmed fields, and search for stemmed tokens near +unstemmed ones. 
[[intervals-prefix]] -==== `prefix` +==== `prefix` rule parameters -The `prefix` rule finds terms that start with a specified prefix. The prefix will -expand to match at most 128 terms; if there are more matching terms in the index, -then an error will be returned. To avoid this limit, enable the -<> option on the field being searched. +The `prefix` rule matches terms that start with a specified set of characters. +This prefix can expand to match at most 128 terms. If the prefix matches more +than 128 terms, {es} returns an error. You can use the +<> option in the field mapping to avoid this +limit. -[horizontal] `prefix`:: -Match terms starting with this prefix +(Required, string) Beginning characters of terms you wish to find in the +top-level ``. + `analyzer`:: -Which analyzer should be used to normalize the `prefix`. By default, the -search analyzer of the top-level field will be used. +(Optional, string) <> used to normalize the `prefix`. +Defaults to the top-level ``'s analyzer. + `use_field`:: -If specified, then match intervals from this field rather than the top-level field. -The `prefix` will be normalized using the search analyzer from this field, unless -`analyzer` is specified separately. ++ +-- +(Optional, string) If specified, then match intervals from this field rather +than the top-level ``. + +The `prefix` is normalized using the search analyzer from this field, unless a +separate `analyzer` is specified. +-- [[intervals-wildcard]] -==== `wildcard` +==== `wildcard` rule parameters -The `wildcard` rule finds terms that match a wildcard pattern. The pattern will -expand to match at most 128 terms; if there are more matching terms in the index, -then an error will be returned. +The `wildcard` rule matches terms using a wildcard pattern. This pattern can +expand to match at most 128 terms. If the pattern matches more than 128 terms, +{es} returns an error. 
-[horizontal] `pattern`:: -Find terms matching this pattern +(Required, string) Wildcard pattern used to find matching terms. + -- This parameter supports two wildcard operators: @@ -125,51 +166,112 @@ WARNING: Avoid beginning patterns with `*` or `?`. This can increase the iterations needed to find matching terms and slow search performance. -- `analyzer`:: -Which analyzer should be used to normalize the `pattern`. By default, the -search analyzer of the top-level field will be used. +(Optional, string) <> used to normalize the `pattern`. +Defaults to the top-level ``'s analyzer. + `use_field`:: -If specified, then match intervals from this field rather than the top-level field. -The `pattern` will be normalized using the search analyzer from this field, unless ++ +-- +(Optional, string) If specified, match intervals from this field rather than the +top-level ``. + +The `pattern` is normalized using the search analyzer from this field, unless `analyzer` is specified separately. +-- [[intervals-all_of]] -==== `all_of` +==== `all_of` rule parameters -`all_of` returns returns matches that span a combination of other rules. +The `all_of` rule returns matches that span a combination of other rules. -[horizontal] `intervals`:: -An array of rules to combine. All rules must produce a match in a -document for the overall source to match. +(Required, array of rule objects) An array of rules to combine. All rules must +produce a match in a document for the overall source to match. + `max_gaps`:: -Specify a maximum number of gaps between the rules. Combinations that match -across a distance greater than this will not match. If set to -1 or -unspecified, there is no restriction on this distance. If set to 0, then the -matches produced by the rules must all appear immediately next to each other. ++ +-- +(Optional, integer) Maximum number of positions between the matching terms. +Intervals produced by the rules further apart than this are not considered +matches. 
Defaults to `-1`. + +If unspecified or set to `-1`, there is no width restriction on the match. If +set to `0`, the terms must appear next to each other. +-- + `ordered`:: -Whether the intervals produced by the rules should appear in the order in -which they are specified. Defaults to `false` +(Optional, boolean) If `true`, intervals produced by the rules should appear in +the order in which they are specified. Defaults to `false`. + `filter`:: -An optional <> +(Optional, <> rule object) Rule used to filter +returned intervals. [[intervals-any_of]] -==== `any_of` +==== `any_of` rule parameters -The `any_of` rule emits intervals produced by any of its sub-rules. +The `any_of` rule returns intervals produced by any of its sub-rules. -[horizontal] `intervals`:: -An array of rules to match +(Required, array of rule objects) An array of rules to match. + `filter`:: -An optional <> +(Optional, <> rule object) Rule used to filter +returned intervals. [[interval_filter]] -==== filters +==== `filter` rule parameters -You can filter intervals produced by any rules by their relation to the -intervals produced by another rule. The following example will return -documents that have the words `hot` and `porridge` within 10 positions -of each other, without the word `salty` in between: +The `filter` rule returns intervals based on a query. See +<> for an example. + +`after`:: +(Optional, query object) Query used to return intervals that follow an interval +from the `filter` rule. + +`before`:: +(Optional, query object) Query used to return intervals that occur before an +interval from the `filter` rule. + +`contained_by`:: +(Optional, query object) Query used to return intervals contained by an interval +from the `filter` rule. + +`containing`:: +(Optional, query object) Query used to return intervals that contain an interval +from the `filter` rule. 
+ +`not_contained_by`:: +(Optional, query object) Query used to return intervals that are *not* +contained by an interval from the `filter` rule. + +`not_containing`:: +(Optional, query object) Query used to return intervals that do *not* contain +an interval from the `filter` rule. + +`not_overlapping`:: +(Optional, query object) Query used to return intervals that do *not* overlap +with an interval from the `filter` rule. + +`overlapping`:: +(Optional, query object) Query used to return intervals that overlap with an +interval from the `filter` rule. + +`script`:: +(Optional, <>) Script used to return +matching documents. This script must return a boolean value, `true` or `false`. +See <> for an example. + + +[[intervals-query-note]] +==== Notes + +[[interval-filter-rule-ex]] +===== Filter example + +The following search includes a `filter` rule. It returns documents that have +the words `hot` and `porridge` within 10 positions of each other, without the +word `salty` in between: [source,js] -------------------------------------------------- @@ -196,31 +298,12 @@ POST _search -------------------------------------------------- // CONSOLE -The following filters are available: -[horizontal] -`containing`:: -Produces intervals that contain an interval from the filter rule -`contained_by`:: -Produces intervals that are contained by an interval from the filter rule -`not_containing`:: -Produces intervals that do not contain an interval from the filter rule -`not_contained_by`:: -Produces intervals that are not contained by an interval from the filter rule -`overlapping`:: -Produces intervals that overlap with an interval from the filter rule -`not_overlapping`:: -Produces intervals that do not overlap with an interval from the filter rule -`before`:: -Produces intervals that appear before an interval from the filter role -`after`:: -Produces intervals that appear after an interval from the filter role - [[interval-script-filter]] -==== Script filters +===== Script 
filters -You can also filter intervals based on their start position, end position and -internal gap count, using a script. The script has access to an `interval` -variable, with `start`, `end` and `gaps` methods: +You can use a script to filter intervals based on their start position, end +position, and internal gap count. The following `filter` script uses the +`interval` variable with the `start`, `end`, and `gaps` methods: [source,js] -------------------------------------------------- @@ -244,12 +327,13 @@ POST _search -------------------------------------------------- // CONSOLE + [[interval-minimization]] -==== Minimization +===== Minimization The intervals query always minimizes intervals, to ensure that queries can -run in linear time. This can sometimes cause surprising results, particularly -when using `max_gaps` restrictions or filters. For example, take the +run in linear time. This can sometimes cause surprising results, particularly +when using `max_gaps` restrictions or filters. For example, take the following query, searching for `salty` contained within the phrase `hot porridge`: @@ -277,15 +361,15 @@ POST _search -------------------------------------------------- // CONSOLE -This query will *not* match a document containing the phrase `hot porridge is +This query does *not* match a document containing the phrase `hot porridge is salty porridge`, because the intervals returned by the match query for `hot porridge` only cover the initial two terms in this document, and these do not overlap the intervals covering `salty`. Another restriction to be aware of is the case of `any_of` rules that contain -sub-rules which overlap. In particular, if one of the rules is a strict -prefix of the other, then the longer rule will never be matched, which can -cause surprises when used in combination with `max_gaps`. Consider the +sub-rules which overlap. 
In particular, if one of the rules is a strict +prefix of the other, then the longer rule can never match, which can +cause surprises when used in combination with `max_gaps`. Consider the following query, searching for `the` immediately followed by `big` or `big bad`, immediately followed by `wolf`: @@ -316,10 +400,10 @@ POST _search -------------------------------------------------- // CONSOLE -Counter-intuitively, this query *will not* match the document `the big bad -wolf`, because the `any_of` rule in the middle will only produce intervals +Counter-intuitively, this query does *not* match the document `the big bad +wolf`, because the `any_of` rule in the middle only produces intervals for `big` - intervals for `big bad` being longer than those for `big`, while -starting at the same position, and so being minimized away. In these cases, +starting at the same position, and so being minimized away. In these cases, it's better to rewrite the query so that all of the options are explicitly laid out at the top level: diff --git a/docs/reference/query-dsl/match-query.asciidoc b/docs/reference/query-dsl/match-query.asciidoc index 27dde4c2a91..a894ef0dae2 100644 --- a/docs/reference/query-dsl/match-query.asciidoc +++ b/docs/reference/query-dsl/match-query.asciidoc @@ -185,15 +185,3 @@ The example above creates a boolean query: that matches documents with the term `ny` or the conjunction `new AND york`. By default the parameter `auto_generate_synonyms_phrase_query` is set to `true`. - -.Comparison to query_string / field -************************************************** - -The match family of queries does not go through a "query parsing" -process. It does not support field name prefixes, wildcard characters, -or other "advanced" features. For this reason, chances of it failing are -very small / non existent, and it provides an excellent behavior when it -comes to just analyze and run that text as a query behavior (which is -usually what a text search box does). 
- -************************************************** diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc index 967dd906eec..cced4f30eeb 100644 --- a/docs/reference/query-dsl/query-string-query.asciidoc +++ b/docs/reference/query-dsl/query-string-query.asciidoc @@ -4,8 +4,39 @@ Query string ++++ -A query that uses a query parser in order to parse its content. Here is -an example: +Returns documents based on a provided query string, using a parser with a strict +syntax. + +This query uses a <> to parse and split the provided +query string based on operators, such as `AND` or `NOT`. The query +then <> each split text independently before returning +matching documents. + +You can use the `query_string` query to create a complex search that includes +wildcard characters, searches across multiple fields, and more. While versatile, +the query is strict and returns an error if the query string includes any +invalid syntax. + +[WARNING] +==== +Because it returns an error for any invalid syntax, we don't recommend using +the `query_string` query for search boxes. + +If you don't need to support a query syntax, consider using the +<> query. If you need the features of a query +syntax, use the <> +query, which is less strict. +==== + + +[[query-string-query-ex-request]] +==== Example request + +When running the following search, the `query_string` query splits `(new york +city) OR (big apple)` into two parts: `new york city` and `big apple`. The +`content` field's analyzer then independently converts each part into tokens +before returning matching documents. Because the query syntax does not use +whitespace as an operator, `new york city` is passed as-is to the analyzer. 
[source,js] -------------------------------------------------- @@ -13,154 +44,211 @@ GET /_search { "query": { "query_string" : { - "default_field" : "content", - "query" : "this AND that OR thus" + "query" : "(new york city) OR (big apple)", + "default_field" : "content" } } } -------------------------------------------------- // CONSOLE -The `query_string` query parses the input and splits text around operators. -Each textual part is analyzed independently of each other. For instance the following query: +[[query-string-top-level-params]] +==== Top-level parameters for `query_string` +`query`:: +(Required, string) Query string you wish to parse and use for search. See +<>. -[source,js] --------------------------------------------------- -GET /_search -{ - "query": { - "query_string" : { - "default_field" : "content", - "query" : "(new york city) OR (big apple)" <1> - } - } -} --------------------------------------------------- -// CONSOLE +`default_field`:: ++ +-- +(Optional, string) Default field you wish to search if no field is provided in +the query string. -<1> will be split into `new york city` and `big apple` and each part is then -analyzed independently by the analyzer configured for the field. +Defaults to the `index.query.default_field` index setting, which has a default +value of `*`. The `*` value extracts all fields that are eligible to term +queries and filters the metadata fields. All extracted fields are then combined +to build a query if no `prefix` is specified. -WARNING: Whitespaces are not considered operators, this means that `new york city` -will be passed "as is" to the analyzer configured for the field. If the field is a `keyword` -field the analyzer will create a single term `new york city` and the query builder will -use this term in the query. If you want to query each term separately you need to add explicit -operators around the terms (e.g. `new AND york AND city`). 
+WARNING: There is a limit on the number of fields that can be queried at once. +It is defined by the `indices.query.bool.max_clause_count` +<>, which defaults to 1024. +-- -When multiple fields are provided it is also possible to modify how the different -field queries are combined inside each textual part using the `type` parameter. -The possible modes are described <> and the default is `best_fields`. +`allow_leading_wildcard`:: +(Optional, boolean) If `true`, the wildcard characters `*` and `?` are allowed +as the first character of the query string. Defaults to `true`. -The `query_string` top level parameters include: +`analyze_wildcard`:: +(Optional, boolean) If `true`, the query attempts to analyze wildcard terms in +the query string. Defaults to `false`. -[cols="<,<",options="header",] -|======================================================================= -|Parameter |Description -|`query` |The actual query to be parsed. See <>. +`analyzer`:: +(Optional, string) <> used to convert text in the +query string into tokens. Defaults to the +<> mapped for the +`default_field`. If no analyzer is mapped, the index's default analyzer is used. -|`default_field` |The default field for query terms if no prefix field is -specified. Defaults to the `index.query.default_field` index settings, which in -turn defaults to `*`. `*` extracts all fields in the mapping that are eligible -to term queries and filters the metadata fields. All extracted fields are then -combined to build a query when no prefix field is provided. +`auto_generate_synonyms_phrase_query`:: +(Optional, boolean) If `true`, <> +queries are automatically created for multi-term synonyms. Defaults to `true`. +See <> for an example. -WARNING: There is a limit on the number of fields that can be queried -at once. It is defined by the `indices.query.bool.max_clause_count` <> -which defaults to 1024. +`boost`:: ++ +-- +(Optional, float) Floating point number used to decrease or increase the +<> of the query. 
Defaults to `1.0`. -|`default_operator` |The default operator used if no explicit operator -is specified. For example, with a default operator of `OR`, the query -`capital of Hungary` is translated to `capital OR of OR Hungary`, and -with default operator of `AND`, the same query is translated to -`capital AND of AND Hungary`. The default value is `OR`. +Boost values are relative to the default value of `1.0`. A boost value between +`0` and `1.0` decreases the relevance score. A value greater than `1.0` +increases the relevance score. +-- -|`analyzer` |The analyzer name used to analyze the query string. +`default_operator`:: ++ +-- +(Optional, string) Default boolean logic used to interpret text in the query +string if no operators are specified. Valid values are: -|`quote_analyzer` |The name of the analyzer that is used to analyze -quoted phrases in the query string. For those parts, it overrides other -analyzers that are set using the `analyzer` parameter or the -<> setting. + `OR` (Default):: +For example, a query string of `capital of Hungary` is interpreted as `capital +OR of OR Hungary`. -|`allow_leading_wildcard` |When set, `*` or `?` are allowed as the first -character. Defaults to `true`. + `AND`:: +For example, a query string of `capital of Hungary` is interpreted as `capital +AND of AND Hungary`. +-- -|`enable_position_increments` |Set to `true` to enable position -increments in result queries. Defaults to `true`. +`enable_position_increments`:: +(Optional, boolean) If `true`, enable position increments in queries constructed +from a `query_string` search. Defaults to `true`. -|`fuzzy_max_expansions` |Controls the number of terms fuzzy queries will -expand to. Defaults to `50` +`fields`:: ++ +-- +(Optional, array of strings) Array of fields you wish to search. -|`fuzziness` |Set the fuzziness for fuzzy queries. Defaults -to `AUTO`. See <> for allowed settings. +You can use this parameter query to search across multiple fields. See +<>. 
+-- -|`fuzzy_prefix_length` |Set the prefix length for fuzzy queries. Default -is `0`. +`fuzziness`:: +(Optional, string) Maximum edit distance allowed for matching. See <> +for valid values and more information. -|`fuzzy_transpositions` |Set to `false` to disable fuzzy transpositions (`ab` -> `ba`). -Default is `true`. +`fuzzy_max_expansions`:: +(Optional, integer) Maximum number of terms to which the query expands for fuzzy +matching. Defaults to `50`. -|`phrase_slop` |Sets the default slop for phrases. If zero, then exact -phrase matches are required. Default value is `0`. +`fuzzy_prefix_length`:: +(Optional, integer) Number of beginning characters left unchanged for fuzzy +matching. Defaults to `0`. -|`boost` |Sets the boost value of the query. Defaults to `1.0`. +`fuzzy_transpositions`:: +(Optional, boolean) If `true`, edits for fuzzy matching include +transpositions of two adjacent characters (ab → ba). Defaults to `true`. -|`analyze_wildcard` |By default, wildcards terms in a query string are -not analyzed. By setting this value to `true`, a best effort will be -made to analyze those as well. +`lenient`:: +(Optional, boolean) If `true`, format-based errors, such as providing a text +value for a <> field, are ignored. Defaults to `false`. -|`max_determinized_states` |Limit on how many automaton states regexp -queries are allowed to create. This protects against too-difficult -(e.g. exponentially hard) regexps. Defaults to 10000. +`max_determinized_states`:: ++ +-- +(Optional, integer) Maximum number of +https://en.wikipedia.org/wiki/Deterministic_finite_automaton[automaton states] +required for the query. Default is `10000`. -|`minimum_should_match` |A value controlling how many "should" clauses -in the resulting boolean query should match. It can be an absolute value -(`2`), a percentage (`30%`) or a -<>. +{es} uses https://lucene.apache.org/core/[Apache Lucene] internally to parse +regular expressions. 
Lucene converts each regular expression to a finite +automaton containing a number of determinized states. -|`lenient` |If set to `true` will cause format based failures (like -providing text to a numeric field) to be ignored. +You can use this parameter to prevent that conversion from unintentionally +consuming too many resources. You may need to increase this limit to run complex +regular expressions. +-- -|`time_zone` | Time Zone to be applied to any range query related to dates. +`minimum_should_match`:: +(Optional, string) Minimum number of clauses that must match for a document to +be returned. See the <> for valid values and more information. See +<> for an example. -|`quote_field_suffix` | A suffix to append to fields for quoted parts of -the query string. This allows to use a field that has a different analysis chain -for exact matching. Look <> for a -comprehensive example. +`quote_analyzer`:: ++ +-- +(Optional, string) <> used to convert quoted text in the +query string into tokens. Defaults to the +<> mapped for the +`default_field`. -|`auto_generate_synonyms_phrase_query` |Whether phrase queries should be automatically generated for multi terms synonyms. -Defaults to `true`. +For quoted text, this parameter overrides the analyzer specified in the +`analyzer` parameter. +-- -|======================================================================= +`phrase_slop`:: +(Optional, integer) Maximum number of positions allowed between matching tokens +for phrases. Defaults to `0`. If `0`, exact phrase matches are required. +Transposed terms have a slop of `2`. -When a multi term query is being generated, one can control how it gets -rewritten using the -<> -parameter. +`quote_field_suffix`:: ++ +-- +(Optional, string) Suffix appended to quoted text in the query string. -[float] -==== Default Field +You can use this suffix to use a different analysis method for exact matches. +See <>. 
+-- -When not explicitly specifying the field to search on in the query -string syntax, the `index.query.default_field` will be used to derive -which field to search on. If the `index.query.default_field` is not specified, -the `query_string` will automatically attempt to determine the existing fields in the index's -mapping that are queryable, and perform the search on those fields. -This will not include nested documents, use a nested query to search those documents. +`rewrite`:: +(Optional, string) Method used to rewrite the query. For valid values and more +information, see the <>. -NOTE: For mappings with a large number of fields, searching across all queryable -fields in the mapping could be expensive. +`time_zone`:: ++ +-- +(Optional, string) +https://en.wikipedia.org/wiki/List_of_UTC_time_offsets[Coordinated Universal +Time (UTC) offset] or +https://en.wikipedia.org/wiki/List_of_tz_database_time_zones[IANA time zone] +used to convert `date` values in the query string to UTC. -[float] -==== Multi Field +Valid values are ISO 8601 UTC offsets, such as `+01:00` or `-08:00`, and IANA +time zone IDs, such as `America/Los_Angeles`. -The `query_string` query can also run against multiple fields. Fields can be -provided via the `fields` parameter (example below). +[NOTE] +==== +The `time_zone` parameter does **not** affect the <> value +of `now`. `now` is always the current system time in UTC. However, the +`time_zone` parameter does convert dates calculated using `now` and +<>. For example, the `time_zone` parameter will +convert a value of `now/d`. +==== +-- + +[[query-string-query-notes]] +==== Notes + +include::query-string-syntax.asciidoc[] + +[[query-string-nested]] +====== Avoid using the `query_string` query for nested documents + +`query_string` searches do not return <> documents. To search +nested documents, use the <>.
+ +[[query-string-multi-field]] +====== Search multiple fields + +You can use the `fields` parameter to perform a `query_string` search across +multiple fields. The idea of running the `query_string` query against multiple fields is to expand each query term to an OR clause like this: - field1:query_term OR field2:query_term | ... +``` +field1:query_term OR field2:query_term | ... +``` For example, the following query @@ -252,21 +340,6 @@ GET /_search NOTE: Since `\` (backslash) is a special character in json strings, it needs to be escaped, hence the two backslashes in the above `query_string`. -When running the `query_string` query against multiple fields, the -following additional parameters are allowed: - -[cols="<,<",options="header",] -|======================================================================= -|Parameter |Description - -|`type` |How the fields should be combined to build the text query. -See <> for a complete example. -Defaults to `best_fields` - -|`tie_breaker` |The disjunction max tie breaker for multi fields. -Defaults to `0` -|======================================================================= - The fields parameter can also include pattern based field names, allowing to automatically expand to the relevant fields (dynamically introduced fields included). For example: @@ -285,8 +358,50 @@ GET /_search -------------------------------------------------- // CONSOLE -[float] -==== Synonyms +[[query-string-multi-field-parms]] +====== Additional parameters for multiple field searches + +When running the `query_string` query against multiple fields, the +following additional parameters are supported. + +`type`:: ++ +-- +(Optional, string) Determines how the query matches and scores documents. Valid +values are: + +`best_fields` (Default):: +Finds documents which match any field and uses the highest +<> from any matching field. See +<>. 
+ +`bool_prefix`:: +Creates a `match_bool_prefix` query on each field and combines the `_score` from +each field. See <>. + +`cross_fields`:: +Treats fields with the same `analyzer` as though they were one big field. Looks +for each word in **any** field. See <>. + +`most_fields`:: +Finds documents which match any field and combines the `_score` from each field. +See <>. + +`phrase`:: +Runs a `match_phrase` query on each field and uses the `_score` from the best +field. See <>. + +`phrase_prefix`:: +Runs a `match_phrase_prefix` query on each field and uses the `_score` from the +best field. See <>. + +NOTE: +Additional top-level `multi_match` parameters may be available based on the +<> value. +-- + +[[query-string-synonyms]] +===== Synonyms and the `query_string` query The `query_string` query supports multi-terms synonym expansion with the <> token filter. When this filter is used, the parser creates a phrase query for each multi-terms synonyms. @@ -318,8 +433,8 @@ The example above creates a boolean query: that matches documents with the term `ny` or the conjunction `new AND york`. By default the parameter `auto_generate_synonyms_phrase_query` is set to `true`. -[float] -==== Minimum should match +[[query-string-min-should-match]] +===== How `minimum_should_match` works The `query_string` splits the query around each operator to create a boolean query for the entire input. You can use `minimum_should_match` to control how @@ -349,8 +464,8 @@ The example above creates a boolean query: that matches documents with at least two of the terms `this`, `that` or `thus` in the single field `title`. 
-[float] -===== Multi Field +[[query-string-min-should-match-multi]] +===== How `minimum_should_match` works for multiple fields [source,js] -------------------------------------------------- @@ -404,8 +519,11 @@ The example above creates a boolean query: that matches documents with at least two of the three "should" clauses, each of them made of the disjunction max over the fields for each term. -[float] -===== Cross Field +[[query-string-min-should-match-cross]] +===== How `minimum_should_match` works for cross-field searches + +A `cross_fields` value in the `type` field indicates fields with the same +analyzer are grouped together when the input is analyzed. [source,js] -------------------------------------------------- @@ -426,13 +544,8 @@ GET /_search -------------------------------------------------- // CONSOLE -The `cross_fields` value in the `type` field indicates that fields that have the -same analyzer should be grouped together when the input is analyzed. - The example above creates a boolean query: `(blended(terms:[field2:this, field1:this]) blended(terms:[field2:that, field1:that]) blended(terms:[field2:thus, field1:thus]))~2` that matches documents with at least two of the three per-term blended queries. - -include::query-string-syntax.asciidoc[] diff --git a/docs/reference/query-dsl/query-string-syntax.asciidoc b/docs/reference/query-dsl/query-string-syntax.asciidoc index 765b54b5883..03a2e8b8212 100644 --- a/docs/reference/query-dsl/query-string-syntax.asciidoc +++ b/docs/reference/query-dsl/query-string-syntax.asciidoc @@ -1,6 +1,6 @@ [[query-string-syntax]] -==== Query string syntax +===== Query string syntax The query string ``mini-language'' is used by the <> and by the @@ -14,10 +14,9 @@ phrase, in the same order. Operators allow you to customize the search -- the available options are explained below. 
-===== Field names +====== Field names -As mentioned in <>, the `default_field` is searched for the -search terms, but it is possible to specify other fields in the query syntax: +You can specify fields to search in the query syntax: * where the `status` field contains `active` @@ -40,7 +39,7 @@ search terms, but it is possible to specify other fields in the query syntax: _exists_:title -===== Wildcards +====== Wildcards Wildcard searches can be run on individual terms, using `?` to replace a single character, and `*` to replace zero or more characters: @@ -88,7 +87,7 @@ analyzed and a boolean query will be built out of the different tokens, by ensuring exact matches on the first N-1 tokens, and prefix match on the last token. -===== Regular expressions +====== Regular expressions Regular expression patterns can be embedded in the query string by wrapping them in forward-slashes (`"/"`): @@ -108,7 +107,7 @@ Elasticsearch to visit every term in the index: Use with caution! ======= -===== Fuzziness +====== Fuzziness We can search for terms that are similar to, but not exactly like our search terms, using the ``fuzzy'' @@ -128,7 +127,7 @@ sufficient to catch 80% of all human misspellings. It can be specified as: quikc~1 -===== Proximity searches +====== Proximity searches While a phrase query (eg `"john smith"`) expects all of the terms in exactly the same order, a proximity query allows the specified words to be further @@ -143,7 +142,7 @@ query string, the more relevant that document is considered to be. When compared to the above example query, the phrase `"quick fox"` would be considered more relevant than `"quick brown fox"`. -===== Ranges +====== Ranges Ranges can be specified for date, numeric or string fields. Inclusive ranges are specified with square brackets `[min TO max]` and exclusive ranges with @@ -197,7 +196,7 @@ The parsing of ranges in query strings can be complex and error prone. It is much more reliable to use an explicit <>. 
-===== Boosting +====== Boosting Use the _boost_ operator `^` to make one term more relevant than another. For instance, if we want to find all documents about foxes, but we are @@ -212,7 +211,7 @@ Boosts can also be applied to phrases or to groups: "john smith"^2 (foo bar)^4 -===== Boolean operators +====== Boolean operators By default, all terms are optional, as long as one term matches. A search for `foo bar baz` will find any document that contains one or more of @@ -255,7 +254,7 @@ would look like this: } -===== Grouping +====== Grouping Multiple terms or clauses can be grouped together with parentheses, to form sub-queries: @@ -267,7 +266,7 @@ of a sub-query: status:(active OR pending) title:(full text search)^2 -===== Reserved characters +====== Reserved characters If you need to use any of the characters which function as operators in your query itself (and not as operators), then you should escape them with @@ -283,7 +282,9 @@ NOTE: `<` and `>` can't be escaped at all. The only way to prevent them from attempting to create a range query is to remove them from the query string entirely. -===== Empty Query +====== Whitespaces and empty queries + +Whitespace is not considered an operator. If the query string is empty or only contains whitespaces the query will yield an empty result set. diff --git a/docs/reference/query-dsl/simple-query-string-query.asciidoc b/docs/reference/query-dsl/simple-query-string-query.asciidoc index cb8e302e259..44f811007a6 100644 --- a/docs/reference/query-dsl/simple-query-string-query.asciidoc +++ b/docs/reference/query-dsl/simple-query-string-query.asciidoc @@ -4,10 +4,21 @@ Simple query string ++++ -A query that uses the SimpleQueryParser to parse its context. Unlike the -regular `query_string` query, the `simple_query_string` query will never -throw an exception, and discards invalid parts of the query. 
Here is -an example: +Returns documents based on a provided query string, using a parser with a +limited but fault-tolerant syntax. + +This query uses a <> to parse and +split the provided query string into terms based on special operators. The query +then <> each term independently before returning matching +documents. + +While its syntax is more limited than the +<>, the `simple_query_string` +query does not return errors for invalid syntax. Instead, it ignores any invalid +parts of the query string. + +[[simple-query-string-query-ex-request]] +==== Example request [source,js] -------------------------------------------------- @@ -24,72 +35,108 @@ GET /_search -------------------------------------------------- // CONSOLE -The `simple_query_string` top level parameters include: -[cols="<,<",options="header",] -|======================================================================= -|Parameter |Description -|`query` |The actual query to be parsed. See below for syntax. +[[simple-query-string-top-level-params]] +==== Top-level parameters for `simple_query_string` -|`fields` |The fields to perform the parsed query against. Defaults to the -`index.query.default_field` index settings, which in turn defaults to `*`. `*` -extracts all fields in the mapping that are eligible to term queries and filters -the metadata fields. +`query`:: +(Required, string) Query string you wish to parse and use for search. See <>. -WARNING: There is a limit on the number of fields that can be queried -at once. It is defined by the `indices.query.bool.max_clause_count` <> -which defaults to 1024. +`fields`:: ++ +-- +(Optional, array of strings) Array of fields you wish to search. -|`default_operator` |The default operator used if no explicit operator -is specified. For example, with a default operator of `OR`, the query -`capital of Hungary` is translated to `capital OR of OR Hungary`, and -with default operator of `AND`, the same query is translated to -`capital AND of AND Hungary`. 
The default value is `OR`. +This field accepts wildcard expressions. You also can boost relevance scores for +matches to particular fields using a caret (`^`) notation. See +<> for examples. -|`analyzer` |Force the analyzer to use to analyze each term of the query when -creating composite queries. +Defaults to the `index.query.default_field` index setting, which has a default +value of `*`. The `*` value extracts all fields that are eligible to term +queries and filters the metadata fields. All extracted fields are then combined +to build a query if no `prefix` is specified. -|`flags` |A set of <> specifying which features of the -`simple_query_string` to enable. Defaults to `ALL`. +WARNING: There is a limit on the number of fields that can be queried at once. +It is defined by the `indices.query.bool.max_clause_count` +<>, which defaults to `1024`. +-- -|`analyze_wildcard` | Whether terms of prefix queries should be automatically -analyzed or not. If `true` a best effort will be made to analyze the prefix. However, -some analyzers will be not able to provide a meaningful results -based just on the prefix of a term. Defaults to `false`. +`default_operator`:: ++ +-- +(Optional, string) Default boolean logic used to interpret text in the query +string if no operators are specified. Valid values are: -|`lenient` | If set to `true` will cause format based failures -(like providing text to a numeric field) to be ignored. +`OR` (Default):: +For example, a query string of `capital of Hungary` is interpreted as `capital +OR of OR Hungary`. -|`minimum_should_match` | The minimum number of clauses that must match for a - document to be returned. See the - <> documentation for the - full list of options. +`AND`:: +For example, a query string of `capital of Hungary` is interpreted as `capital +AND of AND Hungary`. +-- -|`quote_field_suffix` | A suffix to append to fields for quoted parts of -the query string. 
This allows to use a field that has a different analysis chain -for exact matching. Look <> for a -comprehensive example. +`all_fields`:: +deprecated:[6.0.0, set `fields` to `*` instead](Optional, boolean) If `true`, +search all searchable fields in the index's field mapping. -|`auto_generate_synonyms_phrase_query` |Whether phrase queries should be automatically generated for multi terms synonyms. -Defaults to `true`. +`analyze_wildcard`:: +(Optional, boolean) If `true`, the query attempts to analyze wildcard terms in +the query string. Defaults to `false`. -|`all_fields` | deprecated[6.0.0, set `fields` to `*` instead] -Perform the query on all fields detected in the mapping that can -be queried. +`analyzer`:: +(Optional, string) <> used to convert text in the +query string into tokens. Defaults to the +<> mapped for the +`default_field`. If no analyzer is mapped, the index's default analyzer is used. -|`fuzzy_prefix_length` |Set the prefix length for fuzzy queries. Default -is `0`. +`auto_generate_synonyms_phrase_query`:: +(Optional, boolean) If `true`, <> +queries are automatically created for multi-term synonyms. Defaults to `true`. +See <> for an example. -|`fuzzy_max_expansions` |Controls the number of terms fuzzy queries will -expand to. Defaults to `50` +`flags`:: +(Optional, string) List of enabled operators for the +<>. Defaults to `ALL` +(all operators). See <> for valid values. -|`fuzzy_transpositions` |Set to `false` to disable fuzzy transpositions (`ab` -> `ba`). -Default is `true`. -|======================================================================= +`fuzzy_max_expansions`:: +(Optional, integer) Maximum number of terms to which the query expands for fuzzy +matching. Defaults to `50`. -[float] -===== Simple Query String Syntax -The `simple_query_string` supports the following special characters: +`fuzzy_prefix_length`:: +(Optional, integer) Number of beginning characters left unchanged for fuzzy +matching. Defaults to `0`. 
+ +`fuzzy_transpositions`:: +(Optional, boolean) If `true`, edits for fuzzy matching include +transpositions of two adjacent characters (ab → ba). Defaults to `true`. + +`lenient`:: +(Optional, boolean) If `true`, format-based errors, such as providing a text +value for a <> field, are ignored. Defaults to `false`. + +`minimum_should_match`:: +(Optional, string) Minimum number of clauses that must match for a document to +be returned. See the <> for valid values and more information. + +`quote_field_suffix`:: ++ +-- +(Optional, string) Suffix appended to quoted text in the query string. + +You can use this suffix to use a different analysis method for exact matches. +See <>. +-- + + +[[simple-query-string-query-notes]] +==== Notes + +[[simple-query-string-syntax]] +===== Simple query string syntax +The `simple_query_string` query supports the following operators: * `+` signifies AND operation * `|` signifies OR operation @@ -100,11 +147,11 @@ The `simple_query_string` supports the following special characters: * `~N` after a word signifies edit distance (fuzziness) * `~N` after a phrase signifies slop amount -In order to search for any of these special characters, they will need to -be escaped with `\`. +To use one of these characters literally, escape it with a preceding backslash +(`\`). -Be aware that this syntax may have a different behavior depending on the -`default_operator` value. For example, consider the following query: +The behavior of these operators may differ depending on the `default_operator` +value. 
For example: [source,js] -------------------------------------------------- @@ -120,47 +167,20 @@ GET /_search -------------------------------------------------- // CONSOLE -You may expect that documents containing only "foo" or "bar" will be returned, -as long as they do not contain "baz", however, due to the `default_operator` -being OR, this really means "match documents that contain "foo" or documents -that contain "bar", or documents that don't contain "baz". If this is unintended -then the query can be switched to `"foo bar +-baz"` which will not return -documents that contain "baz". +This search is intended to only return documents containing `foo` or `bar` that +also do **not** contain `baz`. However, because of a `default_operator` of `OR`, +this search actually returns documents that contain `foo` or `bar` and any +documents that don't contain `baz`. To return documents as intended, change the +query string to `foo bar +-baz`. -[float] -==== Default Field -When not explicitly specifying the field to search on in the query -string syntax, the `index.query.default_field` will be used to derive -which fields to search on. It defaults to `*` and the query will automatically -attempt to determine the existing fields in the index's mapping that are queryable, -and perform the search on those fields. - -[float] -==== Multi Field -The fields parameter can also include pattern based field names, -allowing to automatically expand to the relevant fields (dynamically -introduced fields included). For example: - -[source,js] -------------------------------------------------- -GET /_search -{ - "query": { - "simple_query_string" : { - "fields" : ["content", "name.*^5"], - "query" : "foo bar baz" - } - } -} --------------------------------------------------- -// CONSOLE - -[float] [[supported-flags]] -==== Flags -`simple_query_string` support multiple flags to specify which parsing features -should be enabled.
It is specified as a `|`-delimited string with the -`flags` parameter: +===== Limit operators +You can use the `flags` parameter to limit the supported operators for the +simple query string syntax. + +To explicitly enable only specific operators, use a `|` separator. For example, +a `flags` value of `OR|AND|PREFIX` disables all operators except `OR`, `AND`, +and `PREFIX`. [source,js] -------------------------------------------------- @@ -176,28 +196,92 @@ GET /_search -------------------------------------------------- // CONSOLE +[[supported-flags-values]] +====== Valid values The available flags are: -[cols="<,<",options="header",] -|======================================================================= -|Flag |Description -|`ALL` |Enables all parsing features. This is the default. -|`NONE` |Switches off all parsing features. -|`AND` |Enables the `+` AND operator. -|`OR` |Enables the `\|` OR operator. -|`NOT` |Enables the `-` NOT operator. -|`PREFIX` |Enables the `*` Prefix operator. -|`PHRASE` |Enables the `"` quotes operator used to search for phrases. -|`PRECEDENCE` |Enables the `(` and `)` operators to control operator precedence. -|`ESCAPE` |Enables `\` as the escape character. -|`WHITESPACE` |Enables whitespaces as split characters. -|`FUZZY` |Enables the `~N` operator after a word where N is an integer denoting the allowed edit distance for matching (see <>). -|`SLOP` |Enables the `~N` operator after a phrase where N is an integer denoting the slop amount. -|`NEAR` |Synonymous to `SLOP`. -|======================================================================= +`ALL` (Default):: +Enables all optional operators. -[float] -==== Synonyms +`AND`:: +Enables the `+` AND operator. + +`ESCAPE`:: +Enables `\` as an escape character. + +`FUZZY`:: +Enables the `~N` operator after a word, where `N` is an integer denoting the +allowed edit distance for matching. See <>. 
+ +`NEAR`:: +Enables the `~N` operator, after a phrase where `N` is the maximum number of +positions allowed between matching tokens. Synonymous to `SLOP`. + +`NONE`:: +Disables all operators. + +`NOT`:: +Enables the `-` NOT operator. + +`OR`:: +Enables the `\|` OR operator. + +`PHRASE`:: +Enables the `"` quotes operator used to search for phrases. + +`PRECEDENCE`:: +Enables the `(` and `)` operators to control operator precedence. + +`PREFIX`:: +Enables the `*` prefix operator. + +`SLOP`:: +Enables the `~N` operator, after a phrase where `N` is maximum number of +positions allowed between matching tokens. Synonymous to `NEAR`. + +`WHITESPACE`:: +Enables whitespace as split characters. + +[[simple-query-string-boost]] +===== Wildcards and per-field boosts in the `fields` parameter + +Fields can be specified with wildcards, eg: + +[source,js] +-------------------------------------------------- +GET /_search +{ + "query": { + "simple_query_string" : { + "query": "Will Smith", + "fields": [ "title", "*_name" ] <1> + } + } +} +-------------------------------------------------- +// CONSOLE +<1> Query the `title`, `first_name` and `last_name` fields. + +Individual fields can be boosted with the caret (`^`) notation: + +[source,js] +-------------------------------------------------- +GET /_search +{ + "query": { + "simple_query_string" : { + "query" : "this is a test", + "fields" : [ "subject^3", "message" ] <1> + } + } +} +-------------------------------------------------- +// CONSOLE + +<1> The `subject` field is three times as important as the `message` field. + +[[simple-query-string-synonyms]] +===== Synonyms The `simple_query_string` query supports multi-terms synonym expansion with the <> token filter. When this filter is used, the parser creates a phrase query for each multi-terms synonyms. 
diff --git a/docs/reference/scripting/security.asciidoc b/docs/reference/scripting/security.asciidoc index 421cec2ccf7..4c7449bde65 100644 --- a/docs/reference/scripting/security.asciidoc +++ b/docs/reference/scripting/security.asciidoc @@ -101,7 +101,7 @@ to be `none`. [source,yaml] ---- -script.allowed_contexts: search, update <1> +script.allowed_contexts: score, update <1> ---- -<1> This will allow only search and update scripts to be executed but not +<1> This will allow only scoring and update scripts to be executed but not aggs or plugin scripts (or any other contexts). diff --git a/docs/reference/search/request/search-type.asciidoc b/docs/reference/search/request/search-type.asciidoc index 684d8435889..0695b604338 100644 --- a/docs/reference/search/request/search-type.asciidoc +++ b/docs/reference/search/request/search-type.asciidoc @@ -50,6 +50,13 @@ During the second phase, the coordinating node requests the document content (and highlighted snippets, if any) from *only the relevant shards*. +[source,js] +-------------------------------------------------- +GET twitter/_search?search_type=query_then_fetch +-------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] + NOTE: This is the default setting, if you do not specify a `search_type` in your request. @@ -62,4 +69,9 @@ Same as "Query Then Fetch", except for an initial scatter phase which goes and computes the distributed term frequencies for more accurate scoring. 
- +[source,js] +-------------------------------------------------- +GET twitter/_search?search_type=dfs_query_then_fetch +-------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] diff --git a/docs/reference/sql/endpoints/odbc/configuration.asciidoc b/docs/reference/sql/endpoints/odbc/configuration.asciidoc index 70ba437b648..0a8f9eac6fd 100644 --- a/docs/reference/sql/endpoints/odbc/configuration.asciidoc +++ b/docs/reference/sql/endpoints/odbc/configuration.asciidoc @@ -273,12 +273,12 @@ logging. The following is a list of additional parameters that can be configured for a particular connection, in case the default behavior of the driver is not -suitable. This can be done within the client application, in a manner -particular to that application, generally in a free text input box (sometimes -named "Connection string", "String extras", or similar). The format of the -string is `Attribute1=Value1`. Multiple attributes can be specified, separated -by a semicolon `Attribute1=Value1;Attribute2=Value2;`. The attribute names are -given below. +suitable. For earlier versions of the driver, this needs to be done within the +client application, in a manner particular to that application, generally in a +free text input box (sometimes named "Connection string", "String extras", or +similar). The format of the string is `Attribute1=Value1`. Multiple attributes +can be specified, separated by a semicolon +`Attribute1=Value1;Attribute2=Value2;`. The attribute names are given below. `Timeout` (default: `0`):: The maximum time (in seconds) a request to the server can take. This can be @@ -354,3 +354,18 @@ it as the value for the column. If not set, the server will return an error. This corresponds to {es-sql}'s request parameter `field_multi_value_leniency` (see <>). + +`AutoEscapePVA` (default: `true`):: +The pattern-value arguments make use of `_` and `%` as special characters to +build pattern matching values.
Some applications, however, use these characters as +regular ones, which can lead to {es-sql} returning more data than the app +intended. With the auto escaping, the driver will inspect the arguments and +will escape these special characters if not already done by the application. + + +`IndexIncludeFrozen` (default: `false`):: +If this parameter is `true`, the server will include the frozen indices in the +query execution. +This corresponds to {es-sql}'s request parameter `index_include_frozen` +(see <>). + diff --git a/docs/reference/sql/endpoints/odbc/installation.asciidoc b/docs/reference/sql/endpoints/odbc/installation.asciidoc index 3a024e443d7..8f264c7f9ee 100644 --- a/docs/reference/sql/endpoints/odbc/installation.asciidoc +++ b/docs/reference/sql/endpoints/odbc/installation.asciidoc @@ -8,14 +8,15 @@ The {odbc} can be installed on Microsoft Windows using an MSI package. The insta [[prerequisites]] ==== Installation Prerequisites +The recommended installation platform is Windows 10 64 bit _or_ Windows Server 2016 64 bit. + Before you install the {odbc} you need to meet the following prerequisites; -* Windows 10 64 bit _or_ Windows Server 2016 64 bit operating system * .NET Framework 4.0 full - https://www.microsoft.com/en-au/download/details.aspx?id=17718 * Microsoft Visual C++ Redistributable for Visual Studio 2017 - https://support.microsoft.com/en-au/help/2977003/the-latest-supported-visual-c-downloads -- The 64 bit driver requires the x64 redistributable (this also installs the components needed for the 32 bit driver) -- The 32 bit driver requires the x86 redistributable -* Elevated privileges (administrator) for the User performing the installation +- The 64 bit driver requires the x64 redistributable +- The 32 bit driver requires the x86 or the x64 redistributable (the latter also installs the components needed for the 32 bit driver) +* Elevated privileges (administrator) for the User performing the installation.
If you fail to meet any of the prerequisites the installer will show an error message and abort the installation. diff --git a/docs/reference/vectors/vector-functions.asciidoc b/docs/reference/vectors/vector-functions.asciidoc index d08af2d03bf..31b6b211e6a 100644 --- a/docs/reference/vectors/vector-functions.asciidoc +++ b/docs/reference/vectors/vector-functions.asciidoc @@ -29,6 +29,9 @@ PUT my_index }, "my_sparse_vector" : { "type" : "sparse_vector" + }, + "status" : { + "type" : "keyword" } } } @@ -37,13 +40,15 @@ PUT my_index PUT my_index/_doc/1 { "my_dense_vector": [0.5, 10, 6], - "my_sparse_vector": {"2": 1.5, "15" : 2, "50": -1.1, "4545": 1.1} + "my_sparse_vector": {"2": 1.5, "15" : 2, "50": -1.1, "4545": 1.1}, + "status" : "published" } PUT my_index/_doc/2 { "my_dense_vector": [-0.5, 10, 10], - "my_sparse_vector": {"2": 2.5, "10" : 1.3, "55": -2.3, "113": 1.6} + "my_sparse_vector": {"2": 2.5, "10" : 1.3, "55": -2.3, "113": 1.6}, + "status" : "published" } -------------------------------------------------- @@ -59,13 +64,19 @@ GET my_index/_search { "query": { "script_score": { - "query": { - "match_all": {} + "query" : { + "bool" : { + "filter" : { + "term" : { + "status" : "published" <1> + } + } + } }, "script": { - "source": "cosineSimilarity(params.query_vector, doc['my_dense_vector']) + 1.0", <1> + "source": "cosineSimilarity(params.query_vector, doc['my_dense_vector']) + 1.0", <2> "params": { - "query_vector": [4, 3.4, -0.2] <2> + "query_vector": [4, 3.4, -0.2] <3> } } } @@ -73,8 +84,9 @@ GET my_index/_search } -------------------------------------------------- // CONSOLE -<1> The script adds 1.0 to the cosine similarity to prevent the score from being negative. -<2> To take advantage of the script optimizations, provide a query vector as a script parameter. +<1> To restrict the number of documents on which script score calculation is applied, provide a filter. +<2> The script adds 1.0 to the cosine similarity to prevent the score from being negative. 
+<3> To take advantage of the script optimizations, provide a query vector as a script parameter. NOTE: If a document's dense vector field has a number of dimensions different from the query's vector, an error will be thrown. @@ -88,8 +100,14 @@ GET my_index/_search { "query": { "script_score": { - "query": { - "match_all": {} + "query" : { + "bool" : { + "filter" : { + "term" : { + "status" : "published" + } + } + } }, "script": { "source": "cosineSimilaritySparse(params.query_vector, doc['my_sparse_vector']) + 1.0", @@ -112,8 +130,14 @@ GET my_index/_search { "query": { "script_score": { - "query": { - "match_all": {} + "query" : { + "bool" : { + "filter" : { + "term" : { + "status" : "published" + } + } + } }, "script": { "source": """ @@ -141,8 +165,14 @@ GET my_index/_search { "query": { "script_score": { - "query": { - "match_all": {} + "query" : { + "bool" : { + "filter" : { + "term" : { + "status" : "published" + } + } + } }, "script": { "source": """ @@ -169,8 +199,14 @@ GET my_index/_search { "query": { "script_score": { - "query": { - "match_all": {} + "query" : { + "bool" : { + "filter" : { + "term" : { + "status" : "published" + } + } + } }, "script": { "source": "1 / (1 + l1norm(params.queryVector, doc['my_dense_vector']))", <1> @@ -202,8 +238,14 @@ GET my_index/_search { "query": { "script_score": { - "query": { - "match_all": {} + "query" : { + "bool" : { + "filter" : { + "term" : { + "status" : "published" + } + } + } }, "script": { "source": "1 / (1 + l1normSparse(params.queryVector, doc['my_sparse_vector']))", @@ -227,8 +269,14 @@ GET my_index/_search { "query": { "script_score": { - "query": { - "match_all": {} + "query" : { + "bool" : { + "filter" : { + "term" : { + "status" : "published" + } + } + } }, "script": { "source": "1 / (1 + l2norm(params.queryVector, doc['my_dense_vector']))", @@ -251,8 +299,14 @@ GET my_index/_search { "query": { "script_score": { - "query": { - "match_all": {} + "query" : { + "bool" : { + "filter" : { + "term" : { 
+ "status" : "published" + } + } + } }, "script": { "source": "1 / (1 + l2normSparse(params.queryVector, doc['my_sparse_vector']))", diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java index 7196fdbf984..606284f0462 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java @@ -149,6 +149,10 @@ public enum XContentType { return type; } } + // we also support newline delimited JSON: http://specs.okfnlabs.org/ndjson/ + if (lowercaseMediaType.toLowerCase(Locale.ROOT).equals("application/x-ndjson")) { + return XContentType.JSON; + } return null; } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index f1f0d7c8f96..2458c5eb40a 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -117,8 +116,7 @@ public class GrokProcessorGetAction extends ActionType nodesInCluster) { - return Arrays.asList(new GrokProcessorGetAction.RestAction(settings, restController)); + return Arrays.asList(new GrokProcessorGetAction.RestAction(restController)); } @Override diff --git 
a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index ee0d50ad2cc..42c140cada0 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -59,8 +59,8 @@ public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { return Arrays.asList( - new RestSearchTemplateAction(settings, restController), + new RestSearchTemplateAction(restController), new RestMultiSearchTemplateAction(settings, restController), - new RestRenderSearchTemplateAction(settings, restController)); + new RestRenderSearchTemplateAction(restController)); } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java index 2c46b6f694a..5eccdef8afc 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java @@ -58,7 +58,6 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler { private final boolean allowExplicitIndex; public RestMultiSearchTemplateAction(Settings settings, RestController controller) { - super(settings); this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); controller.registerHandler(GET, "/_msearch/template", this); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java index 75acc094243..767f6e95c1f 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -34,8 +33,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestRenderSearchTemplateAction extends BaseRestHandler { - public RestRenderSearchTemplateAction(Settings settings, RestController controller) { - super(settings); + + public RestRenderSearchTemplateAction(RestController controller) { controller.registerHandler(GET, "/_render/template", this); controller.registerHandler(POST, "/_render/template", this); controller.registerHandler(GET, "/_render/template/{id}", this); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index 70a12f0c8bf..8ec85295b93 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; 
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -47,9 +46,7 @@ public class RestSearchTemplateAction extends BaseRestHandler { RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } - public RestSearchTemplateAction(Settings settings, RestController controller) { - super(settings); - + public RestSearchTemplateAction(RestController controller) { controller.registerHandler(GET, "/_search/template", this); controller.registerHandler(POST, "/_search/template", this); controller.registerHandler(GET, "/{index}/_search/template", this); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java index 0da8afbae04..562ffaf8e1f 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.script.mustache; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.rest.FakeRestRequest; @@ -32,7 +31,7 @@ public class RestSearchTemplateActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestSearchTemplateAction(Settings.EMPTY, controller()); + new RestSearchTemplateAction(controller()); } public void testTypeInPath() { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 0ea3d4af81f..206d6e029ee 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ 
-208,8 +208,8 @@ final class Compiler { */ Constructor compile(Loader loader, MainMethodReserved reserved, String name, String source, CompilerSettings settings) { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); - SSource root = Walker.buildPainlessTree(scriptClassInfo, reserved, name, source, settings, painlessLookup, - null); + SSource root = Walker.buildPainlessTree(scriptClassInfo, reserved, name, source, settings, painlessLookup, null); + root.storeSettings(settings); root.analyze(painlessLookup); Map statics = root.write(); @@ -240,6 +240,7 @@ final class Compiler { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); SSource root = Walker.buildPainlessTree(scriptClassInfo, new MainMethodReserved(), name, source, settings, painlessLookup, debugStream); + root.storeSettings(settings); root.analyze(painlessLookup); root.write(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 96a03fe5914..6239d33f10e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -142,8 +142,8 @@ public final class PainlessPlugin extends Plugin implements ScriptPlugin, Extens IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { List handlers = new ArrayList<>(); - handlers.add(new PainlessExecuteAction.RestAction(settings, restController)); - handlers.add(new PainlessContextAction.RestAction(settings, restController)); + handlers.add(new PainlessExecuteAction.RestAction(restController)); + handlers.add(new PainlessContextAction.RestAction(restController)); return handlers; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java index ac1b95b3b90..9075b9e030d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessContextAction.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.painless.PainlessScriptEngine; @@ -195,8 +194,7 @@ public class PainlessContextAction extends ActionType { statements.add((AStatement)visit(statement)); } - return new SSource(scriptClassInfo, settings, sourceName, sourceText, debugStream, + return new SSource(scriptClassInfo, sourceName, sourceText, debugStream, (MainMethodReserved)reserved.pop(), location(ctx), functions, statements); } @@ -319,8 +319,6 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitWhile(WhileContext ctx) { - reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); - AExpression expression = (AExpression)visit(ctx.expression()); if (ctx.trailer() != null) { @@ -336,8 +334,6 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitDo(DoContext ctx) { - reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); - AExpression expression = (AExpression)visit(ctx.expression()); SBlock block = (SBlock)visit(ctx.block()); @@ -346,8 +342,6 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitFor(ForContext ctx) { - reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); - ANode initializer = ctx.initializer() 
== null ? null : visit(ctx.initializer()); AExpression expression = ctx.expression() == null ? null : (AExpression)visit(ctx.expression()); AExpression afterthought = ctx.afterthought() == null ? null : (AExpression)visit(ctx.afterthought()); @@ -365,8 +359,6 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitEach(EachContext ctx) { - reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); - String type = ctx.decltype().getText(); String name = ctx.ID().getText(); AExpression expression = (AExpression)visit(ctx.expression()); @@ -377,8 +369,6 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitIneach(IneachContext ctx) { - reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); - String name = ctx.ID().getText(); AExpression expression = (AExpression)visit(ctx.expression()); SBlock block = (SBlock)visit(ctx.trailer()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java index 567ffcef8a3..50430c68a1b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -53,6 +54,11 @@ public abstract class ANode { this.location = Objects.requireNonNull(location); } + /** + * Store settings required for future compiler passes. + */ + abstract void storeSettings(CompilerSettings settings); + /** * Adds all variable names referenced to the variable set. *

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java index 584e5df6342..a54dae82a38 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java @@ -21,6 +21,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -62,6 +63,15 @@ public final class EAssignment extends AExpression { this.operation = operation; } + @Override + void storeSettings(CompilerSettings settings) { + lhs.storeSettings(settings); + + if (rhs != null) { + rhs.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { lhs.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 00abe788bf4..36bf103936d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -57,6 +58,12 @@ public final class EBinary extends AExpression { this.right = Objects.requireNonNull(right); } + @Override + void storeSettings(CompilerSettings settings) { + left.storeSettings(settings); + right.storeSettings(settings); + } + @Override void 
extractVariables(Set variables) { left.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java index 72d8dd0f10b..792ffc458ac 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -47,6 +48,12 @@ public final class EBool extends AExpression { this.right = Objects.requireNonNull(right); } + @Override + void storeSettings(CompilerSettings settings) { + left.storeSettings(settings); + right.storeSettings(settings); + } + @Override void extractVariables(Set variables) { left.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java index c7e20e57f1a..68f297f800c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -37,6 +38,11 @@ public final class EBoolean extends AExpression { this.constant = constant; } + @Override + void storeSettings(CompilerSettings settings) { + // Do nothing. + } + @Override void extractVariables(Set variables) { // Do nothing. 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index ba150ea5f9e..b16a3424342 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.LocalMethod; @@ -59,6 +60,13 @@ public final class ECallLocal extends AExpression { this.arguments = Objects.requireNonNull(arguments); } + @Override + void storeSettings(CompilerSettings settings) { + for (AExpression argument : arguments) { + argument.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { for (AExpression argument : arguments) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index a649fa7611c..8f78442ab37 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; @@ -52,6 +53,11 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda this.call = Objects.requireNonNull(call); } + @Override + void storeSettings(CompilerSettings settings) { + // Do nothing. 
+ } + @Override void extractVariables(Set variables) { variables.add(variable); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index 08236a965fe..abbc2755004 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -44,9 +45,14 @@ final class ECast extends AExpression { this.cast = Objects.requireNonNull(cast); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { - throw new IllegalStateException("Illegal tree structure."); + throw createError(new IllegalStateException("Illegal tree structure.")); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index 4d8a71ae3eb..11f94db76ed 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -56,6 +57,12 @@ public final class EComp extends AExpression { this.right = Objects.requireNonNull(right); } + @Override + void storeSettings(CompilerSettings settings) { + 
left.storeSettings(settings); + right.storeSettings(settings); + } + @Override void extractVariables(Set variables) { left.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java index fee5cba1aac..ea98f05a1db 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -47,6 +48,13 @@ public final class EConditional extends AExpression { this.right = Objects.requireNonNull(right); } + @Override + void storeSettings(CompilerSettings settings) { + condition.storeSettings(settings); + left.storeSettings(settings); + right.storeSettings(settings); + } + @Override void extractVariables(Set variables) { condition.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java index 84e7f9a2d0d..bd028458385 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -38,6 +39,11 @@ final class EConstant extends AExpression { this.constant = constant; } + @Override + void 
storeSettings(CompilerSettings settings) { + throw new IllegalStateException("illegal tree structure"); + } + @Override void extractVariables(Set variables) { throw new IllegalStateException("Illegal tree structure."); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java index 3912f6d087f..03e47cc7c24 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -41,7 +42,14 @@ public final class EDecimal extends AExpression { } @Override - void extractVariables(Set variables) {} + void storeSettings(CompilerSettings settings) { + // Do nothing. + } + + @Override + void extractVariables(Set variables) { + // Do nothing. 
+ } @Override void analyze(Locals locals) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java index cb4d9323962..7daac4f4a7e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -45,6 +46,12 @@ public class EElvis extends AExpression { this.rhs = requireNonNull(rhs); } + @Override + void storeSettings(CompilerSettings settings) { + lhs.storeSettings(settings); + rhs.storeSettings(settings); + } + @Override void extractVariables(Set variables) { lhs.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index 3ad3018c61e..97779b2e801 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -42,6 +43,11 @@ public final class EExplicit extends AExpression { this.child = Objects.requireNonNull(child); } + @Override + void storeSettings(CompilerSettings settings) { + child.storeSettings(settings); + } + @Override void extractVariables(Set variables) { child.extractVariables(variables); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index c97cc66c7c7..03246e186a3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -47,7 +48,14 @@ public final class EFunctionRef extends AExpression implements ILambda { } @Override - void extractVariables(Set variables) {} + void storeSettings(CompilerSettings settings) { + // do nothing + } + + @Override + void extractVariables(Set variables) { + // do nothing + } @Override void analyze(Locals locals) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 73e4f176ea1..810f7caed9b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -47,6 +48,11 @@ public final class EInstanceof extends AExpression { this.type = Objects.requireNonNull(type); } + @Override + void storeSettings(CompilerSettings settings) { + expression.storeSettings(settings); + } + @Override void extractVariables(Set variables) { expression.extractVariables(variables); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 7eb0cb15115..53e0f87779a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -68,6 +69,8 @@ public final class ELambda extends AExpression implements ILambda { private final List paramNameStrs; private final List statements; + private CompilerSettings settings; + // desugared synthetic method (lambda body) private SFunction desugared; // captured variables @@ -87,6 +90,15 @@ public final class ELambda extends AExpression implements ILambda { this.statements = Collections.unmodifiableList(statements); } + @Override + void storeSettings(CompilerSettings settings) { + for (AStatement statement : statements) { + statement.storeSettings(settings); + } + + this.settings = settings; + } + @Override void extractVariables(Set variables) { for (AStatement statement : statements) { @@ -168,9 +180,10 @@ public final class ELambda extends AExpression implements ILambda { String name = locals.getNextSyntheticName(); desugared = new SFunction(reserved, location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, statements, true); + desugared.storeSettings(settings); desugared.generateSignature(locals.getPainlessLookup()); desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), desugared.name, returnType, - desugared.parameters, captures.size(), reserved.getMaxLoopCounter())); + desugared.parameters, captures.size(), settings.getMaxLoopCounter())); // setup method reference to synthetic method 
if (expected == null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 8c9154aaaf3..0ccdbdc7586 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -50,6 +51,13 @@ public final class EListInit extends AExpression { this.values = values; } + @Override + void storeSettings(CompilerSettings settings) { + for (AExpression value : values) { + value.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { for (AExpression value : values) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index 11c12b2cd0a..92c215974d3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -52,6 +53,17 @@ public final class EMapInit extends AExpression { this.values = values; } + @Override + void storeSettings(CompilerSettings settings) { + for (AExpression key : keys) { + key.storeSettings(settings); + } + + for (AExpression value : values) { + value.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { for (AExpression key : keys) 
{ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java index cef005de9c3..418f8f91e93 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -45,6 +46,13 @@ public final class ENewArray extends AExpression { this.initialize = initialize; } + @Override + void storeSettings(CompilerSettings settings) { + for (AExpression argument : arguments) { + argument.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { for (AExpression argument : arguments) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java index 4fc6753d972..c9b2ca95308 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -36,6 +37,8 @@ import java.util.Set; public final class ENewArrayFunctionRef extends AExpression implements ILambda { private final String type; + private CompilerSettings settings; + private SFunction function; private FunctionRef ref; private String defPointer; @@ -47,16 +50,24 @@ public final class ENewArrayFunctionRef 
extends AExpression implements ILambda { } @Override - void extractVariables(Set variables) {} + void storeSettings(CompilerSettings settings) { + this.settings = settings; + } + + @Override + void extractVariables(Set variables) { + // do nothing + } @Override void analyze(Locals locals) { SReturn code = new SReturn(location, new ENewArray(location, type, Arrays.asList(new EVariable(location, "size")), false)); function = new SFunction(new SFunction.FunctionReserved(), location, type, locals.getNextSyntheticName(), Arrays.asList("int"), Arrays.asList("size"), Arrays.asList(code), true); + function.storeSettings(settings); function.generateSignature(locals.getPainlessLookup()); function.analyze(Locals.newLambdaScope(locals.getProgramScope(), function.name, function.returnType, - function.parameters, 0, 0)); + function.parameters, 0, settings.getMaxLoopCounter())); if (expected == null) { ref = null; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index 9423ed5d109..fcca44a457d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -51,6 +52,13 @@ public final class ENewObj extends AExpression { this.arguments = Objects.requireNonNull(arguments); } + @Override + void storeSettings(CompilerSettings settings) { + for (AExpression argument : arguments) { + argument.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { for (AExpression argument : arguments) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index 3a47dfc725f..46836c179fc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -37,6 +38,11 @@ public final class ENull extends AExpression { super(location); } + @Override + void storeSettings(CompilerSettings settings) { + // do nothing + } + @Override void extractVariables(Set variables) { // Do nothing. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java index cf62f30ad45..59f28ccb437 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -42,6 +43,11 @@ public final class ENumeric extends AExpression { this.radix = radix; } + @Override + void storeSettings(CompilerSettings settings) { + // do nothing + } + @Override void extractVariables(Set variables) { // Do nothing. 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java index fa249b9df62..356c1958c6e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Constant; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -53,6 +54,11 @@ public final class ERegex extends AExpression { this.flags = flags; } + @Override + void storeSettings(CompilerSettings settings) { + // do nothing + } + @Override void extractVariables(Set variables) { // Do nothing. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java index 0d8c94db0f1..7214d0c7ed6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -40,6 +41,11 @@ public final class EStatic extends AExpression { this.type = Objects.requireNonNull(type); } + @Override + void storeSettings(CompilerSettings settings) { + // do nothing + } + @Override void extractVariables(Set variables) { // Do nothing. 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java index 3c34d0f1c73..1adf4966745 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -38,6 +39,11 @@ public final class EString extends AExpression { this.constant = Objects.requireNonNull(string); } + @Override + void storeSettings(CompilerSettings settings) { + // do nothing + } + @Override void extractVariables(Set variables) { // Do nothing. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index 1c0fce81876..f713082ba5e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -53,6 +54,11 @@ public final class EUnary extends AExpression { this.child = Objects.requireNonNull(child); } + @Override + void storeSettings(CompilerSettings settings) { + child.storeSettings(settings); + } + @Override void extractVariables(Set variables) { child.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java index 3dd3e73ac79..734e1e9c5ea 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -44,6 +45,11 @@ public final class EVariable extends AStoreable { this.name = Objects.requireNonNull(name); } + @Override + void storeSettings(CompilerSettings settings) { + // do nothing + } + @Override void extractVariables(Set variables) { variables.add(name); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java index 26471f67f65..65d4b87ab58 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -46,6 +47,12 @@ public final class PBrace extends AStoreable { this.index = Objects.requireNonNull(index); } + @Override + void storeSettings(CompilerSettings settings) { + prefix.storeSettings(settings); + index.storeSettings(settings); + } + @Override void extractVariables(Set variables) { prefix.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 25ae1ed9774..4103b6165ac 100644 
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -51,6 +52,15 @@ public final class PCallInvoke extends AExpression { this.arguments = Objects.requireNonNull(arguments); } + @Override + void storeSettings(CompilerSettings settings) { + prefix.storeSettings(settings); + + for (AExpression argument : arguments) { + argument.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { prefix.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 7efd6a29899..9a00421c616 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -52,6 +53,11 @@ public final class PField extends AStoreable { this.value = Objects.requireNonNull(value); } + @Override + void storeSettings(CompilerSettings settings) { + prefix.storeSettings(settings); + } + @Override void extractVariables(Set variables) { prefix.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java index 0148046f7ec..83464f45a25 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -42,6 +43,11 @@ final class PSubArrayLength extends AStoreable { this.value = Objects.requireNonNull(value); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java index abd7128a042..79c28168ab7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -42,9 +43,14 @@ final class PSubBrace extends AStoreable { this.index = Objects.requireNonNull(index); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { - index.extractVariables(variables); + throw createError(new IllegalStateException("illegal tree structure")); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java index fe2ae52603b..d5fd2849aba 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -46,6 +47,11 @@ final class PSubCallInvoke extends AExpression { this.arguments = Objects.requireNonNull(arguments); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java index c5291879d01..4dec674f22e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -43,6 +44,11 @@ final class PSubDefArray extends AStoreable { this.index = Objects.requireNonNull(index); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java index 8323c17a076..ff5bfe7dd3f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -52,6 +53,11 @@ final class PSubDefCall extends AExpression { this.arguments = Objects.requireNonNull(arguments); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java index e2da3840ea0..b9c971570ed 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -43,6 +44,11 @@ final class PSubDefField extends AStoreable { this.value = Objects.requireNonNull(value); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new 
IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index 9e09f810250..c1d5b45d76c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -44,6 +45,11 @@ final class PSubField extends AStoreable { this.field = Objects.requireNonNull(field); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 3bc4913fde9..0e730cdf9be 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -48,6 +49,11 @@ final class PSubListShortcut extends AStoreable { this.index = Objects.requireNonNull(index); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void 
extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index 0a0f099bd68..a3e1ff4d198 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -47,6 +48,11 @@ final class PSubMapShortcut extends AStoreable { this.index = Objects.requireNonNull(index); } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java index 43b0feb0009..5a450808388 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -43,9 +44,14 @@ public class PSubNullSafeCallInvoke extends AExpression { this.guarded = requireNonNull(guarded); } + @Override + void 
storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { - guarded.extractVariables(variables); + throw createError(new IllegalStateException("illegal tree structure")); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java index b6bed360355..aa8bc005eae 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -38,9 +39,14 @@ public class PSubNullSafeField extends AStoreable { this.guarded = guarded; } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { - guarded.extractVariables(variables); + throw createError(new IllegalStateException("illegal tree structure")); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java index 16975660477..4737eb93030 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import 
org.elasticsearch.painless.Location; @@ -46,6 +47,11 @@ final class PSubShortcut extends AStoreable { this.setter = setter; } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java index 43e0a62fb00..b00d2305d4b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -43,6 +44,13 @@ public final class SBlock extends AStatement { this.statements = Collections.unmodifiableList(statements); } + @Override + void storeSettings(CompilerSettings settings) { + for (AStatement statement : statements) { + statement.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { for (AStatement statement : statements) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java index bd446901e3a..b1c99988ccc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import 
org.elasticsearch.painless.Location; @@ -35,6 +36,11 @@ public final class SBreak extends AStatement { super(location); } + @Override + void storeSettings(CompilerSettings settings) { + // do nothing + } + @Override void extractVariables(Set variables) { // Do nothing. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java index 0c8ba5de6b2..0ddb54e4b1d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -53,6 +54,11 @@ public final class SCatch extends AStatement { this.block = block; } + @Override + void storeSettings(CompilerSettings settings) { + block.storeSettings(settings); + } + @Override void extractVariables(Set variables) { variables.add(name); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java index 2cd1ed15cd2..18a0fe33cae 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -35,6 +36,11 @@ public final class SContinue extends AStatement { super(location); } + @Override + void storeSettings(CompilerSettings settings) { + // do nothing + } + @Override void 
extractVariables(Set variables) { // Do nothing. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java index 95af41ba2b8..5a68ca1a450 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -43,6 +44,13 @@ public final class SDeclBlock extends AStatement { this.declarations = Collections.unmodifiableList(declarations); } + @Override + void storeSettings(CompilerSettings settings) { + for (SDeclaration declaration: declarations) { + declaration.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { for (SDeclaration declaration : declarations) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index 7ead673c70b..e731555e20a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -48,6 +49,13 @@ public final class SDeclaration extends AStatement { this.expression = expression; } + @Override + void storeSettings(CompilerSettings settings) { + if (expression != null) { + expression.storeSettings(settings); + } + } + @Override void 
extractVariables(Set variables) { variables.add(name); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java index 3c0279707fe..04a386d763e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -46,6 +47,15 @@ public final class SDo extends AStatement { this.block = block; } + @Override + void storeSettings(CompilerSettings settings) { + condition.storeSettings(settings); + + if (block != null) { + block.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { condition.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index cf41105c4fe..2e9dd826f99 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -51,6 +52,15 @@ public class SEach extends AStatement { this.block = block; } + @Override + void storeSettings(CompilerSettings settings) { + expression.storeSettings(settings); + + if (block != null) { + block.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { variables.add(name); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java index e499ad3273c..e1ee2e6e5bd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -40,6 +41,11 @@ public final class SExpression extends AStatement { this.expression = Objects.requireNonNull(expression); } + @Override + void storeSettings(CompilerSettings settings) { + expression.storeSettings(settings); + } + @Override void extractVariables(Set variables) { expression.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java index c12ad5473f9..14580a1d65b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -52,6 +53,25 @@ public final class SFor extends AStatement { this.block = block; } + @Override + void storeSettings(CompilerSettings settings) { + if (initializer != null) { + initializer.storeSettings(settings); + } + + if (condition != null) { + condition.storeSettings(settings); + } + + if (afterthought != null) { + afterthought.storeSettings(settings); + } + + if (block != null) { + block.storeSettings(settings); 
+ } + } + @Override void extractVariables(Set variables) { if (initializer != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 9464a54ea2c..a7aa7fc99ed 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -49,7 +49,6 @@ import static java.util.Collections.unmodifiableSet; public final class SFunction extends AStatement { public static final class FunctionReserved implements Reserved { private final Set usedVariables = new HashSet<>(); - private int maxLoopCounter = 0; @Override public void markUsedVariable(String name) { @@ -65,16 +64,6 @@ public final class SFunction extends AStatement { public void addUsedVariables(FunctionReserved reserved) { usedVariables.addAll(reserved.getUsedVariables()); } - - @Override - public void setMaxLoopCounter(int max) { - maxLoopCounter = max; - } - - @Override - public int getMaxLoopCounter() { - return maxLoopCounter; - } } final FunctionReserved reserved; @@ -85,6 +74,8 @@ public final class SFunction extends AStatement { private final List statements; public final boolean synthetic; + private CompilerSettings settings; + Class returnType; List> typeParameters; MethodType methodType; @@ -108,6 +99,15 @@ public final class SFunction extends AStatement { this.synthetic = synthetic; } + @Override + void storeSettings(CompilerSettings settings) { + for (AStatement statement : statements) { + statement.storeSettings(settings); + } + + this.settings = settings; + } + @Override void extractVariables(Set variables) { // we should never be extracting from a function, as functions are top-level! 
@@ -176,13 +176,13 @@ public final class SFunction extends AStatement { throw createError(new IllegalArgumentException("Not all paths provide a return value for method [" + name + "].")); } - if (reserved.getMaxLoopCounter() > 0) { + if (settings.getMaxLoopCounter() > 0) { loop = locals.getVariable(null, Locals.LOOP); } } /** Writes the function to given ClassVisitor. */ - void write (ClassVisitor writer, CompilerSettings settings, Globals globals) { + void write(ClassVisitor writer, CompilerSettings settings, Globals globals) { int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC; if (synthetic) { access |= Opcodes.ACC_SYNTHETIC; @@ -195,10 +195,10 @@ public final class SFunction extends AStatement { @Override void write(MethodWriter function, Globals globals) { - if (reserved.getMaxLoopCounter() > 0) { + if (settings.getMaxLoopCounter() > 0) { // if there is infinite loop protection, we do this once: // int #loop = settings.getMaxLoopCounter() - function.push(reserved.getMaxLoopCounter()); + function.push(settings.getMaxLoopCounter()); function.visitVarInsn(Opcodes.ISTORE, loop.getSlot()); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java index 3409e08804c..43472d82ccb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -44,6 +45,15 @@ public final class SIf extends AStatement { this.ifblock = ifblock; } + @Override + void storeSettings(CompilerSettings settings) { + condition.storeSettings(settings); + + if (ifblock != null) { + ifblock.storeSettings(settings); + } + } + @Override void 
extractVariables(Set variables) { condition.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java index 2f7aa82b780..e4460de645c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -49,6 +50,19 @@ public final class SIfElse extends AStatement { this.elseblock = elseblock; } + @Override + void storeSettings(CompilerSettings settings) { + condition.storeSettings(settings); + + if (ifblock != null) { + ifblock.storeSettings(settings); + } + + if (elseblock != null) { + elseblock.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { condition.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java index 2341bad830f..68d5e286ec6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -40,9 +41,18 @@ public final class SReturn extends AStatement { this.expression = expression; } + @Override + void storeSettings(CompilerSettings settings) { + if (expression != null) { + expression.storeSettings(settings); + } + } + 
@Override void extractVariables(Set variables) { - expression.extractVariables(variables); + if (expression != null) { + expression.extractVariables(variables); + } } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index bfdeba2c8ef..85e45c3f776 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -92,14 +92,10 @@ public final class SSource extends AStatement { void markUsedVariable(String name); Set getUsedVariables(); void addUsedVariables(FunctionReserved reserved); - - void setMaxLoopCounter(int max); - int getMaxLoopCounter(); } public static final class MainMethodReserved implements Reserved { private final Set usedVariables = new HashSet<>(); - private int maxLoopCounter = 0; @Override public void markUsedVariable(String name) { @@ -115,20 +111,9 @@ public final class SSource extends AStatement { public void addUsedVariables(FunctionReserved reserved) { usedVariables.addAll(reserved.getUsedVariables()); } - - @Override - public void setMaxLoopCounter(int max) { - maxLoopCounter = max; - } - - @Override - public int getMaxLoopCounter() { - return maxLoopCounter; - } } private final ScriptClassInfo scriptClassInfo; - private final CompilerSettings settings; private final String name; private final Printer debugStream; private final MainMethodReserved reserved; @@ -136,15 +121,16 @@ public final class SSource extends AStatement { private final Globals globals; private final List statements; + private CompilerSettings settings; + private Locals mainMethod; private final List getMethods; private byte[] bytes; - public SSource(ScriptClassInfo scriptClassInfo, CompilerSettings settings, String name, String sourceText, Printer debugStream, + public SSource(ScriptClassInfo scriptClassInfo, String name, 
String sourceText, Printer debugStream, MainMethodReserved reserved, Location location, List functions, List statements) { super(location); this.scriptClassInfo = Objects.requireNonNull(scriptClassInfo); - this.settings = Objects.requireNonNull(settings); this.name = Objects.requireNonNull(name); this.debugStream = debugStream; this.reserved = Objects.requireNonNull(reserved); @@ -155,6 +141,19 @@ public final class SSource extends AStatement { this.getMethods = new ArrayList<>(); } + @Override + public void storeSettings(CompilerSettings settings) { + for (SFunction function : functions) { + function.storeSettings(settings); + } + + for (AStatement statement : statements) { + statement.storeSettings(settings); + } + + this.settings = settings; + } + @Override void extractVariables(Set variables) { // we should never be extracting from a function, as functions are top-level! @@ -183,7 +182,7 @@ public final class SSource extends AStatement { void analyze(Locals program) { for (SFunction function : functions) { Locals functionLocals = - Locals.newFunctionScope(program, function.returnType, function.parameters, function.reserved.getMaxLoopCounter()); + Locals.newFunctionScope(program, function.returnType, function.parameters, settings.getMaxLoopCounter()); function.analyze(functionLocals); } @@ -191,7 +190,7 @@ public final class SSource extends AStatement { throw createError(new IllegalArgumentException("Cannot generate an empty script.")); } - mainMethod = Locals.newMainMethodScope(scriptClassInfo, program, reserved.getMaxLoopCounter()); + mainMethod = Locals.newMainMethodScope(scriptClassInfo, program, settings.getMaxLoopCounter()); for (int get = 0; get < scriptClassInfo.getGetMethods().size(); ++get) { org.objectweb.asm.commons.Method method = scriptClassInfo.getGetMethods().get(get); @@ -405,13 +404,13 @@ public final class SSource extends AStatement { Label endCatch = new Label(); writer.mark(startTry); - if (reserved.getMaxLoopCounter() > 0) { + if 
(settings.getMaxLoopCounter() > 0) { // if there is infinite loop protection, we do this once: // int #loop = settings.getMaxLoopCounter() Variable loop = mainMethod.getVariable(null, Locals.LOOP); - writer.push(reserved.getMaxLoopCounter()); + writer.push(settings.getMaxLoopCounter()); writer.visitVarInsn(Opcodes.ISTORE, loop.getSlot()); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java index 7e0d74865f9..9a689a428f4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -54,6 +55,11 @@ final class SSubEachArray extends AStatement { this.block = block; } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index 46dfa056874..c18bc759c3b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.CompilerSettings; import 
org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -63,6 +64,11 @@ final class SSubEachIterable extends AStatement { this.block = block; } + @Override + void storeSettings(CompilerSettings settings) { + throw createError(new IllegalStateException("illegal tree structure")); + } + @Override void extractVariables(Set variables) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java index 797938c3e03..a5e1c094b0c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -40,6 +41,11 @@ public final class SThrow extends AStatement { this.expression = Objects.requireNonNull(expression); } + @Override + void storeSettings(CompilerSettings settings) { + expression.storeSettings(settings); + } + @Override void extractVariables(Set variables) { expression.extractVariables(variables); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java index d9cd2da0f92..16f97f09436 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; 
import org.elasticsearch.painless.Location; @@ -46,6 +47,17 @@ public final class STry extends AStatement { this.catches = Collections.unmodifiableList(catches); } + @Override + void storeSettings(CompilerSettings settings) { + if (block != null) { + block.storeSettings(settings); + } + + for (SCatch ctch : catches) { + ctch.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { if (block != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java index 506758e562a..82210710088 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless.node; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -46,6 +47,15 @@ public final class SWhile extends AStatement { this.block = block; } + @Override + void storeSettings(CompilerSettings settings) { + condition.storeSettings(settings); + + if (block != null) { + block.storeSettings(settings); + } + } + @Override void extractVariables(Set variables) { condition.extractVariables(variables); diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java index 0e5d754778f..48b42973781 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java @@ -50,7 +50,7 @@ public class RankEvalPlugin extends Plugin implements ActionPlugin { public List getRestHandlers(Settings settings, RestController restController, ClusterSettings 
clusterSettings, IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { - return Arrays.asList(new RestRankEvalAction(settings, restController)); + return Arrays.asList(new RestRankEvalAction(restController)); } @Override diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java index c3616d3b9b5..b4876eafe93 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RestRankEvalAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -90,8 +89,7 @@ public class RestRankEvalAction extends BaseRestHandler { public static String ENDPOINT = "_rank_eval"; - public RestRankEvalAction(Settings settings, RestController controller) { - super(settings); + public RestRankEvalAction(RestController controller) { controller.registerHandler(GET, "/" + ENDPOINT, this); controller.registerHandler(POST, "/" + ENDPOINT, this); controller.registerHandler(GET, "/{index}/" + ENDPOINT, this); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java index 63451abb7cc..3423bdccee3 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java @@ -19,11 +19,10 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -43,8 +42,7 @@ public abstract class AbstractBaseReindexRestHandler< private final A action; - protected AbstractBaseReindexRestHandler(Settings settings, A action) { - super(settings); + protected AbstractBaseReindexRestHandler(A action) { this.action = action; } @@ -113,7 +111,7 @@ public abstract class AbstractBaseReindexRestHandler< return request; } - private RestChannelConsumer sendTask(String localNodeId, Task task) throws IOException { + private RestChannelConsumer sendTask(String localNodeId, Task task) { return channel -> { try (XContentBuilder builder = channel.newBuilder()) { builder.startObject(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java index 834703cddc7..a1546ffdf05 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -41,8 +40,8 @@ public abstract class AbstractBulkByQueryRestHandler< Request extends AbstractBulkByScrollRequest, A extends ActionType> extends AbstractBaseReindexRestHandler { - protected AbstractBulkByQueryRestHandler(Settings settings, A action) { - super(settings, action); + protected AbstractBulkByQueryRestHandler(A action) { + super(action); } protected void parseInternalRequest(Request internal, RestRequest restRequest, diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java index 52fc321286a..fefee4f348b 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java @@ -74,10 +74,10 @@ public class ReindexPlugin extends Plugin implements ActionPlugin { IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { return Arrays.asList( - new RestReindexAction(settings, restController), - new RestUpdateByQueryAction(settings, restController), - new RestDeleteByQueryAction(settings, restController), - new RestRethrottleAction(settings, restController, nodesInCluster)); + new RestReindexAction(restController), + new RestUpdateByQueryAction(restController), + new RestDeleteByQueryAction(restController), + new RestRethrottleAction(restController, nodesInCluster)); } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java index 5dab947a25d..2618d9af2b5 100644 --- 
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -32,8 +31,9 @@ import java.util.function.Consumer; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { - public RestDeleteByQueryAction(Settings settings, RestController controller) { - super(settings, DeleteByQueryAction.INSTANCE); + + public RestDeleteByQueryAction(RestController controller) { + super(DeleteByQueryAction.INSTANCE); controller.registerHandler(POST, "/{index}/_delete_by_query", this); controller.registerHandler(POST, "/{index}/{type}/_delete_by_query", this); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index e534717e775..516d47b8358 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -35,8 +34,8 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestReindexAction extends AbstractBaseReindexRestHandler { - public RestReindexAction(Settings settings, RestController controller) { - super(settings, ReindexAction.INSTANCE); + 
public RestReindexAction(RestController controller) { + super(ReindexAction.INSTANCE); controller.registerHandler(POST, "/_reindex", this); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java index 9eed89885b9..cdd29a89b92 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java @@ -21,13 +21,11 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.tasks.TaskId; -import java.io.IOException; import java.util.function.Supplier; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -36,8 +34,7 @@ import static org.elasticsearch.rest.action.admin.cluster.RestListTasksAction.li public class RestRethrottleAction extends BaseRestHandler { private final Supplier nodesInCluster; - public RestRethrottleAction(Settings settings, RestController controller, Supplier nodesInCluster) { - super(settings); + public RestRethrottleAction(RestController controller, Supplier nodesInCluster) { this.nodesInCluster = nodesInCluster; controller.registerHandler(POST, "/_update_by_query/{taskId}/_rethrottle", this); controller.registerHandler(POST, "/_delete_by_query/{taskId}/_rethrottle", this); @@ -50,7 +47,7 @@ public class RestRethrottleAction extends BaseRestHandler { } @Override - public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) { 
RethrottleRequest internalRequest = new RethrottleRequest(); internalRequest.setTaskId(new TaskId(request.param("taskId"))); Float requestsPerSecond = AbstractBaseReindexRestHandler.parseRequestsPerSecond(request); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 3442e5de0ac..49626c4a3fa 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -39,8 +38,8 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.script.Script.DEFAULT_SCRIPT_LANG; public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { - public RestUpdateByQueryAction(Settings settings, RestController controller) { - super(settings, UpdateByQueryAction.INSTANCE); + public RestUpdateByQueryAction(RestController controller) { + super(UpdateByQueryAction.INSTANCE); controller.registerHandler(POST, "/{index}/_update_by_query", this); controller.registerHandler(POST, "/{index}/{type}/_update_by_query", this); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index ef6a4ff41fe..2c0f0b8b1f7 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -39,20 +39,19 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.reindex.RejectAwareActionListener; import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.io.InputStream; -import java.util.Iterator; import java.util.function.BiFunction; import java.util.function.Consumer; @@ -77,31 +76,31 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { } @Override - protected void doStart(Consumer onResponse) { - lookupRemoteVersion(version -> { + protected void doStart(RejectAwareActionListener searchListener) { + lookupRemoteVersion(RejectAwareActionListener.withResponseHandler(searchListener, version -> { remoteVersion = version; execute(RemoteRequestBuilders.initialSearch(searchRequest, query, remoteVersion), - RESPONSE_PARSER, r -> onStartResponse(onResponse, r)); - }); + RESPONSE_PARSER, RejectAwareActionListener.withResponseHandler(searchListener, r -> onStartResponse(searchListener, r))); + })); } - void lookupRemoteVersion(Consumer onVersion) { - execute(new Request("GET", ""), MAIN_ACTION_PARSER, onVersion); + void lookupRemoteVersion(RejectAwareActionListener listener) { + execute(new Request("GET", ""), 
MAIN_ACTION_PARSER, listener); } - private void onStartResponse(Consumer onResponse, Response response) { + private void onStartResponse(RejectAwareActionListener searchListener, Response response) { if (Strings.hasLength(response.getScrollId()) && response.getHits().isEmpty()) { logger.debug("First response looks like a scan response. Jumping right to the second. scroll=[{}]", response.getScrollId()); - doStartNextScroll(response.getScrollId(), timeValueMillis(0), onResponse); + doStartNextScroll(response.getScrollId(), timeValueMillis(0), searchListener); } else { - onResponse.accept(response); + searchListener.onResponse(response); } } @Override - protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse) { + protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, RejectAwareActionListener searchListener) { TimeValue keepAlive = timeValueNanos(searchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos()); - execute(RemoteRequestBuilders.scroll(scrollId, keepAlive, remoteVersion), RESPONSE_PARSER, onResponse); + execute(RemoteRequestBuilders.scroll(scrollId, keepAlive, remoteVersion), RESPONSE_PARSER, searchListener); } @Override @@ -153,91 +152,77 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { } private void execute(Request request, - BiFunction parser, Consumer listener) { + BiFunction parser, RejectAwareActionListener listener) { // Preserve the thread context so headers survive after the call java.util.function.Supplier contextSupplier = threadPool.getThreadContext().newRestorableContext(true); - class RetryHelper extends AbstractRunnable { - private final Iterator retries = backoffPolicy.iterator(); - - @Override - protected void doRun() throws Exception { - client.performRequestAsync(request, new ResponseListener() { - @Override - public void onSuccess(org.elasticsearch.client.Response response) { - // Restore the thread context to get the precious headers - try 
(ThreadContext.StoredContext ctx = contextSupplier.get()) { - assert ctx != null; // eliminates compiler warning - T parsedResponse; - try { - HttpEntity responseEntity = response.getEntity(); - InputStream content = responseEntity.getContent(); - XContentType xContentType = null; - if (responseEntity.getContentType() != null) { - final String mimeType = ContentType.parse(responseEntity.getContentType().getValue()).getMimeType(); - xContentType = XContentType.fromMediaType(mimeType); + try { + client.performRequestAsync(request, new ResponseListener() { + @Override + public void onSuccess(org.elasticsearch.client.Response response) { + // Restore the thread context to get the precious headers + try (ThreadContext.StoredContext ctx = contextSupplier.get()) { + assert ctx != null; // eliminates compiler warning + T parsedResponse; + try { + HttpEntity responseEntity = response.getEntity(); + InputStream content = responseEntity.getContent(); + XContentType xContentType = null; + if (responseEntity.getContentType() != null) { + final String mimeType = ContentType.parse(responseEntity.getContentType().getValue()).getMimeType(); + xContentType = XContentType.fromMediaType(mimeType); + } + if (xContentType == null) { + try { + throw new ElasticsearchException( + "Response didn't include Content-Type: " + bodyMessage(response.getEntity())); + } catch (IOException e) { + ElasticsearchException ee = new ElasticsearchException("Error extracting body from response"); + ee.addSuppressed(e); + throw ee; } - if (xContentType == null) { - try { - throw new ElasticsearchException( - "Response didn't include Content-Type: " + bodyMessage(response.getEntity())); - } catch (IOException e) { - ElasticsearchException ee = new ElasticsearchException("Error extracting body from response"); - ee.addSuppressed(e); - throw ee; - } - } - // EMPTY is safe here because we don't call namedObject - try (XContentParser xContentParser = 
xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, content)) { - parsedResponse = parser.apply(xContentParser, xContentType); - } catch (XContentParseException e) { + } + // EMPTY is safe here because we don't call namedObject + try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, content)) { + parsedResponse = parser.apply(xContentParser, xContentType); + } catch (XContentParseException e) { /* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it * is totally wrong and we're probably not talking to Elasticsearch. */ - throw new ElasticsearchException( - "Error parsing the response, remote is likely not an Elasticsearch instance", e); - } - } catch (IOException e) { throw new ElasticsearchException( - "Error deserializing response, remote is likely not an Elasticsearch instance", e); + "Error parsing the response, remote is likely not an Elasticsearch instance", e); } - listener.accept(parsedResponse); + } catch (IOException e) { + throw new ElasticsearchException( + "Error deserializing response, remote is likely not an Elasticsearch instance", e); } + listener.onResponse(parsedResponse); } + } - @Override - public void onFailure(Exception e) { - try (ThreadContext.StoredContext ctx = contextSupplier.get()) { - assert ctx != null; // eliminates compiler warning - if (e instanceof ResponseException) { - ResponseException re = (ResponseException) e; - if (RestStatus.TOO_MANY_REQUESTS.getStatus() == re.getResponse().getStatusLine().getStatusCode()) { - if (retries.hasNext()) { - TimeValue delay = retries.next(); - logger.trace( - (Supplier) () -> new ParameterizedMessage("retrying rejected search after [{}]", delay), e); - countSearchRetry.run(); - threadPool.schedule(RetryHelper.this, delay, ThreadPool.Names.SAME); - return; - } - } - e = 
wrapExceptionToPreserveStatus(re.getResponse().getStatusLine().getStatusCode(), - re.getResponse().getEntity(), re); - } else if (e instanceof ContentTooLongException) { - e = new IllegalArgumentException( - "Remote responded with a chunk that was too large. Use a smaller batch size.", e); + @Override + public void onFailure(Exception e) { + try (ThreadContext.StoredContext ctx = contextSupplier.get()) { + assert ctx != null; // eliminates compiler warning + if (e instanceof ResponseException) { + ResponseException re = (ResponseException) e; + int statusCode = re.getResponse().getStatusLine().getStatusCode(); + e = wrapExceptionToPreserveStatus(statusCode, + re.getResponse().getEntity(), re); + if (RestStatus.TOO_MANY_REQUESTS.getStatus() == statusCode) { + listener.onRejection(e); + return; } - fail.accept(e); + } else if (e instanceof ContentTooLongException) { + e = new IllegalArgumentException( + "Remote responded with a chunk that was too large. Use a smaller batch size.", e); } + listener.onFailure(e); } - }); - } - - @Override - public void onFailure(Exception t) { - fail.accept(t); - } + } + }); + } catch (Exception e) { + listener.onFailure(e); } - new RetryHelper().run(); } /** @@ -261,7 +246,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { } } - static String bodyMessage(@Nullable HttpEntity entity) throws IOException { + private static String bodyMessage(@Nullable HttpEntity entity) throws IOException { if (entity == null) { return "No error body."; } else { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestDeleteByQueryActionTests.java index 788fad32100..069b85afeb4 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestDeleteByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestDeleteByQueryActionTests.java @@ -19,7 +19,6 @@ package 
org.elasticsearch.index.reindex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -36,7 +35,7 @@ public class RestDeleteByQueryActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestDeleteByQueryAction(Settings.EMPTY, controller()); + action = new RestDeleteByQueryAction(controller()); } public void testTypeInPath() throws IOException { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java index e9c46bbb8e4..fd0a6fde01d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -41,7 +40,7 @@ public class RestReindexActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestReindexAction(Settings.EMPTY, controller()); + action = new RestReindexAction(controller()); } public void testPipelineQueryParameterIsError() throws IOException { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestUpdateByQueryActionTests.java index b5333226bb9..f7a273644bb 100644 --- 
a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestUpdateByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestUpdateByQueryActionTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -37,7 +36,7 @@ public class RestUpdateByQueryActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestUpdateByQueryAction(Settings.EMPTY, controller()); + action = new RestUpdateByQueryAction(controller()); } public void testTypeInPath() throws IOException { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 68cc0effc1d..f9e952baa12 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -52,6 +52,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.reindex.RejectAwareActionListener; +import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.index.reindex.ScrollableHitSource.Response; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -67,10 +69,14 @@ import java.io.IOException; import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.StandardCharsets; +import java.util.Queue; import 
java.util.concurrent.ExecutorService; import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import java.util.stream.Stream; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; @@ -91,6 +97,8 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { private SearchRequest searchRequest; private int retriesAllowed; + private final Queue responseQueue = new LinkedBlockingQueue<>(); + @Before @Override public void setUp() throws Exception { @@ -122,6 +130,11 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { terminate(threadPool); } + @After + public void validateAllConsumed() { + assertTrue(responseQueue.isEmpty()); + } + public void testLookupRemoteVersion() throws Exception { assertLookupRemoteVersion(Version.fromString("0.20.5"), "main/0_20_5.json"); assertLookupRemoteVersion(Version.fromString("0.90.13"), "main/0_90_13.json"); @@ -135,16 +148,17 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { private void assertLookupRemoteVersion(Version expected, String s) throws Exception { AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, s).lookupRemoteVersion(v -> { - assertEquals(expected, v); - called.set(true); - }); + sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, s) + .lookupRemoteVersion(wrapAsListener(v -> { + assertEquals(expected, v); + called.set(true); + })); assertTrue(called.get()); } public void testParseStartOk() throws Exception { AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall("start_ok.json").doStart(r -> { + sourceWithMockedRemoteCall("start_ok.json").doStart(wrapAsListener(r -> { assertFalse(r.isTimedOut()); assertEquals(FAKE_SCROLL_ID, 
r.getScrollId()); assertEquals(4, r.getTotalHits()); @@ -156,13 +170,13 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); called.set(true); - }); + })); assertTrue(called.get()); } public void testParseScrollOk() throws Exception { AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall("scroll_ok.json").doStartNextScroll("", timeValueMillis(0), r -> { + sourceWithMockedRemoteCall("scroll_ok.json").doStartNextScroll("", timeValueMillis(0), wrapAsListener(r -> { assertFalse(r.isTimedOut()); assertEquals(FAKE_SCROLL_ID, r.getScrollId()); assertEquals(4, r.getTotalHits()); @@ -174,7 +188,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); called.set(true); - }); + })); assertTrue(called.get()); } @@ -183,12 +197,12 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { */ public void testParseScrollFullyLoaded() throws Exception { AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall("scroll_fully_loaded.json").doStartNextScroll("", timeValueMillis(0), r -> { + sourceWithMockedRemoteCall("scroll_fully_loaded.json").doStartNextScroll("", timeValueMillis(0), wrapAsListener(r -> { assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); assertEquals("testrouting", r.getHits().get(0).getRouting()); called.set(true); - }); + })); assertTrue(called.get()); } @@ -197,12 +211,12 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { */ public void testParseScrollFullyLoadedFrom1_7() throws Exception { AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall("scroll_fully_loaded_1_7.json").doStartNextScroll("", 
timeValueMillis(0), r -> { + sourceWithMockedRemoteCall("scroll_fully_loaded_1_7.json").doStartNextScroll("", timeValueMillis(0), wrapAsListener(r -> { assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); assertEquals("testrouting", r.getHits().get(0).getRouting()); called.set(true); - }); + })); assertTrue(called.get()); } @@ -212,7 +226,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { */ public void testScanJumpStart() throws Exception { AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall("start_scan.json", "scroll_ok.json").doStart(r -> { + sourceWithMockedRemoteCall("start_scan.json", "scroll_ok.json").doStart(wrapAsListener(r -> { assertFalse(r.isTimedOut()); assertEquals(FAKE_SCROLL_ID, r.getScrollId()); assertEquals(4, r.getTotalHits()); @@ -224,7 +238,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); called.set(true); - }); + })); assertTrue(called.get()); } @@ -252,10 +266,10 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("{\"test\":\"test1\"}", r.getHits().get(0).getSource().utf8ToString()); called.set(true); }; - sourceWithMockedRemoteCall("rejection.json").doStart(checkResponse); + sourceWithMockedRemoteCall("rejection.json").doStart(wrapAsListener(checkResponse)); assertTrue(called.get()); called.set(false); - sourceWithMockedRemoteCall("rejection.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + sourceWithMockedRemoteCall("rejection.json").doStartNextScroll("scroll", timeValueMillis(0), wrapAsListener(checkResponse)); assertTrue(called.get()); } @@ -281,10 +295,11 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("{\"test\":\"test10000\"}", 
r.getHits().get(0).getSource().utf8ToString()); called.set(true); }; - sourceWithMockedRemoteCall("failure_with_status.json").doStart(checkResponse); + sourceWithMockedRemoteCall("failure_with_status.json").doStart(wrapAsListener(checkResponse)); assertTrue(called.get()); called.set(false); - sourceWithMockedRemoteCall("failure_with_status.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + sourceWithMockedRemoteCall("failure_with_status.json").doStartNextScroll("scroll", timeValueMillis(0), + wrapAsListener(checkResponse)); assertTrue(called.get()); } @@ -302,48 +317,51 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals(14, failure.getColumnNumber()); called.set(true); }; - sourceWithMockedRemoteCall("request_failure.json").doStart(checkResponse); + sourceWithMockedRemoteCall("request_failure.json").doStart(wrapAsListener(checkResponse)); assertTrue(called.get()); called.set(false); - sourceWithMockedRemoteCall("request_failure.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + sourceWithMockedRemoteCall("request_failure.json").doStartNextScroll("scroll", timeValueMillis(0), wrapAsListener(checkResponse)); assertTrue(called.get()); } public void testRetryAndSucceed() throws Exception { - AtomicBoolean called = new AtomicBoolean(); - Consumer checkResponse = r -> { - assertThat(r.getFailures(), hasSize(0)); - called.set(true); - }; retriesAllowed = between(1, Integer.MAX_VALUE); - sourceWithMockedRemoteCall("fail:rejection.json", "start_ok.json").doStart(checkResponse); - assertTrue(called.get()); + sourceWithMockedRemoteCall("fail:rejection.json", "start_ok.json", "fail:rejection.json", "scroll_ok.json").start(); + ScrollableHitSource.AsyncResponse response = responseQueue.poll(); + assertNotNull(response); + assertThat(response.response().getFailures(), empty()); + assertTrue(responseQueue.isEmpty()); assertEquals(1, retries); retries = 0; - called.set(false); - 
sourceWithMockedRemoteCall("fail:rejection.json", "scroll_ok.json").doStartNextScroll("scroll", timeValueMillis(0), - checkResponse); - assertTrue(called.get()); + response.done(timeValueMillis(0)); + response = responseQueue.poll(); + assertNotNull(response); + assertThat(response.response().getFailures(), empty()); + assertTrue(responseQueue.isEmpty()); assertEquals(1, retries); } public void testRetryUntilYouRunOutOfTries() throws Exception { - AtomicBoolean called = new AtomicBoolean(); - Consumer checkResponse = r -> called.set(true); retriesAllowed = between(0, 10); String[] paths = new String[retriesAllowed + 2]; for (int i = 0; i < retriesAllowed + 2; i++) { paths[i] = "fail:rejection.json"; } - RuntimeException e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall(paths).doStart(checkResponse)); + RuntimeException e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall(paths).start()); assertEquals("failed", e.getMessage()); - assertFalse(called.get()); + assertTrue(responseQueue.isEmpty()); assertEquals(retriesAllowed, retries); retries = 0; - e = expectThrows(RuntimeException.class, - () -> sourceWithMockedRemoteCall(paths).doStartNextScroll("scroll", timeValueMillis(0), checkResponse)); + String[] searchOKPaths = Stream.concat(Stream.of("start_ok.json"), Stream.of(paths)).toArray(String[]::new); + sourceWithMockedRemoteCall(searchOKPaths).start(); + ScrollableHitSource.AsyncResponse response = responseQueue.poll(); + assertNotNull(response); + assertThat(response.response().getFailures(), empty()); + assertTrue(responseQueue.isEmpty()); + + e = expectThrows(RuntimeException.class, () -> response.done(timeValueMillis(0))); assertEquals("failed", e.getMessage()); - assertFalse(called.get()); + assertTrue(responseQueue.isEmpty()); assertEquals(retriesAllowed, retries); } @@ -351,10 +369,10 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { String header = randomAlphaOfLength(5); 
threadPool.getThreadContext().putHeader("test", header); AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall("start_ok.json").doStart(r -> { + sourceWithMockedRemoteCall("start_ok.json").doStart(wrapAsListener(r -> { assertEquals(header, threadPool.getThreadContext().getHeader("test")); called.set(true); - }); + })); assertTrue(called.get()); } @@ -424,10 +442,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { }); RemoteScrollableHitSource source = sourceWithMockedClient(true, httpClient); - AtomicBoolean called = new AtomicBoolean(); - Consumer checkResponse = r -> called.set(true); - Throwable e = expectThrows(RuntimeException.class, - () -> source.doStartNextScroll(FAKE_SCROLL_ID, timeValueMillis(0), checkResponse)); + Throwable e = expectThrows(RuntimeException.class, source::start); // Unwrap the some artifacts from the test while (e.getMessage().equals("failed")) { e = e.getCause(); @@ -436,24 +451,24 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("Remote responded with a chunk that was too large. 
Use a smaller batch size.", e.getMessage()); // And that exception is reported as being caused by the underlying exception returned by the client assertSame(tooLong, e.getCause()); - assertFalse(called.get()); + assertTrue(responseQueue.isEmpty()); } - public void testNoContentTypeIsError() throws Exception { - Exception e = expectThrows(RuntimeException.class, () -> - sourceWithMockedRemoteCall(false, null, "main/0_20_5.json").lookupRemoteVersion(null)); - assertThat(e.getCause().getCause().getCause().getMessage(), containsString("Response didn't include Content-Type: body={")); + public void testNoContentTypeIsError() { + RuntimeException e = expectListenerFailure(RuntimeException.class, (RejectAwareActionListener listener) -> + sourceWithMockedRemoteCall(false, null, "main/0_20_5.json").lookupRemoteVersion(listener)); + assertThat(e.getMessage(), containsString("Response didn't include Content-Type: body={")); } - public void testInvalidJsonThinksRemoveIsNotES() throws IOException { - Exception e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall("some_text.txt").doStart(null)); + public void testInvalidJsonThinksRemoteIsNotES() throws IOException { + Exception e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall("some_text.txt").start()); assertEquals("Error parsing the response, remote is likely not an Elasticsearch instance", e.getCause().getCause().getCause().getMessage()); } - public void testUnexpectedJsonThinksRemoveIsNotES() throws IOException { + public void testUnexpectedJsonThinksRemoteIsNotES() throws IOException { // Use the response from a main action instead of a proper start response to generate a parse error - Exception e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall("main/2_3_3.json").doStart(null)); + Exception e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall("main/2_3_3.json").start()); assertEquals("Error parsing the response, remote is likely not an 
Elasticsearch instance", e.getCause().getCause().getCause().getMessage()); } @@ -486,8 +501,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { * synchronously rather than asynchronously. */ @SuppressWarnings("unchecked") - private RemoteScrollableHitSource sourceWithMockedRemoteCall(boolean mockRemoteVersion, ContentType contentType, String... paths) - throws Exception { + private RemoteScrollableHitSource sourceWithMockedRemoteCall(boolean mockRemoteVersion, ContentType contentType, String... paths) { URL[] resources = new URL[paths.length]; for (int i = 0; i < paths.length; i++) { resources[i] = Thread.currentThread().getContextClassLoader().getResource("responses/" + paths[i].replace("fail:", "")); @@ -533,8 +547,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { return sourceWithMockedClient(mockRemoteVersion, httpClient); } - private RemoteScrollableHitSource sourceWithMockedClient(boolean mockRemoteVersion, CloseableHttpAsyncClient httpClient) - throws Exception { + private RemoteScrollableHitSource sourceWithMockedClient(boolean mockRemoteVersion, CloseableHttpAsyncClient httpClient) { HttpAsyncClientBuilder clientBuilder = mock(HttpAsyncClientBuilder.class); when(clientBuilder.build()).thenReturn(httpClient); @@ -543,11 +556,11 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { TestRemoteScrollableHitSource hitSource = new TestRemoteScrollableHitSource(restClient) { @Override - void lookupRemoteVersion(Consumer onVersion) { + void lookupRemoteVersion(RejectAwareActionListener listener) { if (mockRemoteVersion) { - onVersion.accept(Version.CURRENT); + listener.onResponse(Version.CURRENT); } else { - super.lookupRemoteVersion(onVersion); + super.lookupRemoteVersion(listener); } } }; @@ -572,8 +585,30 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { private class TestRemoteScrollableHitSource extends RemoteScrollableHitSource { TestRemoteScrollableHitSource(RestClient client) { 
super(RemoteScrollableHitSourceTests.this.logger, backoff(), RemoteScrollableHitSourceTests.this.threadPool, - RemoteScrollableHitSourceTests.this::countRetry, r -> fail(), RemoteScrollableHitSourceTests.this::failRequest, client, - new BytesArray("{}"), RemoteScrollableHitSourceTests.this.searchRequest); + RemoteScrollableHitSourceTests.this::countRetry, + responseQueue::add, RemoteScrollableHitSourceTests.this::failRequest, + client, new BytesArray("{}"), RemoteScrollableHitSourceTests.this.searchRequest); } } + + private RejectAwareActionListener wrapAsListener(Consumer consumer) { + Consumer throwing = e -> { + throw new AssertionError(e); + }; + return RejectAwareActionListener.wrap(consumer::accept, throwing, throwing); + } + + @SuppressWarnings("unchecked") + private T expectListenerFailure(Class expectedException, Consumer> subject) { + AtomicReference exception = new AtomicReference<>(); + subject.accept(RejectAwareActionListener.wrap( + r -> fail(), + e -> { + assertThat(e, instanceOf(expectedException)); + assertTrue(exception.compareAndSet(null, (T) e)); + }, + e -> fail())); + assertNotNull(exception.get()); + return exception.get(); + } } diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index b4ec74355d2..e7fcec40675 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -52,6 +52,9 @@ test { * other if we allow them to set the number of available processors as it's set-once in Netty. */ systemProperty 'es.set.netty.runtime.available.processors', 'false' + + // Disable direct buffer pooling as it is disabled by default in Elasticsearch + systemProperty 'io.netty.allocator.numDirectArenas', '0' } integTestRunner { @@ -60,6 +63,9 @@ integTestRunner { * other if we allow them to set the number of available processors as it's set-once in Netty. 
*/ systemProperty 'es.set.netty.runtime.available.processors', 'false' + + // Disable direct buffer pooling as it is disabled by default in Elasticsearch + systemProperty 'io.netty.allocator.numDirectArenas', '0' } thirdPartyAudit { @@ -138,12 +144,13 @@ thirdPartyAudit { 'net.jpountz.lz4.LZ4Compressor', 'net.jpountz.lz4.LZ4Factory', 'net.jpountz.lz4.LZ4FastDecompressor', - 'net.jpountz.xxhash.StreamingXXHash32', + 'net.jpountz.xxhash.XXHash32', 'net.jpountz.xxhash.XXHashFactory', 'io.netty.internal.tcnative.CertificateCallback', 'io.netty.internal.tcnative.CertificateVerifier', 'io.netty.internal.tcnative.SessionTicketKey', 'io.netty.internal.tcnative.SniHostNameMatcher', + 'io.netty.internal.tcnative.SSL', 'org.eclipse.jetty.alpn.ALPN$ClientProvider', 'org.eclipse.jetty.alpn.ALPN$ServerProvider', 'org.eclipse.jetty.alpn.ALPN', diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.36.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.36.Final.jar.sha1 deleted file mode 100644 index 90895a5e168..00000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7f2db0921dd57df4db076229830ab09bba713aeb \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.38.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..5f99086039f --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +d16cf15d29c409987cecde77407fbb6f1e16d262 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.36.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.36.Final.jar.sha1 deleted file mode 100644 index efd6e5a3277..00000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8462116d327bb3d1ec24258071f2e7345a73dbfc \ No newline at end of file diff --git 
a/modules/transport-netty4/licenses/netty-codec-4.1.38.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..2db183d46ca --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +ccfbdfc727cbf702350572a0b12fe92185ebf162 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.36.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.36.Final.jar.sha1 deleted file mode 100644 index 4e86fef0e12..00000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -62b73d439dbddf3c0dde092b048580139695ab46 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.38.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..0a75cdf9957 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +4d55b3cdb74cd140d262de96987ebd369125a64c \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.36.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.36.Final.jar.sha1 deleted file mode 100644 index d9d50d776e9..00000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f38fde652a70ea579897edc80e52353e487ae6 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.38.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..e6ccf03b910 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +6f8aae763f743d91fb1ba1e9011dae0ef4f6ff34 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.36.Final.jar.sha1 
b/modules/transport-netty4/licenses/netty-handler-4.1.36.Final.jar.sha1 deleted file mode 100644 index d943140f363..00000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1c38a5920a10c01b1cce4cdc964447ec76abf1b5 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.38.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..10e2094ebbd --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +ebf1f2bd0dad5e16aa1fc48d32e5dbe507b38d53 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.36.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.36.Final.jar.sha1 deleted file mode 100644 index 1499233b60d..00000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e4d243fbf4e6837fa294f892bf97149e18129100 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.38.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..01512737b8d --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +b00be4aa309e9b56e498191aa8c73e4f393759ed \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.36.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.36.Final.jar.sha1 deleted file mode 100644 index f36c1b17d74..00000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8546e6be47be587acab86bbd106ca023678f07d9 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.38.Final.jar.sha1 
b/modules/transport-netty4/licenses/netty-transport-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..197ce53e032 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +cd8b612d5daa42d1be3bb3203e4857597d5db79b \ No newline at end of file diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 647a793ee47..769e883205d 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -20,6 +20,7 @@ package org.elasticsearch.http.netty4; import io.netty.bootstrap.ServerBootstrap; +import io.netty.buffer.ByteBufAllocator; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandler; @@ -62,6 +63,7 @@ import org.elasticsearch.http.HttpReadTimeoutException; import org.elasticsearch.http.HttpServerChannel; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.CopyBytesServerSocketChannel; import org.elasticsearch.transport.netty4.Netty4Utils; import java.net.InetSocketAddress; @@ -145,7 +147,7 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { private final int maxCompositeBufferComponents; - protected volatile ServerBootstrap serverBootstrap; + private volatile ServerBootstrap serverBootstrap; public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, NamedXContentRegistry xContentRegistry, Dispatcher dispatcher) { @@ -183,7 +185,15 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { serverBootstrap.group(new 
NioEventLoopGroup(workerCount, daemonThreadFactory(settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX))); - serverBootstrap.channel(NioServerSocketChannel.class); + + // If direct buffer pooling is disabled, use the CopyBytesServerSocketChannel which will create child + // channels of type CopyBytesSocketChannel. CopyBytesSocketChannel pool a single direct buffer + // per-event-loop thread to be used for IO operations. + if (ByteBufAllocator.DEFAULT.isDirectBufferPooled()) { + serverBootstrap.channel(NioServerSocketChannel.class); + } else { + serverBootstrap.channel(CopyBytesServerSocketChannel.class); + } serverBootstrap.childHandler(configureServerChannelHandler()); serverBootstrap.handler(new ServerChannelExceptionHandler(this)); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/CopyBytesServerSocketChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/CopyBytesServerSocketChannel.java new file mode 100644 index 00000000000..a4c6544916b --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/CopyBytesServerSocketChannel.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +/* + * Copyright 2012 The Netty Project + * + * The Netty Project licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport; + +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.util.internal.SocketUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.nio.channels.SocketChannel; +import java.util.List; + +/** + * This class is adapted from {@link NioServerSocketChannel} class in the Netty project. It overrides the + * channel read messages behavior to ensure that a {@link CopyBytesSocketChannel} socket channel is created. 
+ */ +public class CopyBytesServerSocketChannel extends NioServerSocketChannel { + + private static final Logger logger = LogManager.getLogger(CopyBytesServerSocketChannel.class); + + @Override + protected int doReadMessages(List buf) throws Exception { + SocketChannel ch = SocketUtils.accept(javaChannel()); + + try { + if (ch != null) { + buf.add(new CopyBytesSocketChannel(this, ch)); + return 1; + } + } catch (Throwable t) { + logger.warn("Failed to create a new channel from an accepted socket.", t); + + try { + ch.close(); + } catch (Throwable t2) { + logger.warn("Failed to close a socket.", t2); + } + } + + return 0; + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/CopyBytesSocketChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/CopyBytesSocketChannel.java new file mode 100644 index 00000000000..dd7ba056010 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/CopyBytesSocketChannel.java @@ -0,0 +1,191 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +/* + * Copyright 2012 The Netty Project + * + * The Netty Project licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport; + +import io.netty.buffer.ByteBuf; +import io.netty.channel.Channel; +import io.netty.channel.ChannelOutboundBuffer; +import io.netty.channel.RecvByteBufAllocator; +import io.netty.channel.socket.nio.NioSocketChannel; +import org.elasticsearch.common.SuppressForbidden; + +import java.nio.ByteBuffer; +import java.nio.channels.SocketChannel; + +import static io.netty.channel.internal.ChannelUtils.MAX_BYTES_PER_GATHERING_WRITE_ATTEMPTED_LOW_THRESHOLD; + + +/** + * This class is adapted from {@link NioSocketChannel} class in the Netty project. It overrides the channel + * read/write behavior to ensure that the bytes are always copied to a thread-local direct bytes buffer. This + * happens BEFORE the call to the Java {@link SocketChannel} is issued. + * + * The purpose of this class is to allow the disabling of netty direct buffer pooling while allowing us to + * control how bytes end up being copied to direct memory. If we simply disabled netty pooling, we would rely + * on the JDK's internal thread local buffer pooling. Instead, this class allows us to create a one thread + * local buffer with a defined size. 
+ */ +@SuppressForbidden(reason = "Channel#write") +public class CopyBytesSocketChannel extends NioSocketChannel { + + private static final int MAX_BYTES_PER_WRITE = 1 << 20; + + private static final ThreadLocal ioBuffer = ThreadLocal.withInitial(() -> ByteBuffer.allocateDirect(MAX_BYTES_PER_WRITE)); + private final WriteConfig writeConfig = new WriteConfig(); + + public CopyBytesSocketChannel() { + super(); + } + + CopyBytesSocketChannel(Channel parent, SocketChannel socket) { + super(parent, socket); + } + + @Override + protected void doWrite(ChannelOutboundBuffer in) throws Exception { + SocketChannel ch = javaChannel(); + int writeSpinCount = config().getWriteSpinCount(); + do { + if (in.isEmpty()) { + // All written so clear OP_WRITE + clearOpWrite(); + // Directly return here so incompleteWrite(...) is not called. + return; + } + + // Ensure the pending writes are made of ByteBufs only. + int maxBytesPerGatheringWrite = writeConfig.getMaxBytesPerGatheringWrite(); + ByteBuffer[] nioBuffers = in.nioBuffers(1024, maxBytesPerGatheringWrite); + int nioBufferCnt = in.nioBufferCount(); + + if (nioBufferCnt == 0) {// We have something else beside ByteBuffers to write so fallback to normal writes. + writeSpinCount -= doWrite0(in); + } else { + // Zero length buffers are not added to nioBuffers by ChannelOutboundBuffer, so there is no need + // to check if the total size of all the buffers is non-zero. 
+ ByteBuffer ioBuffer = getIoBuffer(); + copyBytes(nioBuffers, nioBufferCnt, ioBuffer); + ioBuffer.flip(); + + int attemptedBytes = ioBuffer.remaining(); + final int localWrittenBytes = ch.write(ioBuffer); + if (localWrittenBytes <= 0) { + incompleteWrite(true); + return; + } + adjustMaxBytesPerGatheringWrite(attemptedBytes, localWrittenBytes, maxBytesPerGatheringWrite); + setWrittenBytes(nioBuffers, localWrittenBytes); + in.removeBytes(localWrittenBytes); + --writeSpinCount; + } + } while (writeSpinCount > 0); + + incompleteWrite(writeSpinCount < 0); + } + + @Override + protected int doReadBytes(ByteBuf byteBuf) throws Exception { + final RecvByteBufAllocator.Handle allocHandle = unsafe().recvBufAllocHandle(); + allocHandle.attemptedBytesRead(byteBuf.writableBytes()); + ByteBuffer ioBuffer = getIoBuffer(); + int bytesRead = javaChannel().read(ioBuffer); + ioBuffer.flip(); + if (bytesRead > 0) { + byteBuf.writeBytes(ioBuffer); + } + return bytesRead; + } + + private static ByteBuffer getIoBuffer() { + ByteBuffer ioBuffer = CopyBytesSocketChannel.ioBuffer.get(); + ioBuffer.clear(); + return ioBuffer; + } + + private void adjustMaxBytesPerGatheringWrite(int attempted, int written, int oldMaxBytesPerGatheringWrite) { + // By default we track the SO_SNDBUF when ever it is explicitly set. However some OSes may dynamically change + // SO_SNDBUF (and other characteristics that determine how much data can be written at once) so we should try + // make a best effort to adjust as OS behavior changes. 
+ if (attempted == written) { + if (attempted << 1 > oldMaxBytesPerGatheringWrite) { + writeConfig.setMaxBytesPerGatheringWrite(attempted << 1); + } + } else if (attempted > MAX_BYTES_PER_GATHERING_WRITE_ATTEMPTED_LOW_THRESHOLD && written < attempted >>> 1) { + writeConfig.setMaxBytesPerGatheringWrite(attempted >>> 1); + } + } + + private static void copyBytes(ByteBuffer[] source, int nioBufferCnt, ByteBuffer destination) { + for (int i = 0; i < nioBufferCnt && destination.hasRemaining(); i++) { + ByteBuffer buffer = source[i]; + assert buffer.hasArray() : "Buffer must have heap array"; + int nBytesToCopy = Math.min(destination.remaining(), buffer.remaining()); + destination.put(buffer.array(), buffer.arrayOffset() + buffer.position(), nBytesToCopy); + } + } + + private static void setWrittenBytes(ByteBuffer[] source, int bytesWritten) { + for (int i = 0; bytesWritten > 0; i++) { + ByteBuffer buffer = source[i]; + int nBytes = Math.min(buffer.remaining(), bytesWritten); + buffer.position(buffer.position() + nBytes); + bytesWritten = bytesWritten - nBytes; + } + } + + private final class WriteConfig { + + private volatile int maxBytesPerGatheringWrite = MAX_BYTES_PER_WRITE; + + private WriteConfig() { + calculateMaxBytesPerGatheringWrite(); + } + + void setMaxBytesPerGatheringWrite(int maxBytesPerGatheringWrite) { + this.maxBytesPerGatheringWrite = Math.min(maxBytesPerGatheringWrite, MAX_BYTES_PER_WRITE); + } + + int getMaxBytesPerGatheringWrite() { + return maxBytesPerGatheringWrite; + } + + private void calculateMaxBytesPerGatheringWrite() { + // Multiply by 2 to give some extra space in case the OS can process write data faster than we can provide. 
+ int newSendBufferSize = config().getSendBufferSize() << 1; + if (newSendBufferSize > 0) { + setMaxBytesPerGatheringWrite(config().getSendBufferSize() << 1); + } + } + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index b7dcfe0cf45..af5e8b66fe7 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport.netty4; import io.netty.bootstrap.Bootstrap; import io.netty.bootstrap.ServerBootstrap; +import io.netty.buffer.ByteBufAllocator; import io.netty.channel.AdaptiveRecvByteBufAllocator; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; @@ -58,6 +59,8 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.core.internal.net.NetUtils; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.CopyBytesServerSocketChannel; +import org.elasticsearch.transport.CopyBytesSocketChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.TransportSettings; @@ -148,7 +151,14 @@ public class Netty4Transport extends TcpTransport { private Bootstrap createClientBootstrap(NioEventLoopGroup eventLoopGroup) { final Bootstrap bootstrap = new Bootstrap(); bootstrap.group(eventLoopGroup); - bootstrap.channel(NioSocketChannel.class); + + // If direct buffer pooling is disabled, use the CopyBytesSocketChannel which will pool a single + // direct buffer per-event-loop thread which will be used for IO operations. 
+ if (ByteBufAllocator.DEFAULT.isDirectBufferPooled()) { + bootstrap.channel(NioSocketChannel.class); + } else { + bootstrap.channel(CopyBytesSocketChannel.class); + } bootstrap.option(ChannelOption.TCP_NODELAY, TransportSettings.TCP_NO_DELAY.get(settings)); bootstrap.option(ChannelOption.SO_KEEPALIVE, TransportSettings.TCP_KEEP_ALIVE.get(settings)); @@ -205,7 +215,15 @@ public class Netty4Transport extends TcpTransport { final ServerBootstrap serverBootstrap = new ServerBootstrap(); serverBootstrap.group(eventLoopGroup); - serverBootstrap.channel(NioServerSocketChannel.class); + + // If direct buffer pooling is disabled, use the CopyBytesServerSocketChannel which will create child + // channels of type CopyBytesSocketChannel. CopyBytesSocketChannel pool a single direct buffer + // per-event-loop thread to be used for IO operations. + if (ByteBufAllocator.DEFAULT.isDirectBufferPooled()) { + serverBootstrap.channel(NioServerSocketChannel.class); + } else { + serverBootstrap.channel(CopyBytesServerSocketChannel.class); + } serverBootstrap.childHandler(getServerChannelInitializer(name)); serverBootstrap.handler(new ServerChannelExceptionHandler()); diff --git a/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleCatAction.java b/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleCatAction.java index 759e6bdcfee..79cb1a6f76d 100644 --- a/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleCatAction.java +++ b/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleCatAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.example.resthandler; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -35,8 
+34,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class ExampleCatAction extends AbstractCatAction { - ExampleCatAction(final Settings settings, final RestController controller) { - super(settings); + ExampleCatAction(final RestController controller) { controller.registerHandler(GET, "/_cat/example", this); controller.registerHandler(POST, "/_cat/example", this); } diff --git a/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java b/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java index 3ce6d8a42f5..725049c797e 100644 --- a/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java +++ b/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java @@ -46,6 +46,6 @@ public class ExampleRestHandlerPlugin extends Plugin implements ActionPlugin { final IndexNameExpressionResolver indexNameExpressionResolver, final Supplier nodesInCluster) { - return singletonList(new ExampleCatAction(settings, restController)); + return singletonList(new ExampleCatAction(restController)); } } diff --git a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java index 9916eb5dfed..5595ec6a838 100644 --- a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java +++ b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java @@ -36,7 +36,7 @@ import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessCo import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; import 
org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.user.User; import java.util.ArrayList; @@ -199,7 +199,7 @@ public class CustomAuthorizationEngine implements AuthorizationEngine { private GetUserPrivilegesResponse getUserPrivilegesResponse(boolean isSuperuser) { final Set cluster = isSuperuser ? Collections.singleton("ALL") : Collections.emptySet(); - final Set conditionalCluster = Collections.emptySet(); + final Set conditionalCluster = Collections.emptySet(); final Set indices = isSuperuser ? Collections.singleton(new Indices(Collections.singleton("*"), Collections.singleton("*"), Collections.emptySet(), Collections.emptySet(), true)) : Collections.emptySet(); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index e94ea5ef6c9..77f5bfcf72d 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -131,7 +131,7 @@ public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCa final String repositoryName = "testInsecureRepositoryCredentials"; createAndCheckTestRepository(repositoryName); final NodeClient nodeClient = internalCluster().getInstance(NodeClient.class); - final RestGetRepositoriesAction getRepoAction = new RestGetRepositoriesAction(Settings.EMPTY, mock(RestController.class), + final RestGetRepositoriesAction getRepoAction = new 
RestGetRepositoriesAction(mock(RestController.class), internalCluster().getInstance(SettingsFilter.class)); final RestRequest getRepoRequest = new FakeRestRequest(); getRepoRequest.params().put("repository", repositoryName); diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index f3b96a3ebe3..1c92623976f 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -108,7 +108,7 @@ thirdPartyAudit { 'net.jpountz.lz4.LZ4Compressor', 'net.jpountz.lz4.LZ4Factory', 'net.jpountz.lz4.LZ4FastDecompressor', - 'net.jpountz.xxhash.StreamingXXHash32', + 'net.jpountz.xxhash.XXHash32', 'net.jpountz.xxhash.XXHashFactory', 'org.eclipse.jetty.alpn.ALPN$ClientProvider', 'org.eclipse.jetty.alpn.ALPN$ServerProvider', diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.36.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.36.Final.jar.sha1 deleted file mode 100644 index 90895a5e168..00000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7f2db0921dd57df4db076229830ab09bba713aeb \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.38.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..5f99086039f --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +d16cf15d29c409987cecde77407fbb6f1e16d262 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.36.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.36.Final.jar.sha1 deleted file mode 100644 index efd6e5a3277..00000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8462116d327bb3d1ec24258071f2e7345a73dbfc \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.38.Final.jar.sha1 
b/plugins/transport-nio/licenses/netty-codec-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..2db183d46ca --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +ccfbdfc727cbf702350572a0b12fe92185ebf162 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.36.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.36.Final.jar.sha1 deleted file mode 100644 index 4e86fef0e12..00000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -62b73d439dbddf3c0dde092b048580139695ab46 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.38.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..0a75cdf9957 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +4d55b3cdb74cd140d262de96987ebd369125a64c \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.36.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.36.Final.jar.sha1 deleted file mode 100644 index d9d50d776e9..00000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f38fde652a70ea579897edc80e52353e487ae6 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.38.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..e6ccf03b910 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +6f8aae763f743d91fb1ba1e9011dae0ef4f6ff34 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.36.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.36.Final.jar.sha1 deleted file mode 100644 index 
d943140f363..00000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1c38a5920a10c01b1cce4cdc964447ec76abf1b5 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.38.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..10e2094ebbd --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +ebf1f2bd0dad5e16aa1fc48d32e5dbe507b38d53 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.36.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.36.Final.jar.sha1 deleted file mode 100644 index 1499233b60d..00000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e4d243fbf4e6837fa294f892bf97149e18129100 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.38.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..01512737b8d --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.38.Final.jar.sha1 @@ -0,0 +1 @@ +b00be4aa309e9b56e498191aa8c73e4f393759ed \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.36.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.36.Final.jar.sha1 deleted file mode 100644 index f36c1b17d74..00000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.36.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8546e6be47be587acab86bbd106ca023678f07d9 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.38.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.38.Final.jar.sha1 new file mode 100644 index 00000000000..197ce53e032 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.38.Final.jar.sha1 
@@ -0,0 +1 @@ +cd8b612d5daa42d1be3bb3203e4857597d5db79b \ No newline at end of file diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioGroupFactory.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioGroupFactory.java index 0c17c1d8b85..1967f81762c 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioGroupFactory.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioGroupFactory.java @@ -135,7 +135,7 @@ public final class NioGroupFactory { * Wraps the {@link RefCountedNioGroup}. Calls {@link RefCountedNioGroup#decRef()} on close. After close, * this wrapped instance can no longer be used. */ - private class WrappedNioGroup implements NioGroup { + private static class WrappedNioGroup implements NioGroup { private final RefCountedNioGroup refCountedNioGroup; diff --git a/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java b/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java index 8027eeb8948..8b7093515cd 100644 --- a/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java +++ b/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java @@ -49,7 +49,7 @@ public class DieWithDignityPlugin extends Plugin implements ActionPlugin { final SettingsFilter settingsFilter, final IndexNameExpressionResolver indexNameExpressionResolver, final Supplier nodesInCluster) { - return Collections.singletonList(new RestDieWithDignityAction(settings, restController)); + return Collections.singletonList(new RestDieWithDignityAction(restController)); } } diff --git a/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java b/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java index a3876599b7e..8f43d679084 100644 --- a/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java +++ 
b/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java @@ -20,17 +20,13 @@ package org.elasticsearch; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import java.io.IOException; - public class RestDieWithDignityAction extends BaseRestHandler { - RestDieWithDignityAction(final Settings settings, final RestController restController) { - super(settings); + RestDieWithDignityAction(final RestController restController) { restController.registerHandler(RestRequest.Method.GET, "/_die_with_dignity", this); } @@ -40,7 +36,7 @@ public class RestDieWithDignityAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { throw new OutOfMemoryError("die with dignity"); } diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 1913c86fc9c..ddd626ca5fa 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -39,8 +39,8 @@ testClusters.'remote-cluster' { } task mixedClusterTest(type: RestIntegTestTask) { - useCluster testClusters.'remote-cluster' runner { + useCluster testClusters.'remote-cluster' dependsOn 'remote-cluster' systemProperty 'tests.rest.suite', 'multi_cluster' } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java index 27b2f18b091..3a6c4148239 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java @@ 
-76,7 +76,6 @@ public class TestDeprecationHeaderRestAction extends BaseRestHandler { private final Settings settings; public TestDeprecationHeaderRestAction(Settings settings, RestController controller) { - super(settings); this.settings = settings; controller.registerAsDeprecatedHandler(RestRequest.Method.GET, "/_test_cluster/deprecated_settings", this, diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderPlugin.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderPlugin.java index 421c6d9bade..0f9b6f4db19 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderPlugin.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderPlugin.java @@ -40,6 +40,6 @@ public class TestResponseHeaderPlugin extends Plugin implements ActionPlugin { public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { - return singletonList(new TestResponseHeaderRestAction(settings, restController)); + return singletonList(new TestResponseHeaderRestAction(restController)); } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderRestAction.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderRestAction.java index c7411f79869..c51947f7328 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderRestAction.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderRestAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.http; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import 
org.elasticsearch.rest.RestController; @@ -27,11 +26,9 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; -import java.io.IOException; - public class TestResponseHeaderRestAction extends BaseRestHandler { - public TestResponseHeaderRestAction(Settings settings, RestController controller) { - super(settings); + + public TestResponseHeaderRestAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_protected", this); } @@ -41,7 +38,7 @@ public class TestResponseHeaderRestAction extends BaseRestHandler { } @Override - public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { if ("password".equals(request.header("Secret"))) { RestResponse response = new BytesRestResponse(RestStatus.OK, "Access granted"); response.addHeader("Secret", "granted"); diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats b/qa/vagrant/bats/default/10_basic.bats similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats rename to qa/vagrant/bats/default/10_basic.bats diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/20_tar_bootstrap_password.bats b/qa/vagrant/bats/default/20_tar_bootstrap_password.bats similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/20_tar_bootstrap_password.bats rename to qa/vagrant/bats/default/20_tar_bootstrap_password.bats diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/25_package_bootstrap_password.bats b/qa/vagrant/bats/default/25_package_bootstrap_password.bats similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/25_package_bootstrap_password.bats rename to qa/vagrant/bats/default/25_package_bootstrap_password.bats diff --git 
a/x-pack/qa/vagrant/src/test/resources/packaging/tests/30_tar_setup_passwords.bats b/qa/vagrant/bats/default/30_tar_setup_passwords.bats similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/30_tar_setup_passwords.bats rename to qa/vagrant/bats/default/30_tar_setup_passwords.bats diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/35_package_setup_passwords.bats b/qa/vagrant/bats/default/35_package_setup_passwords.bats similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/35_package_setup_passwords.bats rename to qa/vagrant/bats/default/35_package_setup_passwords.bats diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/40_tar_certgen.bats b/qa/vagrant/bats/default/40_tar_certgen.bats similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/40_tar_certgen.bats rename to qa/vagrant/bats/default/40_tar_certgen.bats diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/45_package_certgen.bats b/qa/vagrant/bats/default/45_package_certgen.bats similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/45_package_certgen.bats rename to qa/vagrant/bats/default/45_package_certgen.bats diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash b/qa/vagrant/bats/default/bootstrap_password.bash similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash rename to qa/vagrant/bats/default/bootstrap_password.bash diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/certgen.bash b/qa/vagrant/bats/default/certgen.bash similarity index 98% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/certgen.bash rename to qa/vagrant/bats/default/certgen.bash index 13aed28b4c1..b52e286742c 100644 --- a/x-pack/qa/vagrant/src/test/resources/packaging/tests/certgen.bash +++ b/qa/vagrant/bats/default/certgen.bash @@ -15,7 +15,6 
@@ instances="/tmp/instances.yml" certificates="/tmp/certificates.zip" setup() { - export PACKAGE_NAME="elasticsearch" if [ $BATS_TEST_NUMBER == 1 ]; then clean_before_test fi @@ -176,6 +175,7 @@ NEW_PASS } @test "[$GROUP] create instances file" { + rm -f /tmp/instances.yml run sudo -E -u $MASTER_USER bash <<"CREATE_INSTANCES_FILE" cat > /tmp/instances.yml <<- EOF instances: @@ -426,3 +426,8 @@ DATA_SETTINGS false } } + +@test "[$GROUP] remove Elasticsearch" { + # NOTE: this must be the last test, so that running oss tests does not already have the default distro still installed + clean_before_test +} diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash b/qa/vagrant/bats/default/setup_passwords.bash similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash rename to qa/vagrant/bats/default/setup_passwords.bash diff --git a/qa/vagrant/src/test/resources/packaging/tests/25_tar_plugins.bats b/qa/vagrant/bats/oss/25_tar_plugins.bats similarity index 100% rename from qa/vagrant/src/test/resources/packaging/tests/25_tar_plugins.bats rename to qa/vagrant/bats/oss/25_tar_plugins.bats diff --git a/qa/vagrant/src/test/resources/packaging/tests/50_modules_and_plugins.bats b/qa/vagrant/bats/oss/50_modules_and_plugins.bats similarity index 100% rename from qa/vagrant/src/test/resources/packaging/tests/50_modules_and_plugins.bats rename to qa/vagrant/bats/oss/50_modules_and_plugins.bats diff --git a/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats b/qa/vagrant/bats/oss/70_sysv_initd.bats similarity index 100% rename from qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats rename to qa/vagrant/bats/oss/70_sysv_initd.bats diff --git a/qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats b/qa/vagrant/bats/oss/80_upgrade.bats similarity index 98% rename from qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats rename to 
qa/vagrant/bats/oss/80_upgrade.bats index 697e6456d1f..0c80751a58f 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/80_upgrade.bats +++ b/qa/vagrant/bats/oss/80_upgrade.bats @@ -122,6 +122,7 @@ setup() { curl -s localhost:9200/library2/book/1?pretty | grep Darkness } -@test "[UPGRADE] stop version under test" { +@test "[UPGRADE] cleanup version under test" { stop_elasticsearch_service + clean_before_test } diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/bats/oss/module_and_plugin_test_cases.bash similarity index 100% rename from qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash rename to qa/vagrant/bats/oss/module_and_plugin_test_cases.bash diff --git a/qa/vagrant/src/test/resources/packaging/utils/modules.bash b/qa/vagrant/bats/utils/modules.bash similarity index 100% rename from qa/vagrant/src/test/resources/packaging/utils/modules.bash rename to qa/vagrant/bats/utils/modules.bash diff --git a/qa/vagrant/src/test/resources/packaging/utils/packages.bash b/qa/vagrant/bats/utils/packages.bash similarity index 99% rename from qa/vagrant/src/test/resources/packaging/utils/packages.bash rename to qa/vagrant/bats/utils/packages.bash index 5df432c35b3..2da3a02c543 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/packages.bash +++ b/qa/vagrant/bats/utils/packages.bash @@ -48,7 +48,7 @@ export_elasticsearch_paths() { export ESDATA="/var/lib/elasticsearch" export ESLOG="/var/log/elasticsearch" export ESENVFILE=$(env_file) - export PACKAGE_NAME=${PACKAGE_NAME:-"elasticsearch-oss"} + export PACKAGE_NAME } diff --git a/qa/vagrant/src/test/resources/packaging/utils/plugins.bash b/qa/vagrant/bats/utils/plugins.bash similarity index 100% rename from qa/vagrant/src/test/resources/packaging/utils/plugins.bash rename to qa/vagrant/bats/utils/plugins.bash diff --git a/qa/vagrant/src/test/resources/packaging/utils/tar.bash b/qa/vagrant/bats/utils/tar.bash similarity index 100% 
rename from qa/vagrant/src/test/resources/packaging/utils/tar.bash rename to qa/vagrant/bats/utils/tar.bash diff --git a/qa/vagrant/src/test/resources/packaging/utils/utils.bash b/qa/vagrant/bats/utils/utils.bash similarity index 100% rename from qa/vagrant/src/test/resources/packaging/utils/utils.bash rename to qa/vagrant/bats/utils/utils.bash diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash b/qa/vagrant/bats/utils/xpack.bash similarity index 100% rename from x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash rename to qa/vagrant/bats/utils/xpack.bash diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index f5cfcdda03c..7cbc475e933 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -18,10 +18,7 @@ */ plugins { - id 'java' id 'elasticsearch.build' - id 'elasticsearch.vagrantsupport' - id 'elasticsearch.vagrant' } dependencies { @@ -36,34 +33,14 @@ dependencies { compile "commons-logging:commons-logging:${versions.commonslogging}" compile project(':libs:elasticsearch-core') - - // pulls in the jar built by this project and its dependencies - packagingTest project(path: project.path, configuration: 'runtime') } -List plugins = [] -for (Project subproj : project.rootProject.subprojects) { - if (subproj.parent.path == ':plugins' || subproj.path.equals(':example-plugins:custom-settings')) { - // add plugin as a dep - dependencies { - packaging project(path: "${subproj.path}", configuration: 'zip') - } - plugins.add(subproj.name) - } -} -plugins = plugins.toSorted() +configurations.create('testClasses') -setupPackagingTest { - doFirst { - File expectedPlugins = file('build/plugins/expected') - expectedPlugins.parentFile.mkdirs() - expectedPlugins.setText(plugins.join('\n'), 'UTF-8') - } -} - -esvagrant { - testClass 'org.elasticsearch.packaging.PackagingTests' -} +String classesDir = project.file(project.sourceSets.main.output.classesDirs.singleFile).toString() +artifacts.add('testClasses', 
project.layout.projectDirectory.dir(classesDir)) { + builtBy tasks.named('testClasses') +} forbiddenApisMain { replaceSignatureFiles 'jdk-signatures' @@ -91,3 +68,61 @@ tasks.thirdPartyAudit.ignoreMissingClasses ( 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener' ) + +boolean sample = project.properties.get('vagrant.boxes') != 'all' + +subprojects { Project platformProject -> + apply plugin: 'elasticsearch.distro-test' + apply plugin: 'java' + + configurations.create('testClasses') + dependencies { + testClasses project(path: ':qa:vagrant', configuration: 'testClasses') + testRuntime project(path: ':qa:vagrant', configuration: 'runtime') + } + + tasks.named('destructiveDistroTest') { + testClassesDirs += project.files(configurations.testClasses.singleFile) + } + + // TODO: remove this property lookup once CI is switched to use an explicit task for the sample tests + boolean allBoxes = project.properties.get('vagrant.boxes', '') == 'all' + if (allBoxes || ['centos-7', 'ubuntu-1604'].contains(platformProject.name)) { + tasks.register('packagingTest') { + dependsOn 'distroTest', 'batsTest.oss', 'batsTest.default' + } + } + + vagrant { + hostEnv 'VAGRANT_PROJECT_DIR', platformProject.projectDir.absolutePath + } + +} + +configurations { + allPlugins +} + +List plugins = [] +for (Project subproj : project.rootProject.subprojects) { + if (subproj.parent.path == ':plugins' || subproj.path.equals(':example-plugins:custom-settings')) { + // add plugin as a dep + dependencies { + allPlugins project(path: "${subproj.path}", configuration: 'zip') + } + plugins.add(subproj.name) + } +} +plugins = plugins.toSorted() + +copyPackagingArchives { + from configurations.allPlugins + doLast { + // TODO: this was copied from the old way bats tests get the plugins list. 
we should pass + // this in differently when converting to java tests + File expectedPlugins = file('build/plugins/expected') + expectedPlugins.parentFile.mkdirs() + expectedPlugins.setText(plugins.join('\n'), 'UTF-8') + } +} + diff --git a/qa/vagrant/centos-6/build.gradle b/qa/vagrant/centos-6/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/centos-7/build.gradle b/qa/vagrant/centos-7/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/debian-8/build.gradle b/qa/vagrant/debian-8/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/debian-9/build.gradle b/qa/vagrant/debian-9/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/fedora-28/build.gradle b/qa/vagrant/fedora-28/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/fedora-29/build.gradle b/qa/vagrant/fedora-29/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/oel-6/build.gradle b/qa/vagrant/oel-6/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/oel-7/build.gradle b/qa/vagrant/oel-7/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/opensuse-42/build.gradle b/qa/vagrant/opensuse-42/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/sles-12/build.gradle b/qa/vagrant/sles-12/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java deleted file mode 100644 index 06c978b823a..00000000000 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.packaging; - -import org.elasticsearch.packaging.test.DefaultDebBasicTests; -import org.elasticsearch.packaging.test.DefaultDebPreservationTests; -import org.elasticsearch.packaging.test.DefaultLinuxTarTests; -import org.elasticsearch.packaging.test.DefaultNoJdkDebBasicTests; -import org.elasticsearch.packaging.test.DefaultNoJdkLinuxTarTests; -import org.elasticsearch.packaging.test.DefaultNoJdkRpmBasicTests; -import org.elasticsearch.packaging.test.DefaultNoJdkWindowsZipTests; -import org.elasticsearch.packaging.test.DefaultRpmBasicTests; -import org.elasticsearch.packaging.test.DefaultRpmPreservationTests; -import org.elasticsearch.packaging.test.DefaultWindowsServiceTests; -import org.elasticsearch.packaging.test.DefaultWindowsZipTests; -import org.elasticsearch.packaging.test.OssDebBasicTests; -import org.elasticsearch.packaging.test.OssDebPreservationTests; -import org.elasticsearch.packaging.test.OssLinuxTarTests; -import org.elasticsearch.packaging.test.OssNoJdkDebBasicTests; -import org.elasticsearch.packaging.test.OssNoJdkLinuxTarTests; -import org.elasticsearch.packaging.test.OssNoJdkRpmBasicTests; -import org.elasticsearch.packaging.test.OssNoJdkWindowsZipTests; -import org.elasticsearch.packaging.test.OssRpmBasicTests; 
-import org.elasticsearch.packaging.test.OssRpmPreservationTests; -import org.elasticsearch.packaging.test.OssWindowsServiceTests; -import org.elasticsearch.packaging.test.OssWindowsZipTests; -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; - -@RunWith(Suite.class) -@SuiteClasses({ - DefaultLinuxTarTests.class, - OssLinuxTarTests.class, - DefaultWindowsZipTests.class, - OssWindowsZipTests.class, - DefaultRpmBasicTests.class, - OssRpmBasicTests.class, - DefaultDebBasicTests.class, - OssDebBasicTests.class, - DefaultDebPreservationTests.class, - OssDebPreservationTests.class, - DefaultRpmPreservationTests.class, - OssRpmPreservationTests.class, - DefaultWindowsServiceTests.class, - OssWindowsServiceTests.class, - DefaultNoJdkLinuxTarTests.class, - OssNoJdkLinuxTarTests.class, - DefaultNoJdkWindowsZipTests.class, - OssNoJdkWindowsZipTests.class, - DefaultNoJdkRpmBasicTests.class, - OssNoJdkRpmBasicTests.class, - DefaultNoJdkDebBasicTests.class, - OssNoJdkDebBasicTests.class -}) -public class PackagingTests {} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index e5cbbcc60cc..d6ba57ba075 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -20,7 +20,6 @@ package org.elasticsearch.packaging.test; import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; -import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.http.client.fluent.Request; import org.elasticsearch.packaging.util.Archives; import org.elasticsearch.packaging.util.Distribution; @@ -36,7 +35,6 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.stream.Stream; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; 
import static org.elasticsearch.packaging.util.Archives.ARCHIVE_OWNER; import static org.elasticsearch.packaging.util.Archives.installArchive; import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation; @@ -225,17 +223,16 @@ public abstract class ArchiveTestCase extends PackagingTestCase { final Shell sh = new Shell(); // Create temporary directory with a space and link to java binary. // Use it as java_home - String nameWithSpace = RandomStrings.randomAsciiAlphanumOfLength(getRandom(), 10) + "java home"; - String test_java_home = FileUtils.mkdir(Paths.get("/home",ARCHIVE_OWNER, nameWithSpace)).toAbsolutePath().toString(); + String testJavaHome = FileUtils.mkdir(Paths.get("/home", ARCHIVE_OWNER, "java home")).toAbsolutePath().toString(); try { final String systemJavaHome = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim(); final String java = systemJavaHome + "/bin/java"; - sh.run("mkdir -p \"" + test_java_home + "/bin\""); - sh.run("ln -s \"" + java + "\" \"" + test_java_home + "/bin/java\""); - sh.run("chown -R " + ARCHIVE_OWNER + ":" + ARCHIVE_OWNER + " \"" + test_java_home + "\""); + sh.run("mkdir -p \"" + testJavaHome + "/bin\""); + sh.run("ln -s \"" + java + "\" \"" + testJavaHome + "/bin/java\""); + sh.run("chown -R " + ARCHIVE_OWNER + ":" + ARCHIVE_OWNER + " \"" + testJavaHome + "\""); - sh.getEnv().put("JAVA_HOME", test_java_home); + sh.getEnv().put("JAVA_HOME", testJavaHome); //verify ES can start, stop and run plugin list Archives.runElasticsearch(installation, sh); @@ -246,7 +243,7 @@ public abstract class ArchiveTestCase extends PackagingTestCase { Result result = sh.run(pluginListCommand); assertThat(result.exitCode, equalTo(0)); } finally { - FileUtils.rm(Paths.get("\"" + test_java_home + "\"")); + FileUtils.rm(Paths.get(testJavaHome)); } }); } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java index 
ca6c3e48d41..857fad55eea 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java @@ -45,10 +45,8 @@ import java.util.StringJoiner; import java.util.zip.GZIPInputStream; import java.util.zip.ZipException; -import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.core.IsNot.not; -import static org.hamcrest.text.IsEmptyString.isEmptyOrNullString; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -228,9 +226,7 @@ public class FileUtils { } public static Path getPackagingArchivesDir() { - String fromEnv = System.getenv("PACKAGING_ARCHIVES"); - assertThat(fromEnv, not(isEmptyOrNullString())); - return Paths.get(fromEnv); + return Paths.get(""); // tests are started in the packaging archives dir, ie the empty relative path } public static Path getDistributionFile(Distribution distribution) { diff --git a/qa/vagrant/ubuntu-1604/build.gradle b/qa/vagrant/ubuntu-1604/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/ubuntu-1804/build.gradle b/qa/vagrant/ubuntu-1804/build.gradle new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/vagrant/windows-2012r2/build.gradle b/qa/vagrant/windows-2012r2/build.gradle new file mode 100644 index 00000000000..f49de70eae7 --- /dev/null +++ b/qa/vagrant/windows-2012r2/build.gradle @@ -0,0 +1,11 @@ + +String boxId = project.properties.get('vagrant.windows-2012r2.id') +if (boxId != null) { + vagrant { + hostEnv 'VAGRANT_WINDOWS_2012R2_BOX', boxId + } +} else { + tasks.named('distroTest').configure { + onlyIf { false } + } +} diff --git a/qa/vagrant/windows-2016/build.gradle b/qa/vagrant/windows-2016/build.gradle new file mode 100644 index 00000000000..e0cfa1c6875 --- /dev/null +++ b/qa/vagrant/windows-2016/build.gradle @@ -0,0 +1,11 @@ + +String boxId = 
project.properties.get('vagrant.windows-2016.id') +if (boxId != null) { + vagrant { + hostEnv 'VAGRANT_WINDOWS_2016_BOX', boxId + } +} else { + tasks.named('distroTest').configure { + onlyIf { true } + } +} diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle index 2fb586ed369..5f1b50b2410 100644 --- a/qa/wildfly/build.gradle +++ b/qa/wildfly/build.gradle @@ -1,6 +1,7 @@ import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.VersionProperties import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask import java.nio.charset.StandardCharsets import java.nio.file.Files @@ -87,7 +88,7 @@ task deploy(type: Copy) { into "${wildflyInstall}/standalone/deployments" } -task writeElasticsearchProperties { +task writeElasticsearchProperties(type: DefaultTestClustersTask) { onlyIf { !Os.isFamily(Os.FAMILY_WINDOWS) } useCluster testClusters.integTest dependsOn deploy diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 6450622388f..3abfe246b12 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -556,140 +556,140 @@ public class ActionModule extends AbstractModule { catActions.add((AbstractCatAction) a); } }; - registerHandler.accept(new RestAddVotingConfigExclusionAction(settings, restController)); - registerHandler.accept(new RestClearVotingConfigExclusionsAction(settings, restController)); - registerHandler.accept(new RestMainAction(settings, restController)); - registerHandler.accept(new RestNodesInfoAction(settings, restController, settingsFilter)); - registerHandler.accept(new RestRemoteClusterInfoAction(settings, restController)); - registerHandler.accept(new RestNodesStatsAction(settings, restController)); - registerHandler.accept(new RestNodesUsageAction(settings, restController)); - 
registerHandler.accept(new RestNodesHotThreadsAction(settings, restController)); - registerHandler.accept(new RestClusterAllocationExplainAction(settings, restController)); - registerHandler.accept(new RestClusterStatsAction(settings, restController)); - registerHandler.accept(new RestClusterStateAction(settings, restController, settingsFilter)); - registerHandler.accept(new RestClusterHealthAction(settings, restController)); - registerHandler.accept(new RestClusterUpdateSettingsAction(settings, restController)); + registerHandler.accept(new RestAddVotingConfigExclusionAction(restController)); + registerHandler.accept(new RestClearVotingConfigExclusionsAction(restController)); + registerHandler.accept(new RestMainAction(restController)); + registerHandler.accept(new RestNodesInfoAction(restController, settingsFilter)); + registerHandler.accept(new RestRemoteClusterInfoAction(restController)); + registerHandler.accept(new RestNodesStatsAction(restController)); + registerHandler.accept(new RestNodesUsageAction(restController)); + registerHandler.accept(new RestNodesHotThreadsAction(restController)); + registerHandler.accept(new RestClusterAllocationExplainAction(restController)); + registerHandler.accept(new RestClusterStatsAction(restController)); + registerHandler.accept(new RestClusterStateAction(restController, settingsFilter)); + registerHandler.accept(new RestClusterHealthAction(restController)); + registerHandler.accept(new RestClusterUpdateSettingsAction(restController)); registerHandler.accept(new RestClusterGetSettingsAction(settings, restController, clusterSettings, settingsFilter)); - registerHandler.accept(new RestClusterRerouteAction(settings, restController, settingsFilter)); - registerHandler.accept(new RestClusterSearchShardsAction(settings, restController)); - registerHandler.accept(new RestPendingClusterTasksAction(settings, restController)); - registerHandler.accept(new RestPutRepositoryAction(settings, restController)); - 
registerHandler.accept(new RestGetRepositoriesAction(settings, restController, settingsFilter)); - registerHandler.accept(new RestDeleteRepositoryAction(settings, restController)); - registerHandler.accept(new RestVerifyRepositoryAction(settings, restController)); - registerHandler.accept(new RestGetSnapshotsAction(settings, restController)); - registerHandler.accept(new RestCreateSnapshotAction(settings, restController)); - registerHandler.accept(new RestRestoreSnapshotAction(settings, restController)); - registerHandler.accept(new RestDeleteSnapshotAction(settings, restController)); - registerHandler.accept(new RestSnapshotsStatusAction(settings, restController)); - registerHandler.accept(new RestGetIndicesAction(settings, restController)); - registerHandler.accept(new RestIndicesStatsAction(settings, restController)); - registerHandler.accept(new RestIndicesSegmentsAction(settings, restController)); - registerHandler.accept(new RestIndicesShardStoresAction(settings, restController)); - registerHandler.accept(new RestGetAliasesAction(settings, restController)); - registerHandler.accept(new RestIndexDeleteAliasesAction(settings, restController)); - registerHandler.accept(new RestIndexPutAliasAction(settings, restController)); - registerHandler.accept(new RestIndicesAliasesAction(settings, restController)); - registerHandler.accept(new RestCreateIndexAction(settings, restController)); - registerHandler.accept(new RestResizeHandler.RestShrinkIndexAction(settings, restController)); - registerHandler.accept(new RestResizeHandler.RestSplitIndexAction(settings, restController)); - registerHandler.accept(new RestResizeHandler.RestCloneIndexAction(settings, restController)); - registerHandler.accept(new RestRolloverIndexAction(settings, restController)); - registerHandler.accept(new RestDeleteIndexAction(settings, restController)); - registerHandler.accept(new RestCloseIndexAction(settings, restController)); - registerHandler.accept(new RestOpenIndexAction(settings, 
restController)); + registerHandler.accept(new RestClusterRerouteAction(restController, settingsFilter)); + registerHandler.accept(new RestClusterSearchShardsAction(restController)); + registerHandler.accept(new RestPendingClusterTasksAction(restController)); + registerHandler.accept(new RestPutRepositoryAction(restController)); + registerHandler.accept(new RestGetRepositoriesAction(restController, settingsFilter)); + registerHandler.accept(new RestDeleteRepositoryAction(restController)); + registerHandler.accept(new RestVerifyRepositoryAction(restController)); + registerHandler.accept(new RestGetSnapshotsAction(restController)); + registerHandler.accept(new RestCreateSnapshotAction(restController)); + registerHandler.accept(new RestRestoreSnapshotAction(restController)); + registerHandler.accept(new RestDeleteSnapshotAction(restController)); + registerHandler.accept(new RestSnapshotsStatusAction(restController)); + registerHandler.accept(new RestGetIndicesAction(restController)); + registerHandler.accept(new RestIndicesStatsAction(restController)); + registerHandler.accept(new RestIndicesSegmentsAction(restController)); + registerHandler.accept(new RestIndicesShardStoresAction(restController)); + registerHandler.accept(new RestGetAliasesAction(restController)); + registerHandler.accept(new RestIndexDeleteAliasesAction(restController)); + registerHandler.accept(new RestIndexPutAliasAction(restController)); + registerHandler.accept(new RestIndicesAliasesAction(restController)); + registerHandler.accept(new RestCreateIndexAction(restController)); + registerHandler.accept(new RestResizeHandler.RestShrinkIndexAction(restController)); + registerHandler.accept(new RestResizeHandler.RestSplitIndexAction(restController)); + registerHandler.accept(new RestResizeHandler.RestCloneIndexAction(restController)); + registerHandler.accept(new RestRolloverIndexAction(restController)); + registerHandler.accept(new RestDeleteIndexAction(restController)); + registerHandler.accept(new 
RestCloseIndexAction(restController)); + registerHandler.accept(new RestOpenIndexAction(restController)); - registerHandler.accept(new RestUpdateSettingsAction(settings, restController)); - registerHandler.accept(new RestGetSettingsAction(settings, restController)); + registerHandler.accept(new RestUpdateSettingsAction(restController)); + registerHandler.accept(new RestGetSettingsAction(restController)); - registerHandler.accept(new RestAnalyzeAction(settings, restController)); - registerHandler.accept(new RestGetIndexTemplateAction(settings, restController)); - registerHandler.accept(new RestPutIndexTemplateAction(settings, restController)); - registerHandler.accept(new RestDeleteIndexTemplateAction(settings, restController)); + registerHandler.accept(new RestAnalyzeAction(restController)); + registerHandler.accept(new RestGetIndexTemplateAction(restController)); + registerHandler.accept(new RestPutIndexTemplateAction(restController)); + registerHandler.accept(new RestDeleteIndexTemplateAction(restController)); - registerHandler.accept(new RestPutMappingAction(settings, restController)); - registerHandler.accept(new RestGetMappingAction(settings, restController)); - registerHandler.accept(new RestGetFieldMappingAction(settings, restController)); + registerHandler.accept(new RestPutMappingAction(restController)); + registerHandler.accept(new RestGetMappingAction(restController)); + registerHandler.accept(new RestGetFieldMappingAction(restController)); - registerHandler.accept(new RestRefreshAction(settings, restController)); - registerHandler.accept(new RestFlushAction(settings, restController)); - registerHandler.accept(new RestSyncedFlushAction(settings, restController)); - registerHandler.accept(new RestForceMergeAction(settings, restController)); - registerHandler.accept(new RestUpgradeAction(settings, restController)); - registerHandler.accept(new RestUpgradeStatusAction(settings, restController)); - registerHandler.accept(new 
RestClearIndicesCacheAction(settings, restController)); + registerHandler.accept(new RestRefreshAction(restController)); + registerHandler.accept(new RestFlushAction(restController)); + registerHandler.accept(new RestSyncedFlushAction(restController)); + registerHandler.accept(new RestForceMergeAction(restController)); + registerHandler.accept(new RestUpgradeAction(restController)); + registerHandler.accept(new RestUpgradeStatusAction(restController)); + registerHandler.accept(new RestClearIndicesCacheAction(restController)); - registerHandler.accept(new RestIndexAction(settings, restController)); - registerHandler.accept(new RestGetAction(settings, restController)); - registerHandler.accept(new RestGetSourceAction(settings, restController)); + registerHandler.accept(new RestIndexAction(restController)); + registerHandler.accept(new RestGetAction(restController)); + registerHandler.accept(new RestGetSourceAction(restController)); registerHandler.accept(new RestMultiGetAction(settings, restController)); - registerHandler.accept(new RestDeleteAction(settings, restController)); - registerHandler.accept(new RestCountAction(settings, restController)); - registerHandler.accept(new RestTermVectorsAction(settings, restController)); - registerHandler.accept(new RestMultiTermVectorsAction(settings, restController)); + registerHandler.accept(new RestDeleteAction(restController)); + registerHandler.accept(new RestCountAction(restController)); + registerHandler.accept(new RestTermVectorsAction(restController)); + registerHandler.accept(new RestMultiTermVectorsAction(restController)); registerHandler.accept(new RestBulkAction(settings, restController)); - registerHandler.accept(new RestUpdateAction(settings, restController)); + registerHandler.accept(new RestUpdateAction(restController)); - registerHandler.accept(new RestSearchAction(settings, restController)); - registerHandler.accept(new RestSearchScrollAction(settings, restController)); - registerHandler.accept(new 
RestClearScrollAction(settings, restController)); + registerHandler.accept(new RestSearchAction(restController)); + registerHandler.accept(new RestSearchScrollAction(restController)); + registerHandler.accept(new RestClearScrollAction(restController)); registerHandler.accept(new RestMultiSearchAction(settings, restController)); - registerHandler.accept(new RestValidateQueryAction(settings, restController)); + registerHandler.accept(new RestValidateQueryAction(restController)); - registerHandler.accept(new RestExplainAction(settings, restController)); + registerHandler.accept(new RestExplainAction(restController)); - registerHandler.accept(new RestRecoveryAction(settings, restController)); + registerHandler.accept(new RestRecoveryAction(restController)); - registerHandler.accept(new RestReloadSecureSettingsAction(settings, restController)); + registerHandler.accept(new RestReloadSecureSettingsAction(restController)); // Scripts API - registerHandler.accept(new RestGetStoredScriptAction(settings, restController)); - registerHandler.accept(new RestPutStoredScriptAction(settings, restController)); - registerHandler.accept(new RestDeleteStoredScriptAction(settings, restController)); + registerHandler.accept(new RestGetStoredScriptAction(restController)); + registerHandler.accept(new RestPutStoredScriptAction(restController)); + registerHandler.accept(new RestDeleteStoredScriptAction(restController)); - registerHandler.accept(new RestFieldCapabilitiesAction(settings, restController)); + registerHandler.accept(new RestFieldCapabilitiesAction(restController)); // Tasks API - registerHandler.accept(new RestListTasksAction(settings, restController, nodesInCluster)); - registerHandler.accept(new RestGetTaskAction(settings, restController)); - registerHandler.accept(new RestCancelTasksAction(settings, restController, nodesInCluster)); + registerHandler.accept(new RestListTasksAction(restController, nodesInCluster)); + registerHandler.accept(new 
RestGetTaskAction(restController)); + registerHandler.accept(new RestCancelTasksAction(restController, nodesInCluster)); // Ingest API - registerHandler.accept(new RestPutPipelineAction(settings, restController)); - registerHandler.accept(new RestGetPipelineAction(settings, restController)); - registerHandler.accept(new RestDeletePipelineAction(settings, restController)); - registerHandler.accept(new RestSimulatePipelineAction(settings, restController)); + registerHandler.accept(new RestPutPipelineAction(restController)); + registerHandler.accept(new RestGetPipelineAction(restController)); + registerHandler.accept(new RestDeletePipelineAction(restController)); + registerHandler.accept(new RestSimulatePipelineAction(restController)); // CAT API - registerHandler.accept(new RestAllocationAction(settings, restController)); - registerHandler.accept(new RestShardsAction(settings, restController)); - registerHandler.accept(new RestMasterAction(settings, restController)); - registerHandler.accept(new RestNodesAction(settings, restController)); - registerHandler.accept(new RestTasksAction(settings, restController, nodesInCluster)); - registerHandler.accept(new RestIndicesAction(settings, restController)); - registerHandler.accept(new RestSegmentsAction(settings, restController)); + registerHandler.accept(new RestAllocationAction(restController)); + registerHandler.accept(new RestShardsAction(restController)); + registerHandler.accept(new RestMasterAction(restController)); + registerHandler.accept(new RestNodesAction(restController)); + registerHandler.accept(new RestTasksAction(restController, nodesInCluster)); + registerHandler.accept(new RestIndicesAction(restController)); + registerHandler.accept(new RestSegmentsAction(restController)); // Fully qualified to prevent interference with rest.action.count.RestCountAction - registerHandler.accept(new org.elasticsearch.rest.action.cat.RestCountAction(settings, restController)); + registerHandler.accept(new 
org.elasticsearch.rest.action.cat.RestCountAction(restController)); // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction - registerHandler.accept(new RestCatRecoveryAction(settings, restController)); - registerHandler.accept(new RestHealthAction(settings, restController)); - registerHandler.accept(new org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction(settings, restController)); - registerHandler.accept(new RestAliasAction(settings, restController)); - registerHandler.accept(new RestThreadPoolAction(settings, restController)); - registerHandler.accept(new RestPluginsAction(settings, restController)); - registerHandler.accept(new RestFielddataAction(settings, restController)); - registerHandler.accept(new RestNodeAttrsAction(settings, restController)); - registerHandler.accept(new RestRepositoriesAction(settings, restController)); - registerHandler.accept(new RestSnapshotAction(settings, restController)); - registerHandler.accept(new RestTemplatesAction(settings, restController)); + registerHandler.accept(new RestCatRecoveryAction(restController)); + registerHandler.accept(new RestHealthAction(restController)); + registerHandler.accept(new org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction(restController)); + registerHandler.accept(new RestAliasAction(restController)); + registerHandler.accept(new RestThreadPoolAction(restController)); + registerHandler.accept(new RestPluginsAction(restController)); + registerHandler.accept(new RestFielddataAction(restController)); + registerHandler.accept(new RestNodeAttrsAction(restController)); + registerHandler.accept(new RestRepositoriesAction(restController)); + registerHandler.accept(new RestSnapshotAction(restController)); + registerHandler.accept(new RestTemplatesAction(restController)); for (ActionPlugin plugin : actionPlugins) { for (RestHandler handler : plugin.getRestHandlers(settings, restController, clusterSettings, indexScopedSettings, settingsFilter, 
indexNameExpressionResolver, nodesInCluster)) { registerHandler.accept(handler); } } - registerHandler.accept(new RestCatAction(settings, restController, catActions)); + registerHandler.accept(new RestCatAction(restController, catActions)); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 5745e50df2b..11f09f3c7bb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -49,7 +49,6 @@ import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.io.InputStream; -import java.io.UncheckedIOException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -249,16 +248,9 @@ public class CreateIndexRequest extends AcknowledgedRequest * @param xContentType the content type of the mapping source */ private CreateIndexRequest mapping(String type, BytesReference source, XContentType xContentType) { - if (mappings.containsKey(type)) { - throw new IllegalStateException("mappings for type \"" + type + "\" were already defined"); - } Objects.requireNonNull(xContentType); - try { - mappings.put(type, XContentHelper.convertToJson(source, false, false, xContentType)); - return this; - } catch (IOException e) { - throw new UncheckedIOException("failed to convert to json", e); - } + Map mappingAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); + return mapping(type, mappingAsMap); } /** @@ -294,9 +286,10 @@ public class CreateIndexRequest extends AcknowledgedRequest source = MapBuilder.newMapBuilder().put(type, source).map(); } try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - return 
mapping(type, builder); + mappings.put(type, Strings.toString(builder)); + return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index a4a0c4e8721..b632b8bcb7b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -82,7 +82,7 @@ public class RolloverRequest extends AcknowledgedRequest implem throw new IllegalArgumentException("The mapping definition cannot be nested under a type " + "[" + MapperService.SINGLE_MAPPING_NAME + "] unless include_type_name is set to true."); } - request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, parser.map()); + request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappings); } }, CreateIndexRequest.MAPPINGS, ObjectParser.ValueType.OBJECT); PARSER.declareField((parser, request, context) -> request.createIndexRequest.aliases(parser.map()), diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 36b48fabf6c..c0187ec87f6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -49,7 +49,6 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ 
-284,12 +283,8 @@ public class PutIndexTemplateRequest extends MasterNodeRequest mappingAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); + return mapping(type, mappingAsMap); } /** @@ -304,9 +299,10 @@ public class PutIndexTemplateRequest extends MasterNodeRequestnewMapBuilder().put(type, source).map(); } try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - return mapping(type, builder); + mappings.put(type, Strings.toString(builder)); + return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index f06af1a20fe..75b9402cd13 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -143,7 +143,7 @@ public abstract class TransportInstanceSingleOperationAction< throw blockException; } } - request.concreteIndex(indexNameExpressionResolver.concreteSingleIndex(clusterState, request).getName()); + request.concreteIndex(indexNameExpressionResolver.concreteWriteIndex(clusterState, request).getName()); resolveRequest(clusterState, request); blockException = checkRequestBlock(clusterState, request); if (blockException != null) { diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java index 017231c179a..a4f31fb3ab1 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -141,7 +141,7 @@ public class ClusterFormationFailureHelper { final String discoveryWillContinueDescription = String.format(Locale.ROOT, "discovery will continue using %s from hosts providers and %s from last-known cluster state; " + "node term %d, last-accepted version %d in term %d", - resolvedAddresses, clusterStateNodes, currentTerm, clusterState.version(), clusterState.term()); + resolvedAddresses, clusterStateNodes, currentTerm, clusterState.getVersionOrMetaDataVersion(), clusterState.term()); final String discoveryStateIgnoringQuorum = String.format(Locale.ROOT, "have discovered %s; %s", foundPeers, discoveryWillContinueDescription); @@ -191,7 +191,8 @@ public class ClusterFormationFailureHelper { foundPeers.forEach(voteCollection::addVote); final String isQuorumOrNot = electionStrategy.isElectionQuorum(clusterState.nodes().getLocalNode(), currentTerm, clusterState.term(), - clusterState.version(), clusterState.getLastCommittedConfiguration(), clusterState.getLastAcceptedConfiguration(), + clusterState.getVersionOrMetaDataVersion(), clusterState.getLastCommittedConfiguration(), + clusterState.getLastAcceptedConfiguration(), voteCollection) ? 
"is a quorum" : "is not a quorum"; return String.format(Locale.ROOT, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 9ef0e23b024..142837fe5fc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -301,7 +301,10 @@ public class MetaDataCreateIndexService { List templateNames = new ArrayList<>(); for (Map.Entry entry : request.mappings().entrySet()) { - mappings.put(entry.getKey(), MapperService.parseMapping(xContentRegistry, entry.getValue())); + Map mapping = MapperService.parseMapping(xContentRegistry, entry.getValue()); + assert mapping.size() == 1 : mapping; + assert entry.getKey().equals(mapping.keySet().iterator().next()) : entry.getKey() + " != " + mapping; + mappings.put(entry.getKey(), mapping); } final Index recoverFromIndex = request.recoverFrom(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index feec1833443..4dc38c97a8e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -265,6 +265,13 @@ public class MetaDataMappingService { // try and parse it (no need to add it here) so we can bail early in case of parsing exception DocumentMapper newMapper; DocumentMapper existingMapper = mapperService.documentMapper(); + + String typeForUpdate = mapperService.getTypeForUpdate(mappingType, mappingUpdateSource); + if (existingMapper != null && existingMapper.type().equals(typeForUpdate) == false) { + throw new IllegalArgumentException("Rejecting mapping update to [" + mapperService.index().getName() + + "] 
as the final mapping would have more than 1 type: " + Arrays.asList(existingMapper.type(), typeForUpdate)); + } + if (MapperService.DEFAULT_MAPPING.equals(request.type())) { // _default_ types do not go through merging, but we do test the new settings. Also don't apply the old default newMapper = mapperService.parse(request.type(), mappingUpdateSource, false); @@ -299,14 +306,7 @@ public class MetaDataMappingService { final Index index = indexMetaData.getIndex(); final MapperService mapperService = indexMapperServices.get(index); - // If the _type name is _doc and there is no _doc top-level key then this means that we - // are handling a typeless call. In such a case, we override _doc with the actual type - // name in the mappings. This allows to use typeless APIs on typed indices. - String typeForUpdate = mappingType; // the type to use to apply the mapping update - if (isMappingSourceTyped(request.type(), mappingUpdateSource) == false) { - typeForUpdate = mapperService.resolveDocumentType(mappingType); - } - + String typeForUpdate = mapperService.getTypeForUpdate(mappingType, mappingUpdateSource); CompressedXContent existingSource = null; DocumentMapper existingMapper = mapperService.documentMapper(typeForUpdate); if (existingMapper != null) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index d4cc38f0b95..ca8a24ea93d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -301,6 +301,19 @@ public final class IndexSettings { public static final Setting INDEX_SEARCH_THROTTLED = Setting.boolSetting("index.search.throttled", false, Property.IndexScope, Property.PrivateIndex, Property.Dynamic); + /** + * Determines a balance between file-based and operations-based peer recoveries. 
The number of operations that will be used in an + * operations-based peer recovery is limited to this proportion of the total number of documents in the shard (including deleted + * documents) on the grounds that a file-based peer recovery may copy all of the documents in the shard over to the new peer, but is + * significantly faster than replaying the missing operations on the peer, so once a peer falls far enough behind the primary it makes + * more sense to copy all the data over again instead of replaying history. + * + * Defaults to retaining history for up to 10% of the documents in the shard. This can only be changed in tests, since this setting is + * intentionally unregistered. + */ + public static final Setting FILE_BASED_RECOVERY_THRESHOLD_SETTING + = Setting.doubleSetting("index.recovery.file_based_threshold", 0.1d, 0.0d, Setting.Property.IndexScope); + private final Index index; private final Version version; private final Logger logger; diff --git a/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java b/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java index 313598e1d8e..8166a0d37d4 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.ObjectIntHashMap; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexDeletionPolicy; +import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.Directory; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.translog.Translog; @@ -43,7 +44,7 @@ import java.util.function.LongSupplier; * In particular, this policy will delete index commits whose max sequence number is at most * the current global checkpoint except the index commit which has the highest max sequence number among 
those. */ -public final class CombinedDeletionPolicy extends IndexDeletionPolicy { +public class CombinedDeletionPolicy extends IndexDeletionPolicy { private final Logger logger; private final TranslogDeletionPolicy translogDeletionPolicy; private final SoftDeletesPolicy softDeletesPolicy; @@ -51,6 +52,7 @@ public final class CombinedDeletionPolicy extends IndexDeletionPolicy { private final ObjectIntHashMap snapshottedCommits; // Number of snapshots held against each commit point. private volatile IndexCommit safeCommit; // the most recent safe commit point - its max_seqno at most the persisted global checkpoint. private volatile IndexCommit lastCommit; // the most recent commit point + private volatile SafeCommitInfo safeCommitInfo = SafeCommitInfo.EMPTY; CombinedDeletionPolicy(Logger logger, TranslogDeletionPolicy translogDeletionPolicy, SoftDeletesPolicy softDeletesPolicy, LongSupplier globalCheckpointSupplier) { @@ -62,7 +64,7 @@ public final class CombinedDeletionPolicy extends IndexDeletionPolicy { } @Override - public synchronized void onInit(List commits) throws IOException { + public void onInit(List commits) throws IOException { assert commits.isEmpty() == false : "index is opened, but we have no commits"; onCommit(commits); if (safeCommit != commits.get(commits.size() - 1)) { @@ -74,16 +76,32 @@ public final class CombinedDeletionPolicy extends IndexDeletionPolicy { } @Override - public synchronized void onCommit(List commits) throws IOException { - final int keptPosition = indexOfKeptCommits(commits, globalCheckpointSupplier.getAsLong()); - lastCommit = commits.get(commits.size() - 1); - safeCommit = commits.get(keptPosition); - for (int i = 0; i < keptPosition; i++) { - if (snapshottedCommits.containsKey(commits.get(i)) == false) { - deleteCommit(commits.get(i)); + public void onCommit(List commits) throws IOException { + final IndexCommit safeCommit; + synchronized (this) { + final int keptPosition = indexOfKeptCommits(commits, 
globalCheckpointSupplier.getAsLong()); + this.safeCommitInfo = SafeCommitInfo.EMPTY; + this.lastCommit = commits.get(commits.size() - 1); + this.safeCommit = commits.get(keptPosition); + for (int i = 0; i < keptPosition; i++) { + if (snapshottedCommits.containsKey(commits.get(i)) == false) { + deleteCommit(commits.get(i)); + } } + updateRetentionPolicy(); + safeCommit = this.safeCommit; } - updateRetentionPolicy(); + + assert Thread.holdsLock(this) == false : "should not block concurrent acquire or relesase"; + safeCommitInfo = new SafeCommitInfo(Long.parseLong( + safeCommit.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)), getDocCountOfCommit(safeCommit)); + + // This is protected from concurrent calls by a lock on the IndexWriter, but this assertion makes sure that we notice if that ceases + // to be true in future. It is not disastrous if safeCommitInfo refers to an older safeCommit, it just means that we might retain a + // bit more history and do a few more ops-based recoveries than we would otherwise. + final IndexCommit newSafeCommit = this.safeCommit; + assert safeCommit == newSafeCommit + : "onCommit called concurrently? " + safeCommit.getGeneration() + " vs " + newSafeCommit.getGeneration(); } private void deleteCommit(IndexCommit commit) throws IOException { @@ -109,6 +127,14 @@ public final class CombinedDeletionPolicy extends IndexDeletionPolicy { Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY))); } + protected int getDocCountOfCommit(IndexCommit indexCommit) throws IOException { + return SegmentInfos.readCommit(indexCommit.getDirectory(), indexCommit.getSegmentsFileName()).totalMaxDoc(); + } + + SafeCommitInfo getSafeCommitInfo() { + return safeCommitInfo; + } + /** * Captures the most recent commit point {@link #lastCommit} or the most recent safe commit point {@link #safeCommit}. * Index files of the capturing commit point won't be released until the commit reference is closed. 
diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 7a50d3471a3..f26e5b8ad1f 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1122,6 +1122,11 @@ public abstract class Engine implements Closeable { */ public abstract IndexCommitRef acquireSafeIndexCommit() throws EngineException; + /** + * @return a summary of the contents of the current safe commit + */ + public abstract SafeCommitInfo getSafeCommitInfo(); + /** * If the specified throwable contains a fatal error in the throwable graph, such a fatal error will be thrown. Callers should ensure * that there are no catch statements that would catch an error in the stack as the fatal error here should go uncaught and be handled diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 5a8662845c4..b83c0a70178 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -2008,6 +2008,11 @@ public class InternalEngine extends Engine { } } + @Override + public SafeCommitInfo getSafeCommitInfo() { + return combinedDeletionPolicy.getSafeCommitInfo(); + } + private boolean failOnTragicEvent(AlreadyClosedException ex) { final boolean engineFailed; // if we are already closed due to some tragic exception diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index 30b3d0221f3..ded39c51b37 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -77,6 +77,7 @@ public class ReadOnlyEngine extends Engine { private 
final Lock indexWriterLock; private final DocsStats docsStats; private final RamAccountingRefreshListener refreshListener; + private final SafeCommitInfo safeCommitInfo; protected volatile TranslogStats translogStats; @@ -120,6 +121,7 @@ public class ReadOnlyEngine extends Engine { assert translogStats != null || obtainLock : "mutiple translogs instances should not be opened at the same time"; this.translogStats = translogStats != null ? translogStats : translogStats(config, lastCommittedSegmentInfos); this.indexWriterLock = indexWriterLock; + this.safeCommitInfo = new SafeCommitInfo(seqNoStats.getLocalCheckpoint(), lastCommittedSegmentInfos.totalMaxDoc()); success = true; } finally { if (success == false) { @@ -420,6 +422,11 @@ public class ReadOnlyEngine extends Engine { return acquireLastIndexCommit(false); } + @Override + public SafeCommitInfo getSafeCommitInfo() { + return safeCommitInfo; + } + @Override public void activateThrottling() { } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformTaskState.java b/server/src/main/java/org/elasticsearch/index/engine/SafeCommitInfo.java similarity index 56% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformTaskState.java rename to server/src/main/java/org/elasticsearch/index/engine/SafeCommitInfo.java index 7235a0aed28..37461177c93 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformTaskState.java +++ b/server/src/main/java/org/elasticsearch/index/engine/SafeCommitInfo.java @@ -7,7 +7,7 @@ * not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an @@ -16,19 +16,22 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.index.engine; -package org.elasticsearch.client.dataframe.transforms; +import org.elasticsearch.index.seqno.SequenceNumbers; -import java.util.Locale; +/** + * Information about the safe commit, for making decisions about recoveries. + */ +public class SafeCommitInfo { -public enum DataFrameTransformTaskState { - STOPPED, STARTED, FAILED; + public final long localCheckpoint; + public final int docCount; - public static DataFrameTransformTaskState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); + public SafeCommitInfo(long localCheckpoint, int docCount) { + this.localCheckpoint = localCheckpoint; + this.docCount = docCount; } - public String value() { - return name().toLowerCase(Locale.ROOT); - } + public static final SafeCommitInfo EMPTY = new SafeCommitInfo(SequenceNumbers.NO_OPS_PERFORMED, 0); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 06ca4a8044c..57d8f697090 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; @@ -72,7 +71,6 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import 
java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; @@ -452,14 +450,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { results.put(DEFAULT_MAPPING, defaultMapper); } - { - if (mapper != null && this.mapper != null && Objects.equals(this.mapper.type(), mapper.type()) == false) { - throw new IllegalArgumentException( - "Rejecting mapping update to [" + index().getName() + "] as the final mapping would have more than 1 type: " - + Arrays.asList(this.mapper.type(), mapper.type())); - } - } - DocumentMapper newMapper = null; if (mapper != null) { // check naming @@ -707,6 +697,15 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return isMappingSourceTyped(type, root); } + /** + * If the _type name is _doc and there is no _doc top-level key then this means that we + * are handling a typeless call. In such a case, we override _doc with the actual type + * name in the mappings. This allows to use typeless APIs on typed indices. + */ + public String getTypeForUpdate(String type, CompressedXContent mappingSource) { + return isMappingSourceTyped(type, mappingSource) == false ? resolveDocumentType(type) : type; + } + /** * Resolves a type from a mapping-related request into the type that should be used when * merging and updating mappings. 
diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 2fe7c6b08c3..a3fbf8f20d4 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -35,7 +35,6 @@ import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -44,7 +43,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; -import java.util.Iterator; import java.util.List; import java.util.function.Consumer; @@ -69,23 +67,39 @@ public class ClientScrollableHitSource extends ScrollableHitSource { } @Override - public void doStart(Consumer onResponse) { + public void doStart(RejectAwareActionListener searchListener) { if (logger.isDebugEnabled()) { logger.debug("executing initial scroll against {}{}", isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices(), isEmpty(firstSearchRequest.types()) ? 
"" : firstSearchRequest.types()); } - searchWithRetry(listener -> client.search(firstSearchRequest, listener), r -> consume(r, onResponse)); + client.search(firstSearchRequest, wrapListener(searchListener)); } @Override - protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse) { - searchWithRetry(listener -> { - SearchScrollRequest request = new SearchScrollRequest(); - // Add the wait time into the scroll timeout so it won't timeout while we wait for throttling - request.scrollId(scrollId).scroll(timeValueNanos(firstSearchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos())); - client.searchScroll(request, listener); - }, r -> consume(r, onResponse)); + protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, RejectAwareActionListener searchListener) { + SearchScrollRequest request = new SearchScrollRequest(); + // Add the wait time into the scroll timeout so it won't timeout while we wait for throttling + request.scrollId(scrollId).scroll(timeValueNanos(firstSearchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos())); + client.searchScroll(request, wrapListener(searchListener)); + } + + private ActionListener wrapListener(RejectAwareActionListener searchListener) { + return new ActionListener() { + @Override + public void onResponse(SearchResponse searchResponse) { + searchListener.onResponse(wrapSearchResponse(searchResponse)); + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrap(e, EsRejectedExecutionException.class) != null) { + searchListener.onRejection(e); + } else { + searchListener.onFailure(e); + } + } + }; } @Override @@ -116,69 +130,7 @@ public class ClientScrollableHitSource extends ScrollableHitSource { onCompletion.run(); } - /** - * Run a search action and call onResponse when a the response comes in, retrying if the action fails with an exception caused by - * rejected execution. 
- * - * @param action consumes a listener and starts the action. The listener it consumes is rigged to retry on failure. - * @param onResponse consumes the response from the action - */ - private void searchWithRetry(Consumer> action, Consumer onResponse) { - /* - * RetryHelper is both an AbstractRunnable and an ActionListener - meaning that it both starts the search and - * handles reacts to the results. The complexity is all in onFailure which either adapts the failure to the "fail" listener or - * retries the search. Since both AbstractRunnable and ActionListener define the onFailure method it is called for either failure - * to run the action (either while running or before starting) and for failure on the response from the action. - */ - class RetryHelper extends AbstractRunnable implements ActionListener { - private final Iterator retries = backoffPolicy.iterator(); - /** - * The runnable to run that retries in the same context as the original call. - */ - private Runnable retryWithContext; - private volatile int retryCount = 0; - - @Override - protected void doRun() throws Exception { - action.accept(this); - } - - @Override - public void onResponse(SearchResponse response) { - onResponse.accept(response); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrap(e, EsRejectedExecutionException.class) != null) { - if (retries.hasNext()) { - retryCount += 1; - TimeValue delay = retries.next(); - logger.trace(() -> new ParameterizedMessage("retrying rejected search after [{}]", delay), e); - countSearchRetry.run(); - threadPool.schedule(retryWithContext, delay, ThreadPool.Names.SAME); - } else { - logger.warn(() -> new ParameterizedMessage( - "giving up on search because we retried [{}] times without success", retryCount), e); - fail.accept(e); - } - } else { - logger.warn("giving up on search because it failed with a non-retryable exception", e); - fail.accept(e); - } - } - } - RetryHelper helper = new RetryHelper(); - // Wrap the 
helper in a runnable that preserves the current context so we keep it on retry. - helper.retryWithContext = threadPool.getThreadContext().preserveContext(helper); - helper.run(); - } - - private void consume(SearchResponse response, Consumer onResponse) { - onResponse.accept(wrap(response)); - } - - private Response wrap(SearchResponse response) { + private Response wrapSearchResponse(SearchResponse response) { List failures; if (response.getShardFailures() == null) { failures = emptyList(); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RejectAwareActionListener.java b/server/src/main/java/org/elasticsearch/index/reindex/RejectAwareActionListener.java new file mode 100644 index 00000000000..68b8421cab6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/reindex/RejectAwareActionListener.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.CheckedConsumer; + +import java.util.function.Consumer; + +// public for testing +public interface RejectAwareActionListener extends ActionListener { + void onRejection(Exception e); + + /** + * Return a new listener that delegates failure/reject to errorDelegate but forwards response to responseHandler + */ + static RejectAwareActionListener withResponseHandler(RejectAwareActionListener errorDelegate, Consumer responseHandler) { + return new RejectAwareActionListener() { + @Override + public void onRejection(Exception e) { + errorDelegate.onRejection(e); + } + + @Override + public void onResponse(X t) { + responseHandler.accept(t); + } + + @Override + public void onFailure(Exception e) { + errorDelegate.onFailure(e); + } + }; + } + + /** + * Similar to {@link ActionListener#wrap(CheckedConsumer, Consumer)}, extended to have handler for onRejection. + */ + static RejectAwareActionListener wrap(CheckedConsumer onResponse, + Consumer onFailure, Consumer onRejection) { + return new RejectAwareActionListener() { + @Override + public void onResponse(Response response) { + try { + onResponse.accept(response); + } catch (Exception e) { + onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + onFailure.accept(e); + } + + @Override + public void onRejection(Exception e) { + onRejection.accept(e); + } + }; + } + +} + diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RetryListener.java b/server/src/main/java/org/elasticsearch/index/reindex/RetryListener.java new file mode 100644 index 00000000000..d197e1bed12 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/reindex/RetryListener.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.Iterator; +import java.util.function.Consumer; + +class RetryListener implements RejectAwareActionListener { + private final Logger logger; + private final Iterator retries; + private final ThreadPool threadPool; + private final Consumer> retryScrollHandler; + private final ActionListener delegate; + private int retryCount = 0; + + RetryListener(Logger logger, ThreadPool threadPool, BackoffPolicy backoffPolicy, + Consumer> retryScrollHandler, + ActionListener delegate) { + this.logger = logger; + this.threadPool = threadPool; + this.retries = backoffPolicy.iterator(); + this.retryScrollHandler = retryScrollHandler; + this.delegate = delegate; + } + + @Override + public void onResponse(ScrollableHitSource.Response response) { + delegate.onResponse(response); + } + + @Override + public void onFailure(Exception e) { + delegate.onFailure(e); + } + + @Override + public void onRejection(Exception e) { + if 
(retries.hasNext()) { + retryCount += 1; + TimeValue delay = retries.next(); + logger.trace(() -> new ParameterizedMessage("retrying rejected search after [{}]", delay), e); + schedule(() -> retryScrollHandler.accept(this), delay); + } else { + logger.warn(() -> new ParameterizedMessage( + "giving up on search because we retried [{}] times without success", retryCount), e); + delegate.onFailure(e); + } + } + + private void schedule(Runnable runnable, TimeValue delay) { + // schedule does not preserve context so have to do this manually + threadPool.schedule(threadPool.preserveContext(runnable), delay, ThreadPool.Names.SAME); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java index 2620b4d524d..269bed2ddc8 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.reindex; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.Nullable; @@ -71,23 +72,28 @@ public abstract class ScrollableHitSource { } public final void start() { - doStart(response -> { - setScroll(response.getScrollId()); - logger.debug("scroll returned [{}] documents with a scroll id of [{}]", response.getHits().size(), response.getScrollId()); - onResponse(response); - }); + doStart(createRetryListener(this::doStart)); } - protected abstract void doStart(Consumer onResponse); - final void startNextScroll(TimeValue extraKeepAlive) { - doStartNextScroll(scrollId.get(), extraKeepAlive, response -> { - setScroll(response.getScrollId()); - onResponse(response); - }); + private RetryListener 
createRetryListener(Consumer> retryHandler) { + Consumer> countingRetryHandler = listener -> { + countSearchRetry.run(); + retryHandler.accept(listener); + }; + return new RetryListener(logger, threadPool, backoffPolicy, countingRetryHandler, + ActionListener.wrap(this::onResponse, fail)); + } + + // package private for tests. + final void startNextScroll(TimeValue extraKeepAlive) { + startNextScroll(extraKeepAlive, createRetryListener(listener -> startNextScroll(extraKeepAlive, listener))); + } + private void startNextScroll(TimeValue extraKeepAlive, RejectAwareActionListener searchListener) { + doStartNextScroll(scrollId.get(), extraKeepAlive, searchListener); } - protected abstract void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse); private void onResponse(Response response) { + logger.debug("scroll returned [{}] documents with a scroll id of [{}]", response.getHits().size(), response.getScrollId()); setScroll(response.getScrollId()); onResponse.accept(new AsyncResponse() { private AtomicBoolean alreadyDone = new AtomicBoolean(); @@ -113,6 +119,12 @@ public abstract class ScrollableHitSource { } } + // following is the SPI to be implemented. + protected abstract void doStart(RejectAwareActionListener searchListener); + + protected abstract void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, + RejectAwareActionListener searchListener); + /** * Called to clear a scroll id. 
* diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 465abdd0e27..1ef7c27c517 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.gateway.WriteStateException; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.engine.SafeCommitInfo; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ReplicationGroup; @@ -57,6 +58,7 @@ import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.LongConsumer; import java.util.function.LongSupplier; +import java.util.function.Supplier; import java.util.function.ToLongFunction; import java.util.stream.Collectors; import java.util.stream.LongStream; @@ -210,6 +212,17 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L */ private boolean hasAllPeerRecoveryRetentionLeases; + /** + * Supplies information about the current safe commit which may be used to expire peer-recovery retention leases. + */ + private final Supplier safeCommitInfoSupplier; + + /** + * Threshold for expiring peer-recovery retention leases and falling back to file-based recovery. See + * {@link IndexSettings#FILE_BASED_RECOVERY_THRESHOLD_SETTING}. + */ + private final double fileBasedRecoveryThreshold; + /** * Get all retention leases tracked on this shard. 
* @@ -237,6 +250,8 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L final long retentionLeaseMillis = indexSettings.getRetentionLeaseMillis(); final Set leaseIdsForCurrentPeers = routingTable.assignedShards().stream().map(ReplicationTracker::getPeerRecoveryRetentionLeaseId).collect(Collectors.toSet()); + final boolean allShardsStarted = routingTable.allShardsStarted(); + final long minimumReasonableRetainedSeqNo = allShardsStarted ? 0L : getMinimumReasonableRetainedSeqNo(); final Map> partitionByExpiration = retentionLeases .leases() .stream() @@ -245,7 +260,12 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L if (leaseIdsForCurrentPeers.contains(lease.id())) { return false; } - if (routingTable.allShardsStarted()) { + if (allShardsStarted) { + logger.trace("expiring unused [{}]", lease); + return true; + } + if (lease.retainingSequenceNumber() < minimumReasonableRetainedSeqNo) { + logger.trace("expiring unreasonable [{}] retaining history before [{}]", lease, minimumReasonableRetainedSeqNo); return true; } } @@ -264,6 +284,17 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L return Tuple.tuple(true, retentionLeases); } + private long getMinimumReasonableRetainedSeqNo() { + final SafeCommitInfo safeCommitInfo = safeCommitInfoSupplier.get(); + return safeCommitInfo.localCheckpoint + 1 - Math.round(Math.ceil(safeCommitInfo.docCount * fileBasedRecoveryThreshold)); + // NB safeCommitInfo.docCount is a very low-level count of the docs in the index, and in particular if this shard contains nested + // docs then safeCommitInfo.docCount counts every child doc separately from the parent doc. However every part of a nested document + // has the same seqno, so we may be overestimating the cost of a file-based recovery when compared to an ops-based recovery and + // therefore preferring ops-based recoveries inappropriately in this case. 
Correctly accounting for nested docs seems difficult to + // do cheaply, and the circumstances in which this matters should be relatively rare, so we use this naive calculation regardless. + // TODO improve this measure for when nested docs are in use + } + /** * Adds a new retention lease. * @@ -850,7 +881,8 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L final long globalCheckpoint, final LongConsumer onGlobalCheckpointUpdated, final LongSupplier currentTimeMillisSupplier, - final BiConsumer> onSyncRetentionLeases) { + final BiConsumer> onSyncRetentionLeases, + final Supplier safeCommitInfoSupplier) { super(shardId, indexSettings); assert globalCheckpoint >= SequenceNumbers.UNASSIGNED_SEQ_NO : "illegal initial global checkpoint: " + globalCheckpoint; this.shardAllocationId = allocationId; @@ -867,6 +899,8 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L this.routingTable = null; this.replicationGroup = null; this.hasAllPeerRecoveryRetentionLeases = indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_4_0); + this.fileBasedRecoveryThreshold = IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING.get(indexSettings.getSettings()); + this.safeCommitInfoSupplier = safeCommitInfoSupplier; assert Version.V_EMPTY.equals(indexSettings.getIndexVersionCreated()) == false; assert invariant(); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index dafd379b192..ce80b8cb1d9 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -92,6 +92,7 @@ import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.ReadOnlyEngine; import org.elasticsearch.index.engine.RefreshFailedEngineException; +import 
org.elasticsearch.index.engine.SafeCommitInfo; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.fielddata.FieldDataStats; @@ -336,7 +337,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl UNASSIGNED_SEQ_NO, globalCheckpointListeners::globalCheckpointUpdated, threadPool::absoluteTimeInMillis, - (retentionLeases, listener) -> retentionLeaseSyncer.sync(shardId, retentionLeases, listener)); + (retentionLeases, listener) -> retentionLeaseSyncer.sync(shardId, retentionLeases, listener), + this::getSafeCommitInfo); // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis @@ -1099,7 +1101,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl /** * Rolls the tranlog generation and cleans unneeded. */ - private void rollTranslogGeneration() { + public void rollTranslogGeneration() { final Engine engine = getEngine(); engine.rollTranslogGeneration(); } @@ -2612,6 +2614,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl replicationTracker.removePeerRecoveryRetentionLease(nodeId, listener); } + private SafeCommitInfo getSafeCommitInfo() { + final Engine engine = getEngineOrNull(); + return engine == null ? 
SafeCommitInfo.EMPTY : engine.getSafeCommitInfo(); + } + class ShardEventListener implements Engine.EventListener { private final CopyOnWriteArrayList> delegates = new CopyOnWriteArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java b/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java index e9d593b728e..c0969cbc80e 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java @@ -63,6 +63,7 @@ final class MultiSnapshot implements Translog.Snapshot { @Override public Translog.Operation next() throws IOException { + // TODO: Read translog forward in 9.0+ for (; index >= 0; index--) { final TranslogSnapshot current = translogs[index]; Translog.Operation op; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 15bc17b3f7e..5d58325d4cf 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -379,7 +379,7 @@ public class PeerRecoveryTargetService implements IndexEventListener { public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { final ActionListener listener = new ChannelActionListener<>(channel, Actions.FINALIZE, request); - recoveryRef.target().finalizeRecovery(request.globalCheckpoint(), + recoveryRef.target().finalizeRecovery(request.globalCheckpoint(), request.trimAboveSeqNo(), ActionListener.map(listener, nullVal -> TransportResponse.Empty.INSTANCE)); } } diff --git 
a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFinalizeRecoveryRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFinalizeRecoveryRequest.java index 8a1a93611b3..232ea266bf8 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFinalizeRecoveryRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFinalizeRecoveryRequest.java @@ -28,27 +28,30 @@ import org.elasticsearch.transport.TransportRequest; import java.io.IOException; -public class RecoveryFinalizeRecoveryRequest extends TransportRequest { +final class RecoveryFinalizeRecoveryRequest extends TransportRequest { - private long recoveryId; - private ShardId shardId; - private long globalCheckpoint; + private final long recoveryId; + private final ShardId shardId; + private final long globalCheckpoint; + private final long trimAboveSeqNo; - public RecoveryFinalizeRecoveryRequest(StreamInput in) throws IOException { + RecoveryFinalizeRecoveryRequest(StreamInput in) throws IOException { super(in); recoveryId = in.readLong(); shardId = new ShardId(in); - if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) { - globalCheckpoint = in.readZLong(); + globalCheckpoint = in.readZLong(); + if (in.getVersion().onOrAfter(Version.V_7_4_0)) { + trimAboveSeqNo = in.readZLong(); } else { - globalCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; + trimAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; } } - RecoveryFinalizeRecoveryRequest(final long recoveryId, final ShardId shardId, final long globalCheckpoint) { + RecoveryFinalizeRecoveryRequest(final long recoveryId, final ShardId shardId, final long globalCheckpoint, final long trimAboveSeqNo) { this.recoveryId = recoveryId; this.shardId = shardId; this.globalCheckpoint = globalCheckpoint; + this.trimAboveSeqNo = trimAboveSeqNo; } public long recoveryId() { @@ -63,13 +66,18 @@ public class RecoveryFinalizeRecoveryRequest extends TransportRequest { return globalCheckpoint; } + 
public long trimAboveSeqNo() { + return trimAboveSeqNo; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeLong(recoveryId); shardId.writeTo(out); - if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) { - out.writeZLong(globalCheckpoint); + out.writeZLong(globalCheckpoint); + if (out.getVersion().onOrAfter(Version.V_7_4_0)) { + out.writeZLong(trimAboveSeqNo); } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 95507f89a18..5405929bfb6 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -327,7 +327,9 @@ public class RecoverySourceHandler { }, onFailure); - sendSnapshotStep.whenComplete(r -> finalizeRecovery(r.targetLocalCheckpoint, finalizeStep), onFailure); + // Recovery target can trim all operations >= startingSeqNo as we have sent all these operations in the phase 2 + final long trimAboveSeqNo = startingSeqNo - 1; + sendSnapshotStep.whenComplete(r -> finalizeRecovery(r.targetLocalCheckpoint, trimAboveSeqNo, finalizeStep), onFailure); finalizeStep.whenComplete(r -> { final long phase1ThrottlingWaitTime = 0L; // TODO: return the actual throttle time @@ -750,7 +752,7 @@ public class RecoverySourceHandler { } } - void finalizeRecovery(final long targetLocalCheckpoint, final ActionListener listener) throws IOException { + void finalizeRecovery(long targetLocalCheckpoint, long trimAboveSeqNo, ActionListener listener) throws IOException { if (shard.state() == IndexShardState.CLOSED) { throw new IndexShardClosedException(request.shardId()); } @@ -767,7 +769,7 @@ public class RecoverySourceHandler { shardId + " marking " + request.targetAllocationId() + " as in sync", shard, cancellableThreads, logger); final long globalCheckpoint = 
shard.getLastKnownGlobalCheckpoint(); // this global checkpoint is persisted in finalizeRecovery final StepListener finalizeListener = new StepListener<>(); - cancellableThreads.executeIO(() -> recoveryTarget.finalizeRecovery(globalCheckpoint, finalizeListener)); + cancellableThreads.executeIO(() -> recoveryTarget.finalizeRecovery(globalCheckpoint, trimAboveSeqNo, finalizeListener)); finalizeListener.whenComplete(r -> { runUnderPrimaryPermit(() -> shard.updateGlobalCheckpointForShard(request.targetAllocationId(), globalCheckpoint), shardId + " updating " + request.targetAllocationId() + "'s global checkpoint", shard, cancellableThreads, logger); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 6726e490037..7eea23c403b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -290,13 +290,23 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget } @Override - public void finalizeRecovery(final long globalCheckpoint, ActionListener listener) { + public void finalizeRecovery(final long globalCheckpoint, final long trimAboveSeqNo, ActionListener listener) { ActionListener.completeWith(listener, () -> { - final IndexShard indexShard = indexShard(); indexShard.updateGlobalCheckpointOnReplica(globalCheckpoint, "finalizing recovery"); // Persist the global checkpoint. indexShard.sync(); indexShard.persistRetentionLeases(); + if (trimAboveSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { + // We should erase all translog operations above trimAboveSeqNo as we have received either the same or a newer copy + // from the recovery source in phase2. Rolling a new translog generation is not strictly required here for we won't + // trim the current generation. 
It's merely to satisfy the assumption that the current generation does not have any + // operation that would be trimmed (see TranslogWriter#assertNoSeqAbove). This assumption does not hold for peer + // recovery because we could have received operations above startingSeqNo from the previous primary terms. + indexShard.rollTranslogGeneration(); + // the flush or translog generation threshold can be reached after we roll a new translog + indexShard.afterWriteOperation(); + indexShard.trimOperationOfPreviousPrimaryTerms(trimAboveSeqNo); + } if (hasUncommittedOperations()) { indexShard.flush(new FlushRequest().force(true).waitIfOngoing(true)); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java index a1990dda3a0..13fc04098f1 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java @@ -42,9 +42,11 @@ public interface RecoveryTargetHandler { * the global checkpoint. * * @param globalCheckpoint the global checkpoint on the recovery source + * @param trimAboveSeqNo The recovery target should erase its existing translog above this sequence number + * from the previous primary terms. * @param listener the listener which will be notified when this method is completed */ - void finalizeRecovery(long globalCheckpoint, ActionListener listener); + void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionListener listener); /** * Handoff the primary context between the relocation source and the relocation target. 
diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java index 9b2c0f752a5..3140de8169d 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java @@ -86,9 +86,9 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { } @Override - public void finalizeRecovery(final long globalCheckpoint, final ActionListener listener) { + public void finalizeRecovery(final long globalCheckpoint, final long trimAboveSeqNo, final ActionListener listener) { transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.FINALIZE, - new RecoveryFinalizeRecoveryRequest(recoveryId, shardId, globalCheckpoint), + new RecoveryFinalizeRecoveryRequest(recoveryId, shardId, globalCheckpoint, trimAboveSeqNo), TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionLongTimeout()).build(), new ActionListenerResponseHandler<>(ActionListener.map(listener, r -> null), in -> TransportResponse.Empty.INSTANCE, ThreadPool.Names.GENERIC)); diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index ad5d62b22fe..a2a48d0a0fe 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -615,26 +615,27 @@ public class IngestService implements ClusterStateApplier { } /** - * Determine if a pipeline contains a processor class within it by introspecting all of the processors within the pipeline. + * Gets all the Processors of the given type from within a Pipeline. 
* @param pipelineId the pipeline to inspect * @param clazz the Processor class to look for * @return True if the pipeline contains an instance of the Processor class passed in */ - public boolean hasProcessor(String pipelineId, Class clazz) { + public

List

getProcessorsInPipeline(String pipelineId, Class

clazz) { Pipeline pipeline = getPipeline(pipelineId); if (pipeline == null) { - return false; + throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist"); } + List

processors = new ArrayList<>(); for (Processor processor: pipeline.flattenAllProcessors()) { if (clazz.isAssignableFrom(processor.getClass())) { - return true; + processors.add(clazz.cast(processor)); } while (processor instanceof WrappingProcessor) { WrappingProcessor wrappingProcessor = (WrappingProcessor) processor; if (clazz.isAssignableFrom(wrappingProcessor.getInnerProcessor().getClass())) { - return true; + processors.add(clazz.cast(wrappingProcessor.getInnerProcessor())); } processor = wrappingProcessor.getInnerProcessor(); // break in the case of self referencing processors in the event a processor author creates a @@ -645,7 +646,7 @@ public class IngestService implements ClusterStateApplier { } } - return false; + return processors; } private static Pipeline substitutePipeline(String id, ElasticsearchParseException e) { diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 9e86b3a6f94..c99deebc5c7 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.rest.action.admin.cluster.RestNodesUsageAction; @@ -70,10 +69,6 @@ public abstract class BaseRestHandler implements RestHandler { public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; public static final boolean DEFAULT_INCLUDE_TYPE_NAME_POLICY = false; - protected BaseRestHandler(Settings settings) { - // TODO drop settings from ctor - } - public final long getUsageCount() { return usageCount.sum(); } diff --git 
a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 19d29f8e915..04410662cc4 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -47,11 +47,11 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Supplier; import java.util.function.UnaryOperator; import static org.elasticsearch.rest.BytesRestResponse.TEXT_CONTENT_TYPE; @@ -201,77 +201,55 @@ public class RestController implements HttpServerTransport.Dispatcher { /** * Dispatch the request, if possible, returning true if a response was sent or false otherwise. */ - boolean dispatchRequest(final RestRequest request, final RestChannel channel, final NodeClient client, - @Nullable final RestHandler handler) throws Exception { - if (handler == null) { - // Get the map of matching handlers for a request, for the full set of HTTP methods. - final Set validMethodSet = getValidHandlerMethodSet(request); - final RestRequest.Method method = request.method(); - if (validMethodSet.contains(method) == false) { - if (method == RestRequest.Method.OPTIONS) { - handleOptionsRequest(channel, validMethodSet); - return true; - } - if (validMethodSet.isEmpty() == false) { - // If an alternative handler for an explicit path is registered to a - // different HTTP method than the one supplied - return a 405 Method - // Not Allowed error. 
- handleUnsupportedHttpMethod(request.uri(), method, channel, validMethodSet, null); - return true; - } + private boolean dispatchRequest(RestRequest request, RestChannel channel, RestHandler handler) throws Exception { + final int contentLength = request.content().length(); + if (contentLength > 0) { + final XContentType xContentType = request.getXContentType(); + if (xContentType == null) { + sendContentTypeErrorMessage(request.getAllHeaderValues("Content-Type"), channel); + return true; } - return false; - } else { - final int contentLength = request.content().length(); - if (contentLength > 0) { - if (hasContentType(request, handler) == false) { - sendContentTypeErrorMessage(request.getAllHeaderValues("Content-Type"), channel); - return true; - } - final XContentType xContentType = request.getXContentType(); if (handler.supportsContentStream() && xContentType != XContentType.JSON && xContentType != XContentType.SMILE) { channel.sendResponse(BytesRestResponse.createSimpleErrorResponse(channel, RestStatus.NOT_ACCEPTABLE, "Content-Type [" + xContentType + "] does not support stream parsing. 
Use JSON or SMILE instead")); return true; } - } - RestChannel responseChannel = channel; - try { - if (handler.canTripCircuitBreaker()) { - inFlightRequestsBreaker(circuitBreakerService).addEstimateBytesAndMaybeBreak(contentLength, ""); - } else { - inFlightRequestsBreaker(circuitBreakerService).addWithoutBreaking(contentLength); - } - // iff we could reserve bytes for the request we need to send the response also over this channel - responseChannel = new ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength); - handler.handleRequest(request, responseChannel, client); - } catch (Exception e) { - responseChannel.sendResponse(new BytesRestResponse(responseChannel, e)); - } - return true; } - } - - /** - * If a request contains content, this method will return {@code true} if the {@code Content-Type} header is present, matches an - * {@link XContentType} or the handler supports a content stream and the content type header is for newline delimited JSON, - */ - private static boolean hasContentType(final RestRequest restRequest, final RestHandler restHandler) { - if (restRequest.getXContentType() == null) { - String contentTypeHeader = restRequest.header("Content-Type"); - if (restHandler.supportsContentStream() && contentTypeHeader != null) { - final String lowercaseMediaType = contentTypeHeader.toLowerCase(Locale.ROOT); - // we also support newline delimited JSON: http://specs.okfnlabs.org/ndjson/ - if (lowercaseMediaType.equals("application/x-ndjson")) { - restRequest.setXContentType(XContentType.JSON); - return true; - } + RestChannel responseChannel = channel; + try { + if (handler.canTripCircuitBreaker()) { + inFlightRequestsBreaker(circuitBreakerService).addEstimateBytesAndMaybeBreak(contentLength, ""); + } else { + inFlightRequestsBreaker(circuitBreakerService).addWithoutBreaking(contentLength); } - return false; + // iff we could reserve bytes for the request we need to send the response also over this channel + responseChannel = new 
ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength); + handler.handleRequest(request, responseChannel, client); + } catch (Exception e) { + responseChannel.sendResponse(new BytesRestResponse(responseChannel, e)); } return true; } + private boolean handleNoHandlerFound(String rawPath, RestRequest.Method method, String uri, RestChannel channel) { + // Get the map of matching handlers for a request, for the full set of HTTP methods. + final Set validMethodSet = getValidHandlerMethodSet(rawPath); + if (validMethodSet.contains(method) == false) { + if (method == RestRequest.Method.OPTIONS) { + handleOptionsRequest(channel, validMethodSet); + return true; + } + if (validMethodSet.isEmpty() == false) { + // If an alternative handler for an explicit path is registered to a + // different HTTP method than the one supplied - return a 405 Method + // Not Allowed error. + handleUnsupportedHttpMethod(uri, method, channel, validMethodSet, null); + return true; + } + } + return false; + } + private void sendContentTypeErrorMessage(@Nullable List contentTypeHeader, RestChannel channel) throws IOException { final String errorMessage; if (contentTypeHeader == null) { @@ -284,16 +262,6 @@ public class RestController implements HttpServerTransport.Dispatcher { channel.sendResponse(BytesRestResponse.createSimpleErrorResponse(channel, NOT_ACCEPTABLE, errorMessage)); } - /** - * Checks the request parameters against enabled settings for error trace support - * @return true if the request does not have any parameters that conflict with system settings - */ - private static boolean checkErrorTraceParameter(final RestRequest request, final RestChannel channel) { - // error_trace cannot be used when we disable detailed errors - // we consume the error_trace parameter first to ensure that it is always consumed - return request.paramAsBoolean("error_trace", false) == false || channel.detailedErrorsEnabled(); - } - private void tryAllHandlers(final RestRequest request, 
final RestChannel channel, final ThreadContext threadContext) throws Exception { for (String key : headersToCopy) { String httpHeader = request.header(key); @@ -301,17 +269,22 @@ public class RestController implements HttpServerTransport.Dispatcher { threadContext.putHeader(key, httpHeader); } } - if (checkErrorTraceParameter(request, channel) == false) { + // error_trace cannot be used when we disable detailed errors + // we consume the error_trace parameter first to ensure that it is always consumed + if (request.paramAsBoolean("error_trace", false) && channel.detailedErrorsEnabled() == false) { channel.sendResponse( BytesRestResponse.createSimpleErrorResponse(channel, BAD_REQUEST, "error traces in responses are disabled.")); return; } + final String rawPath = request.rawPath(); + final String uri = request.uri(); + final RestRequest.Method requestMethod; try { // Resolves the HTTP method and fails if the method is invalid - final RestRequest.Method requestMethod = request.method(); + requestMethod = request.method(); // Loop through all possible handlers, attempting to dispatch the request - Iterator allHandlers = getAllHandlers(request); + Iterator allHandlers = getAllHandlers(request.params(), rawPath); while (allHandlers.hasNext()) { final RestHandler handler; final MethodHandlers handlers = allHandlers.next(); @@ -320,32 +293,41 @@ public class RestController implements HttpServerTransport.Dispatcher { } else { handler = handlers.getHandler(requestMethod); } - if (dispatchRequest(request, channel, client, handler)) { + if (handler == null) { + if (handleNoHandlerFound(rawPath, requestMethod, uri, channel)) { + return; + } + } else if (dispatchRequest(request, channel, handler)) { return; } } } catch (final IllegalArgumentException e) { - handleUnsupportedHttpMethod(request.uri(), null, channel, getValidHandlerMethodSet(request), e); + handleUnsupportedHttpMethod(uri, null, channel, getValidHandlerMethodSet(rawPath), e); return; } // If request has not been 
handled, fallback to a bad request error. - handleBadRequest(request.uri(), request.method(), channel); + handleBadRequest(uri, requestMethod, channel); } - Iterator getAllHandlers(final RestRequest request) { - // Between retrieving the correct path, we need to reset the parameters, - // otherwise parameters are parsed out of the URI that aren't actually handled. - final Map originalParams = new HashMap<>(request.params()); + Iterator getAllHandlers(@Nullable Map requestParamsRef, String rawPath) { + final Supplier> paramsSupplier; + if (requestParamsRef == null) { + paramsSupplier = () -> null; + } else { + // Between retrieving the correct path, we need to reset the parameters, + // otherwise parameters are parsed out of the URI that aren't actually handled. + final Map originalParams = new HashMap<>(requestParamsRef); + paramsSupplier = () -> { + // PathTrie modifies the request, so reset the params between each iteration + requestParamsRef.clear(); + requestParamsRef.putAll(originalParams); + return requestParamsRef; + }; + } // we use rawPath since we don't want to decode it while processing the path resolution // so we can handle things like: // my_index/my_type/http%3A%2F%2Fwww.google.com - final Map requestParamsRef = request.params(); - return handlers.retrieveAll(request.rawPath(), () -> { - // PathTrie modifies the request, so reset the params between each iteration - requestParamsRef.clear(); - requestParamsRef.putAll(originalParams); - return requestParamsRef; - }); + return handlers.retrieveAll(rawPath, paramsSupplier); } /** @@ -417,9 +399,9 @@ public class RestController implements HttpServerTransport.Dispatcher { /** * Get the valid set of HTTP methods for a REST request. 
*/ - private Set getValidHandlerMethodSet(RestRequest request) { + private Set getValidHandlerMethodSet(String rawPath) { Set validMethods = new HashSet<>(); - Iterator allHandlers = getAllHandlers(request); + Iterator allHandlers = getAllHandlers(null, rawPath); for (Iterator it = allHandlers; it.hasNext(); ) { Optional.ofNullable(it.next()).map(mh -> validMethods.addAll(mh.getValidMethods())); } diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 4fd8515caba..405cf7f68ef 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -40,6 +40,7 @@ import org.elasticsearch.http.HttpRequest; import java.io.IOException; import java.io.InputStream; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -248,13 +249,6 @@ public class RestRequest implements ToXContent.Params { return xContentType.get(); } - /** - * Sets the {@link XContentType} - */ - final void setXContentType(XContentType xContentType) { - this.xContentType.set(xContentType); - } - public HttpChannel getHttpChannel() { return httpChannel; } @@ -294,7 +288,7 @@ public class RestRequest implements ToXContent.Params { * @return the list of currently consumed parameters. 
*/ List consumedParams() { - return consumedParams.stream().collect(Collectors.toList()); + return new ArrayList<>(consumedParams); } /** diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java index 24e26713ed6..613729f8904 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -34,8 +33,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestFieldCapabilitiesAction extends BaseRestHandler { - public RestFieldCapabilitiesAction(Settings settings, RestController controller) { - super(settings); + + public RestFieldCapabilitiesAction(RestController controller) { controller.registerHandler(GET, "/_field_caps", this); controller.registerHandler(POST, "/_field_caps", this); controller.registerHandler(GET, "/{index}/_field_caps", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java b/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java index 9efc8f526f3..e776a6ad08a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.main.MainAction; import org.elasticsearch.action.main.MainRequest; import 
org.elasticsearch.action.main.MainResponse; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -38,8 +37,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; public class RestMainAction extends BaseRestHandler { - public RestMainAction(Settings settings, RestController controller) { - super(settings); + public RestMainAction(RestController controller) { controller.registerHandler(GET, "/", this); controller.registerHandler(HEAD, "/", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java index c0d8c1cf698..018392bb05a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java @@ -19,11 +19,10 @@ package org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; +import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -36,8 +35,7 @@ public class RestAddVotingConfigExclusionAction extends BaseRestHandler { private static final TimeValue DEFAULT_TIMEOUT = 
TimeValue.timeValueSeconds(30L); - public RestAddVotingConfigExclusionAction(Settings settings, RestController controller) { - super(settings); + public RestAddVotingConfigExclusionAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/_cluster/voting_config_exclusions/{node_name}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCancelTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCancelTasksAction.java index bd94e7351eb..2dd98bfb9a5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCancelTasksAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksReque import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -39,8 +38,7 @@ import static org.elasticsearch.rest.action.admin.cluster.RestListTasksAction.li public class RestCancelTasksAction extends BaseRestHandler { private final Supplier nodesInCluster; - public RestCancelTasksAction(Settings settings, RestController controller, Supplier nodesInCluster) { - super(settings); + public RestCancelTasksAction(RestController controller, Supplier nodesInCluster) { this.nodesInCluster = nodesInCluster; controller.registerHandler(POST, "/_tasks/_cancel", this); controller.registerHandler(POST, "/_tasks/{task_id}/_cancel", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClearVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClearVotingConfigExclusionsAction.java 
index ee3ec532f82..8d9a2121b81 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClearVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClearVotingConfigExclusionsAction.java @@ -19,10 +19,9 @@ package org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; +import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -32,8 +31,7 @@ import java.io.IOException; public class RestClearVotingConfigExclusionsAction extends BaseRestHandler { - public RestClearVotingConfigExclusionsAction(Settings settings, RestController controller) { - super(settings); + public RestClearVotingConfigExclusionsAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, "/_cluster/voting_config_exclusions", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterAllocationExplainAction.java index 504c8f365d7..99d0f00d096 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterAllocationExplainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterAllocationExplainAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequest; import 
org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -40,8 +39,8 @@ import java.io.IOException; * Class handling cluster allocation explanation at the REST level */ public class RestClusterAllocationExplainAction extends BaseRestHandler { - public RestClusterAllocationExplainAction(Settings settings, RestController controller) { - super(settings); + + public RestClusterAllocationExplainAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_cluster/allocation/explain", this); controller.registerHandler(RestRequest.Method.POST, "/_cluster/allocation/explain", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java index e1e4d921163..0a4d5b4e897 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java @@ -49,7 +49,6 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { public RestClusterGetSettingsAction(Settings settings, RestController controller, ClusterSettings clusterSettings, SettingsFilter settingsFilter) { - super(settings); this.settings = settings; this.clusterSettings = clusterSettings; controller.registerHandler(RestRequest.Method.GET, "/_cluster/settings", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java index 
5e6f98eedd8..63c6bc4c92c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -41,8 +40,7 @@ import static org.elasticsearch.client.Requests.clusterHealthRequest; public class RestClusterHealthAction extends BaseRestHandler { - public RestClusterHealthAction(Settings settings, RestController controller) { - super(settings); + public RestClusterHealthAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_cluster/health", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/health/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java index 4b019e98e62..30799cb0552 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java @@ -54,8 +54,7 @@ public class RestClusterRerouteAction extends BaseRestHandler { private final SettingsFilter settingsFilter; - public RestClusterRerouteAction(Settings settings, RestController controller, SettingsFilter settingsFilter) { - super(settings); + public RestClusterRerouteAction(RestController controller, SettingsFilter settingsFilter) { this.settingsFilter = settingsFilter; controller.registerHandler(RestRequest.Method.POST, 
"/_cluster/reroute", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterSearchShardsAction.java index e761e848023..188691dacf5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterSearchShardsAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Requests; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -36,8 +35,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestClusterSearchShardsAction extends BaseRestHandler { - public RestClusterSearchShardsAction(Settings settings, RestController controller) { - super(settings); + + public RestClusterSearchShardsAction(RestController controller) { controller.registerHandler(GET, "/_search_shards", this); controller.registerHandler(POST, "/_search_shards", this); controller.registerHandler(GET, "/{index}/_search_shards", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStateAction.java index 732fe639865..9838cc26625 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStateAction.java @@ -47,8 +47,7 @@ public class RestClusterStateAction extends BaseRestHandler { private final 
SettingsFilter settingsFilter; - public RestClusterStateAction(Settings settings, RestController controller, SettingsFilter settingsFilter) { - super(settings); + public RestClusterStateAction(RestController controller, SettingsFilter settingsFilter) { controller.registerHandler(RestRequest.Method.GET, "/_cluster/state", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/state/{metric}", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/state/{metric}/{indices}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java index 830fc3041f9..c7bfc640e76 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -30,8 +29,8 @@ import org.elasticsearch.rest.action.RestActions.NodesResponseRestListener; import java.io.IOException; public class RestClusterStatsAction extends BaseRestHandler { - public RestClusterStatsAction(Settings settings, RestController controller) { - super(settings); + + public RestClusterStatsAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_cluster/stats", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/stats/nodes/{nodeId}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java index 4eb5bbe2a84..35874f12d5f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java @@ -38,8 +38,7 @@ public class RestClusterUpdateSettingsAction extends BaseRestHandler { private static final String PERSISTENT = "persistent"; private static final String TRANSIENT = "transient"; - public RestClusterUpdateSettingsAction(Settings settings, RestController controller) { - super(settings); + public RestClusterUpdateSettingsAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_cluster/settings", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCreateSnapshotAction.java index bf2866b5771..185a1659738 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCreateSnapshotAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -37,8 +36,8 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; * Creates a new snapshot */ public class RestCreateSnapshotAction extends BaseRestHandler { - public RestCreateSnapshotAction(Settings settings, RestController controller) { - super(settings); + + public RestCreateSnapshotAction(RestController controller) { 
controller.registerHandler(PUT, "/_snapshot/{repository}/{snapshot}", this); controller.registerHandler(POST, "/_snapshot/{repository}/{snapshot}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java index 4b7bb9d8de0..81914718b07 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -36,8 +35,8 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; * Unregisters a repository */ public class RestDeleteRepositoryAction extends BaseRestHandler { - public RestDeleteRepositoryAction(Settings settings, RestController controller) { - super(settings); + + public RestDeleteRepositoryAction(RestController controller) { controller.registerHandler(DELETE, "/_snapshot/{repository}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java index 6b18e9a3d50..81a3ddd31c5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import 
org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -36,8 +35,8 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; * Deletes a snapshot */ public class RestDeleteSnapshotAction extends BaseRestHandler { - public RestDeleteSnapshotAction(Settings settings, RestController controller) { - super(settings); + + public RestDeleteSnapshotAction(RestController controller) { controller.registerHandler(DELETE, "/_snapshot/{repository}/{snapshot}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java index 2da902df9da..a0258974cfc 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -32,9 +31,7 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteStoredScriptAction extends BaseRestHandler { - public RestDeleteStoredScriptAction(Settings settings, RestController controller) { - super(settings); - + public RestDeleteStoredScriptAction(RestController controller) { controller.registerHandler(DELETE, "/_scripts/{id}", this); } diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java index 5d8c93ca433..5b39a7e6df3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java @@ -42,8 +42,7 @@ public class RestGetRepositoriesAction extends BaseRestHandler { private final SettingsFilter settingsFilter; - public RestGetRepositoriesAction(Settings settings, RestController controller, SettingsFilter settingsFilter) { - super(settings); + public RestGetRepositoriesAction(RestController controller, SettingsFilter settingsFilter) { controller.registerHandler(GET, "/_snapshot", this); controller.registerHandler(GET, "/_snapshot/{repository}", this); this.settingsFilter = settingsFilter; diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetSnapshotsAction.java index f42180b5029..b873bce0238 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetSnapshotsAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -37,8 +36,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; * Returns information about snapshot */ public class RestGetSnapshotsAction extends BaseRestHandler { 
- public RestGetSnapshotsAction(Settings settings, RestController controller) { - super(settings); + + public RestGetSnapshotsAction(RestController controller) { controller.registerHandler(GET, "/_snapshot/{repository}/{snapshot}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java index 1a14d505382..f87c6f513b7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -32,9 +31,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetStoredScriptAction extends BaseRestHandler { - public RestGetStoredScriptAction(Settings settings, RestController controller) { - super(settings); - + public RestGetStoredScriptAction(RestController controller) { controller.registerHandler(GET, "/_scripts/{id}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetTaskAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetTaskAction.java index 158cb5bbd6a..f0ca02f43bc 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetTaskAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import 
org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -34,8 +33,8 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetTaskAction extends BaseRestHandler { - public RestGetTaskAction(Settings settings, RestController controller) { - super(settings); + + public RestGetTaskAction(RestController controller) { controller.registerHandler(GET, "/_tasks/{task_id}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java index ec4058fea9d..4627b853ac7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -49,8 +48,7 @@ public class RestListTasksAction extends BaseRestHandler { private final Supplier nodesInCluster; - public RestListTasksAction(Settings settings, RestController controller, Supplier nodesInCluster) { - super(settings); + public RestListTasksAction(RestController controller, Supplier nodesInCluster) { this.nodesInCluster = nodesInCluster; controller.registerHandler(GET, "/_tasks", this); } diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java index be163d0431a..07b85266d9d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsReq import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -38,8 +37,8 @@ import java.io.IOException; public class RestNodesHotThreadsAction extends BaseRestHandler { - public RestNodesHotThreadsAction(Settings settings, RestController controller) { - super(settings); + + public RestNodesHotThreadsAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/hotthreads", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/hot_threads", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/{nodeId}/hotthreads", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java index 20370b27d43..ab39c4408f2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java @@ -50,8 +50,7 @@ public class RestNodesInfoAction extends BaseRestHandler { private final SettingsFilter 
settingsFilter; - public RestNodesInfoAction(Settings settings, RestController controller, SettingsFilter settingsFilter) { - super(settings); + public RestNodesInfoAction(RestController controller, SettingsFilter settingsFilter) { controller.registerHandler(GET, "/_nodes", this); // this endpoint is used for metrics, not for node IDs, like /_nodes/fs controller.registerHandler(GET, "/_nodes/{nodeId}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java index 14c8655e48c..f3ef2a55d25 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -42,8 +41,8 @@ import java.util.function.Consumer; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestNodesStatsAction extends BaseRestHandler { - public RestNodesStatsAction(Settings settings, RestController controller) { - super(settings); + + public RestNodesStatsAction(RestController controller) { controller.registerHandler(GET, "/_nodes/stats", this); controller.registerHandler(GET, "/_nodes/{nodeId}/stats", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesUsageAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesUsageAction.java index dc6449862a8..f1a3b15d8b6 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesUsageAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesUsageAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -44,8 +43,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestNodesUsageAction extends BaseRestHandler { @Inject - public RestNodesUsageAction(Settings settings, RestController controller) { - super(settings); + public RestNodesUsageAction(RestController controller) { controller.registerHandler(GET, "/_nodes/usage", this); controller.registerHandler(GET, "/_nodes/{nodeId}/usage", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java index ab2beda1d03..dbf03e35130 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -30,8 +29,8 @@ import org.elasticsearch.rest.action.RestToXContentListener; import 
java.io.IOException; public class RestPendingClusterTasksAction extends BaseRestHandler { - public RestPendingClusterTasksAction(Settings settings, RestController controller) { - super(settings); + + public RestPendingClusterTasksAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_cluster/pending_tasks", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java index 62cc06cc404..150296d65ae 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -38,8 +37,8 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; * Registers repositories */ public class RestPutRepositoryAction extends BaseRestHandler { - public RestPutRepositoryAction(Settings settings, RestController controller) { - super(settings); + + public RestPutRepositoryAction(RestController controller) { controller.registerHandler(PUT, "/_snapshot/{repository}", this); controller.registerHandler(POST, "/_snapshot/{repository}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java index 4a4530b6fa3..2e316abc142 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -36,9 +35,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutStoredScriptAction extends BaseRestHandler { - public RestPutStoredScriptAction(Settings settings, RestController controller) { - super(settings); - + public RestPutStoredScriptAction(RestController controller) { controller.registerHandler(POST, "/_scripts/{id}", this); controller.registerHandler(PUT, "/_scripts/{id}", this); controller.registerHandler(POST, "/_scripts/{id}/{context}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java index 2251615d678..cb21c7e30da 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java @@ -19,12 +19,10 @@ package org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequest; import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; import 
org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -41,8 +39,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public final class RestReloadSecureSettingsAction extends BaseRestHandler { - public RestReloadSecureSettingsAction(Settings settings, RestController controller) { - super(settings); + public RestReloadSecureSettingsAction(RestController controller) { controller.registerHandler(POST, "/_nodes/reload_secure_settings", this); controller.registerHandler(POST, "/_nodes/{nodeId}/reload_secure_settings", this); } @@ -60,7 +57,6 @@ public final class RestReloadSecureSettingsAction extends BaseRestHandler { .prepareReloadSecureSettings() .setTimeout(request.param("timeout")) .setNodesIds(nodesIds); - final NodesReloadSecureSettingsRequest nodesRequest = nodesRequestBuilder.request(); return channel -> nodesRequestBuilder .execute(new RestBuilderListener(channel) { @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java index e879f7f4116..a49f6d1319b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java @@ -22,20 +22,16 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.remote.RemoteInfoAction; import org.elasticsearch.action.admin.cluster.remote.RemoteInfoRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.GET; public final class RestRemoteClusterInfoAction extends BaseRestHandler { - public RestRemoteClusterInfoAction(Settings settings, RestController controller) { - super(settings); + public RestRemoteClusterInfoAction(RestController controller) { controller.registerHandler(GET, "_remote/info", this); } @@ -45,9 +41,10 @@ public final class RestRemoteClusterInfoAction extends BaseRestHandler { } @Override - public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) { return channel -> client.execute(RemoteInfoAction.INSTANCE, new RemoteInfoRequest(), new RestToXContentListener<>(channel)); } + @Override public boolean canTripCircuitBreaker() { return false; diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java index 2bd077037ec..fb47e928ab8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -36,8 +35,8 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; * Restores a snapshot */ 
public class RestRestoreSnapshotAction extends BaseRestHandler { - public RestRestoreSnapshotAction(Settings settings, RestController controller) { - super(settings); + + public RestRestoreSnapshotAction(RestController controller) { controller.registerHandler(POST, "/_snapshot/{repository}/{snapshot}/_restore", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java index a06a916420e..f8347d38cd5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -37,8 +36,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; * Returns status of currently running snapshot */ public class RestSnapshotsStatusAction extends BaseRestHandler { - public RestSnapshotsStatusAction(Settings settings, RestController controller) { - super(settings); + + public RestSnapshotsStatusAction(RestController controller) { controller.registerHandler(GET, "/_snapshot/{repository}/{snapshot}/_status", this); controller.registerHandler(GET, "/_snapshot/{repository}/_status", this); controller.registerHandler(GET, "/_snapshot/_status", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java 
index 92debec6bf8..be69e73275c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -33,8 +32,8 @@ import static org.elasticsearch.client.Requests.verifyRepositoryRequest; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestVerifyRepositoryAction extends BaseRestHandler { - public RestVerifyRepositoryAction(Settings settings, RestController controller) { - super(settings); + + public RestVerifyRepositoryAction(RestController controller) { controller.registerHandler(POST, "/_snapshot/{repository}/_verify", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index 99c85981068..af9bfbfe595 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -47,8 +46,7 @@ public class 
RestAnalyzeAction extends BaseRestHandler { public static final ParseField NORMALIZER = new ParseField("normalizer"); } - public RestAnalyzeAction(Settings settings, RestController controller) { - super(settings); + public RestAnalyzeAction(RestController controller) { controller.registerHandler(GET, "/_analyze", this); controller.registerHandler(GET, "/{index}/_analyze", this); controller.registerHandler(POST, "/_analyze", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java index 38b9d987d04..bc2fed911b3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheReque import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -35,8 +34,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestClearIndicesCacheAction extends BaseRestHandler { - public RestClearIndicesCacheAction(Settings settings, RestController controller) { - super(settings); + public RestClearIndicesCacheAction(RestController controller) { controller.registerHandler(POST, "/_cache/clear", this); controller.registerHandler(POST, "/{index}/_cache/clear", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java index 3ee2687eb72..3f26b197837 
100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -33,8 +32,8 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; public class RestCloseIndexAction extends BaseRestHandler { - public RestCloseIndexAction(Settings settings, RestController controller) { - super(settings); + + public RestCloseIndexAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/_close", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_close", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java index 527b0830c80..4b5ca382e08 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import 
org.elasticsearch.index.mapper.MapperService; @@ -44,8 +43,7 @@ public class RestCreateIndexAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in create " + "index requests is deprecated. The parameter will be removed in the next major version."; - public RestCreateIndexAction(Settings settings, RestController controller) { - super(settings); + public RestCreateIndexAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java index f6c4c178572..b0d02cd19b7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -32,8 +31,8 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; public class RestDeleteIndexAction extends BaseRestHandler { - public RestDeleteIndexAction(Settings settings, RestController controller) { - super(settings); + + public RestDeleteIndexAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, "/", this); controller.registerHandler(RestRequest.Method.DELETE, "/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java index 114e043e2e1..c0679bb3562 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -29,8 +28,8 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; public class RestDeleteIndexTemplateAction extends BaseRestHandler { - public RestDeleteIndexTemplateAction(Settings settings, RestController controller) { - super(settings); + + public RestDeleteIndexTemplateAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, "/_template/{name}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestFlushAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestFlushAction.java index 4879a54f4fe..f9d614e9b99 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestFlushAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestFlushAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import 
org.elasticsearch.rest.RestRequest; @@ -35,8 +34,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestFlushAction extends BaseRestHandler { - public RestFlushAction(Settings settings, RestController controller) { - super(settings); + + public RestFlushAction(RestController controller) { controller.registerHandler(POST, "/_flush", this); controller.registerHandler(POST, "/{index}/_flush", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java index 13a826300b5..e2876e7cf02 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -39,8 +38,7 @@ public class RestForceMergeAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestForceMergeAction.class)); - public RestForceMergeAction(final Settings settings, final RestController controller) { - super(settings); + public RestForceMergeAction(final RestController controller) { controller.registerHandler(POST, "/_forcemerge", this); controller.registerHandler(POST, "/{index}/_forcemerge", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java index 8cdf9e62b10..3379ab00957 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -56,8 +55,7 @@ import static org.elasticsearch.rest.RestRequest.Method.HEAD; */ public class RestGetAliasesAction extends BaseRestHandler { - public RestGetAliasesAction(final Settings settings, final RestController controller) { - super(settings); + public RestGetAliasesAction(final RestController controller) { controller.registerHandler(GET, "/_alias", this); controller.registerHandler(GET, "/_aliases", this); controller.registerHandler(GET, "/_alias/{name}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 09a59d44f97..27d8c2f8a8c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -51,8 +50,7 @@ public class RestGetFieldMappingAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " + "field mapping requests is deprecated. The parameter will be removed in the next major version."; - public RestGetFieldMappingAction(Settings settings, RestController controller) { - super(settings); + public RestGetFieldMappingAction(RestController controller) { controller.registerHandler(GET, "/_mapping/field/{fields}", this); controller.registerHandler(GET, "/_mapping/{type}/field/{fields}", this); controller.registerHandler(GET, "/{index}/_mapping/field/{fields}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java index 707378eec4c..3a66d7a0129 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java @@ -54,8 +54,7 @@ public class RestGetIndexTemplateAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying include_type_name in get index template requests is deprecated."; - public RestGetIndexTemplateAction(final Settings settings, final RestController controller) { - super(settings); + public RestGetIndexTemplateAction(final RestController controller) { controller.registerHandler(GET, "/_template", this); controller.registerHandler(GET, "/_template/{name}", this); controller.registerHandler(HEAD, "/_template/{name}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java index 6649863e852..64754260ebf 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -54,10 +54,7 @@ public class RestGetIndicesAction extends BaseRestHandler { .unmodifiableSet(Stream.concat(Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER).stream(), Settings.FORMAT_PARAMS.stream()) .collect(Collectors.toSet())); - public RestGetIndicesAction( - final Settings settings, - final RestController controller) { - super(settings); + public RestGetIndicesAction(final RestController controller) { controller.registerHandler(GET, "/{index}", this); controller.registerHandler(HEAD, "/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 74f451ab30c..051a1199e74 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.indices.TypeMissingException; @@ -63,8 +62,7 @@ public class RestGetMappingAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get" + " mapping requests is deprecated. 
The parameter will be removed in the next major version."; - public RestGetMappingAction(final Settings settings, final RestController controller) { - super(settings); + public RestGetMappingAction(final RestController controller) { controller.registerHandler(GET, "/_mapping", this); controller.registerHandler(GET, "/_mappings", this); controller.registerHandler(GET, "/{index}/{type}/_mapping", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java index 6dead806042..c7b9cc485d3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -35,8 +34,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetSettingsAction extends BaseRestHandler { - public RestGetSettingsAction(Settings settings, RestController controller) { - super(settings); + public RestGetSettingsAction(RestController controller) { controller.registerHandler(GET, "/_settings", this); controller.registerHandler(GET, "/_settings/{name}", this); controller.registerHandler(GET, "/{index}/_settings", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java index 270a9c8fdc4..91442bb2fb9 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -33,8 +32,8 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestIndexDeleteAliasesAction extends BaseRestHandler { - public RestIndexDeleteAliasesAction(Settings settings, RestController controller) { - super(settings); + + public RestIndexDeleteAliasesAction(RestController controller) { controller.registerHandler(DELETE, "/{index}/_alias/{name}", this); controller.registerHandler(DELETE, "/{index}/_aliases/{name}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java index 6091994db96..3c51c9111f2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -36,8 +35,8 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestIndexPutAliasAction extends BaseRestHandler { - public RestIndexPutAliasAction(Settings settings, RestController controller) { - super(settings); + + public RestIndexPutAliasAction(RestController controller) { controller.registerHandler(PUT, "/{index}/_alias/{name}", this); controller.registerHandler(PUT, "/_alias/{name}", this); controller.registerHandler(PUT, "/{index}/_aliases/{name}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java index fe4155f8cb2..d305c5ad3ca 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -39,8 +38,7 @@ public class RestIndicesAliasesAction extends BaseRestHandler { return "indices_aliases_action"; } - public RestIndicesAliasesAction(Settings settings, RestController controller) { - super(settings); + public RestIndicesAliasesAction(RestController controller) { controller.registerHandler(POST, "/_aliases", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesSegmentsAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesSegmentsAction.java index 1beec61e6dd..ed76c36dac3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesSegmentsAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -34,8 +33,8 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestIndicesSegmentsAction extends BaseRestHandler { - public RestIndicesSegmentsAction(Settings settings, RestController controller) { - super(settings); + + public RestIndicesSegmentsAction(RestController controller) { controller.registerHandler(GET, "/_segments", this); controller.registerHandler(GET, "/{index}/_segments", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesShardStoresAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesShardStoresAction.java index a498557ab37..e214e7bbb34 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesShardStoresAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesShardStoresAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -43,8 +42,8 @@ import static org.elasticsearch.rest.RestStatus.OK; * Rest action for {@link IndicesShardStoresAction} */ public class RestIndicesShardStoresAction extends BaseRestHandler { - public RestIndicesShardStoresAction(Settings settings, RestController controller) { - super(settings); + + public RestIndicesShardStoresAction(RestController controller) { controller.registerHandler(GET, "/_shard_stores", this); controller.registerHandler(GET, "/{index}/_shard_stores", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java index c2d16ce5ac6..6ef806a43a5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -43,8 +42,8 @@ import java.util.function.Consumer; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestIndicesStatsAction extends BaseRestHandler { - public RestIndicesStatsAction(Settings settings, RestController controller) { - super(settings); + + public RestIndicesStatsAction(RestController controller) { controller.registerHandler(GET, "/_stats", this); controller.registerHandler(GET, "/_stats/{metric}", this); controller.registerHandler(GET, "/{index}/_stats", 
this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java index fcf2422be7c..8eb5c3a74bd 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -33,8 +32,8 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; public class RestOpenIndexAction extends BaseRestHandler { - public RestOpenIndexAction(Settings settings, RestController controller) { - super(settings); + + public RestOpenIndexAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/_open", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_open", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index bb0f282639b..f539b85b556 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateReque import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -44,8 +43,7 @@ public class RestPutIndexTemplateAction extends BaseRestHandler { " Specifying include_type_name in put index template requests is deprecated."+ " The parameter will be removed in the next major version."; - public RestPutIndexTemplateAction(Settings settings, RestController controller) { - super(settings); + public RestPutIndexTemplateAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_template/{name}", this); controller.registerHandler(RestRequest.Method.POST, "/_template/{name}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index f5e760d54ed..149758c60b4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; @@ -47,8 +46,7 @@ public class RestPutMappingAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in put " + "mapping requests is deprecated. 
The parameter will be removed in the next major version."; - public RestPutMappingAction(Settings settings, RestController controller) { - super(settings); + public RestPutMappingAction(RestController controller) { controller.registerHandler(PUT, "/{index}/_mapping/", this); controller.registerHandler(PUT, "/{index}/{type}/_mapping", this); controller.registerHandler(PUT, "/{index}/_mapping/{type}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java index b445cb3a676..38325a17f48 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -37,8 +36,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; * REST handler to report on index recoveries. 
*/ public class RestRecoveryAction extends BaseRestHandler { - public RestRecoveryAction(Settings settings, RestController controller) { - super(settings); + + public RestRecoveryAction(RestController controller) { controller.registerHandler(GET, "/_recovery", this); controller.registerHandler(GET, "/{index}/_recovery", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java index 1f0f81e0285..5e89e16d068 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -37,8 +36,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestRefreshAction extends BaseRestHandler { - public RestRefreshAction(Settings settings, RestController controller) { - super(settings); + + public RestRefreshAction(RestController controller) { controller.registerHandler(POST, "/_refresh", this); controller.registerHandler(POST, "/{index}/_refresh", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java index 15fd4b663e2..ad7e169b007 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +++ 
b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -40,8 +39,7 @@ public abstract class RestResizeHandler extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestResizeHandler.class); private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); - RestResizeHandler(final Settings settings) { - super(settings); + RestResizeHandler() { } @Override @@ -80,8 +78,7 @@ public abstract class RestResizeHandler extends BaseRestHandler { public static class RestShrinkIndexAction extends RestResizeHandler { - public RestShrinkIndexAction(final Settings settings, final RestController controller) { - super(settings); + public RestShrinkIndexAction(final RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/{index}/_shrink/{target}", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_shrink/{target}", this); } @@ -100,8 +97,7 @@ public abstract class RestResizeHandler extends BaseRestHandler { public static class RestSplitIndexAction extends RestResizeHandler { - public RestSplitIndexAction(final Settings settings, final RestController controller) { - super(settings); + public RestSplitIndexAction(final RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/{index}/_split/{target}", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_split/{target}", this); } @@ -120,8 +116,7 @@ public abstract class RestResizeHandler extends BaseRestHandler { public static class 
RestCloneIndexAction extends RestResizeHandler { - public RestCloneIndexAction(final Settings settings, final RestController controller) { - super(settings); + public RestCloneIndexAction(final RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/{index}/_clone/{target}", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_clone/{target}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index f79d3247e64..13ec16b7e54 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -37,8 +36,8 @@ public class RestRolloverIndexAction extends BaseRestHandler { LogManager.getLogger(RestRolloverIndexAction.class)); public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in rollover " + "index requests is deprecated. 
The parameter will be removed in the next major version."; - public RestRolloverIndexAction(Settings settings, RestController controller) { - super(settings); + + public RestRolloverIndexAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/{index}/_rollover", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_rollover/{new_index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java index 4824fe4a842..53cbb5c6d10 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -39,8 +38,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestSyncedFlushAction extends BaseRestHandler { - public RestSyncedFlushAction(Settings settings, RestController controller) { - super(settings); + + public RestSyncedFlushAction(RestController controller) { controller.registerHandler(POST, "/_flush/synced", this); controller.registerHandler(POST, "/{index}/_flush/synced", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java index b06314aeb1a..e26bf9f318e 
100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java @@ -36,8 +36,7 @@ import static org.elasticsearch.client.Requests.updateSettingsRequest; public class RestUpdateSettingsAction extends BaseRestHandler { - public RestUpdateSettingsAction(Settings settings, RestController controller) { - super(settings); + public RestUpdateSettingsAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/{index}/_settings", this); controller.registerHandler(RestRequest.Method.PUT, "/_settings", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeAction.java index 6ed39c70efd..77f2d4f7b86 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -34,8 +33,8 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestUpgradeAction extends BaseRestHandler { - public RestUpgradeAction(Settings settings, RestController controller) { - super(settings); + + public RestUpgradeAction(RestController controller) { controller.registerHandler(POST, "/_upgrade", this); controller.registerHandler(POST, "/{index}/_upgrade", this); } diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeStatusAction.java index 1b21e125cdc..a560c19daf7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeStatusAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -35,8 +34,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestUpgradeStatusAction extends BaseRestHandler { - public RestUpgradeStatusAction(Settings settings, RestController controller) { - super(settings); + public RestUpgradeStatusAction(RestController controller) { controller.registerHandler(GET, "/_upgrade", this); controller.registerHandler(GET, "/{index}/_upgrade", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index 327706dc60a..39e9b3fa852 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -50,8 +49,7 @@ public class RestValidateQueryAction extends BaseRestHandler { static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in validate query requests is deprecated."; - public RestValidateQueryAction(Settings settings, RestController controller) { - super(settings); + public RestValidateQueryAction(RestController controller) { controller.registerHandler(GET, "/_validate/query", this); controller.registerHandler(POST, "/_validate/query", this); controller.registerHandler(GET, "/{index}/_validate/query", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java index 58dc861126b..2f1ed2b4998 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.UTF8StreamWriter; import org.elasticsearch.common.io.stream.BytesStream; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; @@ -40,10 +39,6 @@ import static org.elasticsearch.rest.action.cat.RestTable.pad; public abstract class AbstractCatAction extends BaseRestHandler { - public AbstractCatAction(Settings settings) { - super(settings); - } - protected abstract RestChannelConsumer doCatRequest(RestRequest request, NodeClient client); protected abstract void documentation(StringBuilder sb); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java index 774a603cb4b..b46ad80e338 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java @@ -19,14 +19,12 @@ package org.elasticsearch.rest.action.cat; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -37,8 +35,8 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestAliasAction extends AbstractCatAction { - public RestAliasAction(Settings settings, RestController controller) { - super(settings); + + public RestAliasAction(RestController controller) { controller.registerHandler(GET, "/_cat/aliases", this); controller.registerHandler(GET, "/_cat/aliases/{alias}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java index 60b76269ebc..1f30fb917ea 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.cat; import com.carrotsearch.hppc.ObjectIntScatterMap; - import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; 
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -32,7 +31,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -44,8 +42,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestAllocationAction extends AbstractCatAction { - public RestAllocationAction(Settings settings, RestController controller) { - super(settings); + + public RestAllocationAction(RestController controller) { controller.registerHandler(GET, "/_cat/allocation", this); controller.registerHandler(GET, "/_cat/allocation/{nodes}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java index d52449ea66f..fe5f7b8127c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.cat; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -40,8 +39,7 @@ public class RestCatAction extends BaseRestHandler { private final String HELP; @Inject - public RestCatAction(Settings settings, RestController controller, List catActions) { - super(settings); + public RestCatAction(RestController controller, List catActions) { controller.registerHandler(GET, "/_cat", this); StringBuilder sb = new StringBuilder(); 
sb.append(CAT_NL); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatRecoveryAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatRecoveryAction.java index 0cea93e4e7e..0f3e8812300 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatRecoveryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatRecoveryAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentElasticsearchExtension; import org.elasticsearch.indices.recovery.RecoveryState; @@ -49,8 +48,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; * be specified to limit output to a particular index or indices. 
*/ public class RestCatRecoveryAction extends AbstractCatAction { - public RestCatRecoveryAction(Settings settings, RestController restController) { - super(settings); + + public RestCatRecoveryAction(RestController restController) { restController.registerHandler(GET, "/_cat/recovery", this); restController.registerHandler(GET, "/_cat/recovery/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index 7dd0758c008..7b7742972ea 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -40,8 +39,7 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestCountAction extends AbstractCatAction { - public RestCountAction(Settings settings, RestController restController) { - super(settings); + public RestCountAction(RestController restController) { restController.registerHandler(GET, "/_cat/count", this); restController.registerHandler(GET, "/_cat/count/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java index 120ea603271..2eb83151953 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java @@ -20,13 +20,11 @@ package 
org.elasticsearch.rest.action.cat; import com.carrotsearch.hppc.cursors.ObjectLongCursor; - import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -39,8 +37,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; * Cat API class to display information about the size of fielddata fields per node */ public class RestFielddataAction extends AbstractCatAction { - public RestFielddataAction(Settings settings, RestController controller) { - super(settings); + + public RestFielddataAction(RestController controller) { controller.registerHandler(GET, "/_cat/fielddata", this); controller.registerHandler(GET, "/_cat/fielddata/{fields}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestHealthAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestHealthAction.java index d7e72f207ff..5e896f210f6 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestHealthAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestHealthAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -34,8 +33,8 @@ import java.util.Locale; import static 
org.elasticsearch.rest.RestRequest.Method.GET; public class RestHealthAction extends AbstractCatAction { - public RestHealthAction(Settings settings, RestController controller) { - super(settings); + + public RestHealthAction(RestController controller) { controller.registerHandler(GET, "/_cat/health", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 32fb62b8203..e452eef2c31 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -69,8 +69,7 @@ public class RestIndicesAction extends AbstractCatAction { private static final DateFormatter STRICT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_time"); - public RestIndicesAction(Settings settings, RestController controller) { - super(settings); + public RestIndicesAction(RestController controller) { controller.registerHandler(GET, "/_cat/indices", this); controller.registerHandler(GET, "/_cat/indices/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java index d1753aa868f..e50865af019 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -34,8 +33,8 @@ import org.elasticsearch.rest.action.RestResponseListener; 
import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestMasterAction extends AbstractCatAction { - public RestMasterAction(Settings settings, RestController controller) { - super(settings); + + public RestMasterAction(RestController controller) { controller.registerHandler(GET, "/_cat/master", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java index 2e396ac59b7..efae10a6aa2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -41,8 +40,8 @@ import java.util.Map; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestNodeAttrsAction extends AbstractCatAction { - public RestNodeAttrsAction(Settings settings, RestController controller) { - super(settings); + + public RestNodeAttrsAction(RestController controller) { controller.registerHandler(GET, "/_cat/nodeattrs", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index c28057cbebc..aa780f104c2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; 
import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.http.HttpInfo; @@ -68,8 +67,8 @@ import java.util.stream.Collectors; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestNodesAction extends AbstractCatAction { - public RestNodesAction(Settings settings, RestController controller) { - super(settings); + + public RestNodesAction(RestController controller) { controller.registerHandler(GET, "/_cat/nodes", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java index 21a3c0c3c82..84bd8798153 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -33,8 +32,8 @@ import org.elasticsearch.rest.action.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestPendingClusterTasksAction extends AbstractCatAction { - public RestPendingClusterTasksAction(Settings settings, RestController controller) { - super(settings); + + public RestPendingClusterTasksAction(RestController controller) { controller.registerHandler(GET, "/_cat/pending_tasks", this); } diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java index f61a5fdeb7e..eb6a6fc4b8f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -39,8 +38,8 @@ import org.elasticsearch.rest.action.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestPluginsAction extends AbstractCatAction { - public RestPluginsAction(Settings settings, RestController controller) { - super(settings); + + public RestPluginsAction(RestController controller) { controller.registerHandler(GET, "/_cat/plugins", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestRepositoriesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestRepositoriesAction.java index 42cb904f2f0..f77f9bf100b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestRepositoriesAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRe import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.RestResponse; @@ -36,8 +35,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; * Cat API class to display information about snapshot repositories */ public class RestRepositoriesAction extends AbstractCatAction { - public RestRepositoriesAction(Settings settings, RestController controller) { - super(settings); + + public RestRepositoriesAction(RestController controller) { controller.registerHandler(GET, "/_cat/repositories", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java index 0d84549f145..c51fa593f3b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -44,8 +43,8 @@ import java.util.Map; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestSegmentsAction extends AbstractCatAction { - public RestSegmentsAction(Settings settings, RestController controller) { - super(settings); + + public RestSegmentsAction(RestController controller) { controller.registerHandler(GET, "/_cat/segments", this); controller.registerHandler(GET, "/_cat/segments/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index ae751475ce5..847b1a773e5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ 
b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.engine.CommitStats; @@ -60,8 +59,8 @@ import java.util.function.Function; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestShardsAction extends AbstractCatAction { - public RestShardsAction(Settings settings, RestController controller) { - super(settings); + + public RestShardsAction(RestController controller) { controller.registerHandler(GET, "/_cat/shards", this); controller.registerHandler(GET, "/_cat/shards/{index}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java index 22258ce2d88..2e28665fa93 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestController; @@ -44,8 +43,8 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; * Cat API class to display information about snapshots */ public class RestSnapshotAction extends AbstractCatAction { - public 
RestSnapshotAction(Settings settings, RestController controller) { - super(settings); + + public RestSnapshotAction(RestController controller) { controller.registerHandler(GET, "/_cat/snapshots", this); controller.registerHandler(GET, "/_cat/snapshots/{repository}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java index 573eac6c049..b6d5eb549bf 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestController; @@ -51,8 +50,7 @@ import static org.elasticsearch.rest.action.admin.cluster.RestListTasksAction.ge public class RestTasksAction extends AbstractCatAction { private final Supplier nodesInCluster; - public RestTasksAction(Settings settings, RestController controller, Supplier nodesInCluster) { - super(settings); + public RestTasksAction(RestController controller, Supplier nodesInCluster) { controller.registerHandler(GET, "/_cat/tasks", this); this.nodesInCluster = nodesInCluster; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java index b6b63348882..c8da4648e2a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.cat; import 
com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.node.NodeClient; @@ -28,7 +27,6 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Table; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -37,8 +35,8 @@ import org.elasticsearch.rest.action.RestResponseListener; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestTemplatesAction extends AbstractCatAction { - public RestTemplatesAction(Settings settings, RestController controller) { - super(settings); + + public RestTemplatesAction(RestController controller) { controller.registerHandler(GET, "/_cat/templates", this); controller.registerHandler(GET, "/_cat/templates/{name}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java index e420dfb9843..571767affd0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java @@ -32,7 +32,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Table; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -51,8 +50,8 @@ import java.util.TreeMap; import static 
org.elasticsearch.rest.RestRequest.Method.GET; public class RestThreadPoolAction extends AbstractCatAction { - public RestThreadPoolAction(Settings settings, RestController controller) { - super(settings); + + public RestThreadPoolAction(RestController controller) { controller.registerHandler(GET, "/_cat/thread_pool", this); controller.registerHandler(GET, "/_cat/thread_pool/{thread_pool_patterns}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 08ddbb728c1..33fd1d46b3e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -57,7 +57,6 @@ public class RestBulkAction extends BaseRestHandler { " Specifying types in bulk requests is deprecated."; public RestBulkAction(Settings settings, RestController controller) { - super(settings); controller.registerHandler(POST, "/_bulk", this); controller.registerHandler(PUT, "/_bulk", this); controller.registerHandler(POST, "/{index}/_bulk", this); @@ -83,7 +82,7 @@ public class RestBulkAction extends BaseRestHandler { if (defaultType == null) { defaultType = MapperService.SINGLE_MAPPING_NAME; } else { - deprecationLogger.deprecatedAndMaybeLog("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE); + deprecationLogger.deprecatedAndMaybeLog("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE); } String defaultRouting = request.param("routing"); FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java index 5100b3d960b..479e1eb73dc 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java +++ 
b/server/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -42,8 +41,7 @@ public class RestDeleteAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + "document index requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; - public RestDeleteAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteAction(RestController controller) { controller.registerHandler(DELETE, "/{index}/_doc/{id}", this); // Deprecated typed endpoint. 
diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index 6bef519078a..100d4cc6531 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -48,8 +47,7 @@ public class RestGetAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; - public RestGetAction(final Settings settings, final RestController controller) { - super(settings); + public RestGetAction(final RestController controller) { controller.registerHandler(GET, "/{index}/_doc/{id}", this); controller.registerHandler(HEAD, "/{index}/_doc/{id}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java index af376bf7c3c..a45c30d0ad1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.DeprecationLogger; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.rest.BaseRestHandler; @@ -55,8 +54,7 @@ public class RestGetSourceAction extends BaseRestHandler { static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source" + "requests is deprecated."; - public RestGetSourceAction(final Settings settings, final RestController controller) { - super(settings); + public RestGetSourceAction(final RestController controller) { controller.registerHandler(GET, "/{index}/_source/{id}", this); controller.registerHandler(HEAD, "/{index}/_source/{id}", this); controller.registerHandler(GET, "/{index}/{type}/{id}/_source", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index adaa4a46fa2..80794be9827 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; @@ -46,13 +45,12 @@ public class RestIndexAction extends BaseRestHandler { "index requests is deprecated, use the typeless endpoints instead (/{index}/_doc/{id}, /{index}/_doc, " + "or /{index}/_create/{id})."; - public RestIndexAction(Settings settings, RestController controller) { - super(settings); + public RestIndexAction(RestController controller) { 
controller.registerHandler(POST, "/{index}/_doc", this); // auto id creation controller.registerHandler(PUT, "/{index}/_doc/{id}", this); controller.registerHandler(POST, "/{index}/_doc/{id}", this); - CreateHandler createHandler = new CreateHandler(settings); + CreateHandler createHandler = new CreateHandler(); controller.registerHandler(PUT, "/{index}/_create/{id}", createHandler); controller.registerHandler(POST, "/{index}/_create/{id}/", createHandler); @@ -70,8 +68,7 @@ public class RestIndexAction extends BaseRestHandler { } final class CreateHandler extends BaseRestHandler { - protected CreateHandler(Settings settings) { - super(settings); + protected CreateHandler() { } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java index e466eaf4ee9..a7ef8e60fb0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java @@ -46,7 +46,6 @@ public class RestMultiGetAction extends BaseRestHandler { private final boolean allowExplicitIndex; public RestMultiGetAction(Settings settings, RestController controller) { - super(settings); controller.registerHandler(GET, "/_mget", this); controller.registerHandler(POST, "/_mget", this); controller.registerHandler(GET, "/{index}/_mget", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java index bdb00526982..5322973d50e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import 
org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -43,8 +42,7 @@ public class RestMultiTermVectorsAction extends BaseRestHandler { static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying types in multi term vector requests is deprecated."; - public RestMultiTermVectorsAction(Settings settings, RestController controller) { - super(settings); + public RestMultiTermVectorsAction(RestController controller) { controller.registerHandler(GET, "/_mtermvectors", this); controller.registerHandler(POST, "/_mtermvectors", this); controller.registerHandler(GET, "/{index}/_mtermvectors", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java index 85ddd3b58ed..70455116de9 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MapperService; @@ -51,8 +50,7 @@ public class RestTermVectorsAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying types in term vector requests is deprecated."; - public 
RestTermVectorsAction(Settings settings, RestController controller) { - super(settings); + public RestTermVectorsAction(RestController controller) { controller.registerHandler(GET, "/{index}/_termvectors", this); controller.registerHandler(POST, "/{index}/_termvectors", this); controller.registerHandler(GET, "/{index}/_termvectors/{id}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index e89d257aefb..1409d7c3efa 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -45,8 +44,7 @@ public class RestUpdateAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + "document update requests is deprecated, use the endpoint /{index}/_update/{id} instead."; - public RestUpdateAction(Settings settings, RestController controller) { - super(settings); + public RestUpdateAction(RestController controller) { controller.registerHandler(POST, "/{index}/_update/{id}", this); // Deprecated typed endpoint. 
diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index 8fca614f63c..dea61df609a 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.ingest; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -30,8 +29,7 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; public class RestDeletePipelineAction extends BaseRestHandler { - public RestDeletePipelineAction(Settings settings, RestController controller) { - super(settings); + public RestDeletePipelineAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, "/_ingest/pipeline/{id}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index 92330d5bc45..751b9ee6889 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.ingest; import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import 
org.elasticsearch.rest.RestRequest; @@ -31,8 +30,8 @@ import org.elasticsearch.rest.action.RestStatusToXContentListener; import java.io.IOException; public class RestGetPipelineAction extends BaseRestHandler { - public RestGetPipelineAction(Settings settings, RestController controller) { - super(settings); + + public RestGetPipelineAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline", this); controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 9cd66c8c9e4..3b1861c14f5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -34,8 +33,8 @@ import java.io.IOException; public class RestPutPipelineAction extends BaseRestHandler { - public RestPutPipelineAction(Settings settings, RestController controller) { - super(settings); + + public RestPutPipelineAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_ingest/pipeline/{id}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index c5f01e25a9c..6b8d6c3ccc4 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -33,8 +32,8 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; public class RestSimulatePipelineAction extends BaseRestHandler { - public RestSimulatePipelineAction(Settings settings, RestController controller) { - super(settings); + + public RestSimulatePipelineAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this); controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}/_simulate", this); controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java index d9f746a029c..a8c69867a57 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.search; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -34,9 +33,7 @@ import java.util.Arrays; import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestClearScrollAction extends BaseRestHandler { - public RestClearScrollAction(Settings settings, RestController controller) { - super(settings); - + public RestClearScrollAction(RestController controller) { controller.registerHandler(DELETE, "/_search/scroll", this); controller.registerHandler(DELETE, "/_search/scroll/{scroll_id}", this); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index ecdd34ca07c..912d2f531ae 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -51,8 +50,7 @@ public class RestCountAction extends BaseRestHandler { static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in count requests is deprecated."; - public RestCountAction(Settings settings, RestController controller) { - super(settings); + public RestCountAction(RestController controller) { controller.registerHandler(POST, "/_count", this); controller.registerHandler(GET, "/_count", this); controller.registerHandler(POST, "/{index}/_count", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java 
b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index 2bcc8a5a794..bcaea4e65f0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -47,8 +46,7 @@ public class RestExplainAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying a type in explain requests is deprecated."; - public RestExplainAction(Settings settings, RestController controller) { - super(settings); + public RestExplainAction(RestController controller) { controller.registerHandler(GET, "/{index}/_explain/{id}", this); controller.registerHandler(POST, "/{index}/_explain/{id}", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 5aec63ccb9f..adc9c31aafc 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -67,7 +67,6 @@ public class RestMultiSearchAction extends BaseRestHandler { private final boolean allowExplicitIndex; public RestMultiSearchAction(Settings settings, RestController controller) { - super(settings); controller.registerHandler(GET, "/_msearch", this); controller.registerHandler(POST, "/_msearch", this); controller.registerHandler(GET, "/{index}/_msearch", this); diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 00c08a124f1..4e935211dba 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -73,8 +72,7 @@ public class RestSearchAction extends BaseRestHandler { public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in search requests is deprecated."; - public RestSearchAction(Settings settings, RestController controller) { - super(settings); + public RestSearchAction(RestController controller) { controller.registerHandler(GET, "/_search", this); controller.registerHandler(POST, "/_search", this); controller.registerHandler(GET, "/{index}/_search", this); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java index 50806a096f1..0b49048dee9 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.search; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import 
org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -39,9 +38,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestSearchScrollAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Collections.singleton(RestSearchAction.TOTAL_HITS_AS_INT_PARAM); - public RestSearchScrollAction(Settings settings, RestController controller) { - super(settings); - + public RestSearchScrollAction(RestController controller) { controller.registerHandler(GET, "/_search/scroll", this); controller.registerHandler(POST, "/_search/scroll", this); controller.registerHandler(GET, "/_search/scroll/{scroll_id}", this); diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 36d9fff5a3f..cb5ce5ddb6e 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -123,7 +123,7 @@ public class ActionModuleTests extends ESTestCase { public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { - return singletonList(new RestMainAction(settings, restController)); + return singletonList(new RestMainAction(restController)); } }; SettingsModule settings = new SettingsModule(Settings.EMPTY); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 27e3ffefd63..7be8871ee06 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -36,6 +36,7 @@ import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -52,6 +53,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.core.IsNull.notNullValue; @@ -105,8 +107,8 @@ public class CreateIndexIT extends ESIntegTestCase { } try { prepareCreate("test") - .addMapping("type1", jsonBuilder()) - .addMapping("type1", jsonBuilder()); + .addMapping("type1", jsonBuilder().startObject().endObject()) + .addMapping("type1", jsonBuilder().startObject().endObject()); fail("did not hit expected exception"); } catch (IllegalStateException ise) { // expected @@ -147,6 +149,14 @@ public class CreateIndexIT extends ESIntegTestCase { assertTrue(metadata.sourceAsMap().isEmpty()); } + public void testMappingParamAndNestedMismatch() throws Exception { + MapperParsingException e = expectThrows(MapperParsingException.class, () -> prepareCreate("test") + .addMapping("type1", XContentFactory.jsonBuilder().startObject() + .startObject("type2").endObject() + .endObject()).get()); + assertThat(e.getMessage(), startsWith("Failed to parse mapping [type1]: Root mapping definition has unsupported parameters")); + } + public void testEmptyMappings() throws Exception { assertAcked(prepareCreate("test") .addMapping("_doc", 
XContentFactory.jsonBuilder().startObject() diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java index 2dbcbf4aff6..419d669d066 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -23,11 +23,14 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -46,7 +49,7 @@ public class CreateIndexRequestTests extends ESTestCase { public void testSerialization() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject()); + String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("my_type").endObject().endObject()); request.mapping("my_type", mapping, XContentType.JSON); try (BytesStreamOutput output = new BytesStreamOutput()) { @@ -86,7 +89,12 @@ public class CreateIndexRequestTests extends ESTestCase { 
public void testToXContent() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject()); + String mapping; + if (randomBoolean()) { + mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("my_type").endObject().endObject()); + } else { + mapping = Strings.toString(JsonXContent.contentBuilder().startObject().endObject()); + } request.mapping("my_type", mapping, XContentType.JSON); Alias alias = new Alias("test_alias"); @@ -102,12 +110,73 @@ public class CreateIndexRequestTests extends ESTestCase { String actualRequestBody = Strings.toString(request); String expectedRequestBody = "{\"settings\":{\"index\":{\"number_of_shards\":\"10\"}}," + - "\"mappings\":{\"my_type\":{\"type\":{}}}," + + "\"mappings\":{\"my_type\":{\"my_type\":{}}}," + "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\",\"is_write_index\":true}}}"; assertEquals(expectedRequestBody, actualRequestBody); } + public void testMappingKeyedByType() throws IOException { + CreateIndexRequest request1 = new CreateIndexRequest("foo"); + CreateIndexRequest request2 = new CreateIndexRequest("bar"); + { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject().startObject("properties") + .startObject("field1") + .field("type", "text") + .endObject() + .startObject("field2") + .startObject("properties") + .startObject("field21") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject().endObject(); + request1.mapping("type1", builder); + builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject().startObject("type1") + .startObject("properties") + .startObject("field1") + .field("type", "text") + .endObject() + .startObject("field2") + .startObject("properties") + 
.startObject("field21") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject().endObject(); + request2.mapping("type1", builder); + assertEquals(request1.mappings(), request2.mappings()); + } + { + request1 = new CreateIndexRequest("foo"); + request2 = new CreateIndexRequest("bar"); + String nakedMapping = "{\"properties\": {\"foo\": {\"type\": \"integer\"}}}"; + request1.mapping("type2", nakedMapping, XContentType.JSON); + request2.mapping("type2", "{\"type2\": " + nakedMapping + "}", XContentType.JSON); + assertEquals(request1.mappings(), request2.mappings()); + } + { + request1 = new CreateIndexRequest("foo"); + request2 = new CreateIndexRequest("bar"); + Map nakedMapping = MapBuilder.newMapBuilder() + .put("properties", MapBuilder.newMapBuilder() + .put("bar", MapBuilder.newMapBuilder() + .put("type", "scaled_float") + .put("scaling_factor", 100) + .map()) + .map()) + .map(); + request1.mapping("type3", nakedMapping); + request2.mapping("type3", MapBuilder.newMapBuilder().put("type3", nakedMapping).map()); + assertEquals(request1.mappings(), request2.mappings()); + } + } + public void testToAndFromXContent() throws IOException { final CreateIndexRequest createIndexRequest = RandomCreateIndexGenerator.randomCreateIndexRequest(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java index c4a4169ba16..a80ca1a83b2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.indices.forcemerge; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -43,11 +42,11 @@ public class RestForceMergeActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestForceMergeAction(Settings.EMPTY, controller()); + new RestForceMergeAction(controller()); } public void testBodyRejection() throws Exception { - final RestForceMergeAction handler = new RestForceMergeAction(Settings.EMPTY, mock(RestController.class)); + final RestForceMergeAction handler = new RestForceMergeAction(mock(RestController.class)); String json = JsonXContent.contentBuilder().startObject().field("max_num_segments", 1).endObject().toString(); final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) .withContent(new BytesArray(json), XContentType.JSON) diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index c866d7d279c..7dfdd0484d5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.rollover; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestTests; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -32,9 +33,11 @@ import 
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.XContentTestUtils; @@ -115,6 +118,35 @@ public class RolloverRequestTests extends ESTestCase { assertThat(request.getCreateIndexRequest().settings().getAsInt("number_of_shards", 0), equalTo(10)); } + public void testTypelessMappingParsing() throws Exception { + final RolloverRequest request = new RolloverRequest(randomAlphaOfLength(10), randomAlphaOfLength(10)); + final XContentBuilder builder = XContentFactory.jsonBuilder() + .startObject() + .startObject("mappings") + .startObject("properties") + .startObject("field1") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject(); + + boolean includeTypeName = false; + request.fromXContent(includeTypeName, createParser(builder)); + + CreateIndexRequest createIndexRequest = request.getCreateIndexRequest(); + String mapping = createIndexRequest.mappings().get(MapperService.SINGLE_MAPPING_NAME); + assertNotNull(mapping); + + Map parsedMapping = XContentHelper.convertToMap( + new BytesArray(mapping), false, XContentType.JSON).v2(); + + @SuppressWarnings("unchecked") + Map properties = (Map) parsedMapping.get(MapperService.SINGLE_MAPPING_NAME); + assertNotNull(properties); + assertFalse(properties.isEmpty()); + } + public void testSerialize() throws Exception { RolloverRequest originalRequest = new RolloverRequest("alias-index", "new-index-name"); 
originalRequest.addMaxIndexDocsCondition(randomNonNegativeLong()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java index 2d037d7c024..7d5152b539a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java @@ -20,15 +20,19 @@ package org.elasticsearch.action.admin.indices.template.put; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; import java.io.UncheckedIOException; import java.util.Arrays; import java.util.Collections; +import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -37,6 +41,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; public class PutIndexTemplateRequestTests extends AbstractXContentTestCase { + public void testValidateErrorMessage() throws Exception { PutIndexTemplateRequest request = new PutIndexTemplateRequest(); ActionRequestValidationException withoutNameAndPattern = request.validate(); @@ -53,6 +58,67 @@ public class PutIndexTemplateRequestTests extends AbstractXContentTestCase nakedMapping = MapBuilder.newMapBuilder() + .put("properties", MapBuilder.newMapBuilder() + .put("bar", 
MapBuilder.newMapBuilder() + .put("type", "scaled_float") + .put("scaling_factor", 100) + .map()) + .map()) + .map(); + request1.mapping("type3", nakedMapping); + request2.mapping("type3", MapBuilder.newMapBuilder().put("type3", nakedMapping).map()); + assertEquals(request1.mappings(), request2.mappings()); + } + } + @Override protected PutIndexTemplateRequest createTestInstance() { PutIndexTemplateRequest request = new PutIndexTemplateRequest(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java index 05f12ac72b2..ec115265652 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java @@ -142,24 +142,41 @@ public class ClusterFormationFailureHelperTests extends ESTestCase { public void testDescriptionOnMasterIneligibleNodes() { final DiscoveryNode localNode = new DiscoveryNode("local", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) - .version(12L).nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId())).build(); + .version(12L) + .metaData(MetaData.builder().coordinationMetaData(CoordinationMetaData.builder().term(4L).build())) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId())).build(); assertThat(new ClusterFormationState(Settings.EMPTY, clusterState, emptyList(), emptyList(), 15L, electionStrategy) .getDescription(), is("master not discovered yet: have discovered []; discovery will continue using [] from hosts providers " + - "and [] from last-known cluster state; node term 15, last-accepted version 12 in term 0")); + "and [] from last-known cluster state; node term 15, last-accepted version 12 
in term 4")); final TransportAddress otherAddress = buildNewFakeTransportAddress(); assertThat(new ClusterFormationState(Settings.EMPTY, clusterState, singletonList(otherAddress), emptyList(), 16L, electionStrategy) .getDescription(), is("master not discovered yet: have discovered []; discovery will continue using [" + otherAddress + - "] from hosts providers and [] from last-known cluster state; node term 16, last-accepted version 12 in term 0")); + "] from hosts providers and [] from last-known cluster state; node term 16, last-accepted version 12 in term 4")); final DiscoveryNode otherNode = new DiscoveryNode("other", buildNewFakeTransportAddress(), Version.CURRENT); assertThat(new ClusterFormationState(Settings.EMPTY, clusterState, emptyList(), singletonList(otherNode), 17L, electionStrategy) .getDescription(), is("master not discovered yet: have discovered [" + otherNode + "]; discovery will continue using [] from hosts providers " + - "and [] from last-known cluster state; node term 17, last-accepted version 12 in term 0")); + "and [] from last-known cluster state; node term 17, last-accepted version 12 in term 4")); + } + + public void testDescriptionForBWCState() { + final DiscoveryNode localNode = new DiscoveryNode("local", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); + final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .metaData(MetaData.builder() + .version(42L) // check that we use metadata version in case of BWC term 0 + .coordinationMetaData(CoordinationMetaData.builder().term(Coordinator.ZEN1_BWC_TERM).build()) + .build()) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId())).build(); + + assertThat(new ClusterFormationState(Settings.EMPTY, clusterState, emptyList(), emptyList(), 15L, electionStrategy) + .getDescription(), + is("master not discovered yet: have discovered []; discovery will continue using [] from hosts providers " + + "and [] from last-known cluster 
state; node term 15, last-accepted version 42 in term 0")); } public void testDescriptionBeforeBootstrapping() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java index d4d36434828..893d1f58e8e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java @@ -143,14 +143,14 @@ public class IndexCreationTaskTests extends ESTestCase { public void testApplyDataFromRequest() throws Exception { setupRequestAlias(new Alias("alias1")); - setupRequestMapping("mapping1", createMapping()); + setupRequestMapping("type", createMapping()); reqSettings.put("key1", "value1"); final ClusterState result = executeTask(); assertThat(result.metaData().index("test").getAliases(), hasKey("alias1")); assertThat(result.metaData().index("test").getSettings().get("key1"), equalTo("value1")); - assertThat(getMappingsFromResponse(), Matchers.hasKey("mapping1")); + assertThat(getMappingsFromResponse(), Matchers.hasKey("type")); } public void testInvalidAliasName() throws Exception { @@ -165,19 +165,19 @@ public class IndexCreationTaskTests extends ESTestCase { addMatchingTemplate(builder -> builder .putAlias(AliasMetaData.builder("alias1").searchRouting("fromTpl").build()) - .putMapping("mapping1", tplMapping) + .putMapping("type", tplMapping) .settings(Settings.builder().put("key1", "tplValue")) ); setupRequestAlias(new Alias("alias1").searchRouting("fromReq")); - setupRequestMapping("mapping1", reqMapping); + setupRequestMapping("type", reqMapping); reqSettings.put("key1", "reqValue"); final ClusterState result = executeTask(); assertThat(result.metaData().index("test").getAliases().get("alias1").getSearchRouting(), equalTo("fromReq")); assertThat(result.metaData().index("test").getSettings().get("key1"), equalTo("reqValue")); - 
assertThat(getMappingsFromResponse().get("mapping1").toString(), equalTo("{type={properties={field={type=keyword}}}}")); + assertThat(getMappingsFromResponse().get("type").toString(), equalTo("{type={properties={field={type=keyword}}}}")); } public void testDefaultSettings() throws Exception { @@ -289,7 +289,7 @@ public class IndexCreationTaskTests extends ESTestCase { public void testWriteIndex() throws Exception { Boolean writeIndex = randomBoolean() ? null : randomBoolean(); setupRequestAlias(new Alias("alias1").writeIndex(writeIndex)); - setupRequestMapping("mapping1", createMapping()); + setupRequestMapping("type", createMapping()); reqSettings.put("key1", "value1"); final ClusterState result = executeTask(); @@ -302,7 +302,7 @@ public class IndexCreationTaskTests extends ESTestCase { .settings(settings(Version.CURRENT)).putAlias(AliasMetaData.builder("alias1").writeIndex(true).build()) .numberOfShards(1).numberOfReplicas(0).build(); idxBuilder.put("test2", existingWriteIndex); - setupRequestMapping("mapping1", createMapping()); + setupRequestMapping("type", createMapping()); reqSettings.put("key1", "value1"); setupRequestAlias(new Alias("alias1").writeIndex(true)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java index 81fd5ec7fc8..14d640c4ca3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java @@ -19,11 +19,15 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import 
org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; @@ -34,6 +38,7 @@ import org.elasticsearch.test.InternalSettingsPlugin; import java.util.Collection; import java.util.Collections; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -134,4 +139,77 @@ public class MetaDataMappingServiceTests extends ESSingleNodeTestCase { Collections.singletonMap("foo", Collections.singletonMap("type", "keyword"))), mappingMetaData.sourceAsMap()); } + + public void testForbidMultipleTypes() throws Exception { + CreateIndexRequestBuilder createIndexRequest = client().admin().indices() + .prepareCreate("test") + .addMapping(MapperService.SINGLE_MAPPING_NAME); + IndexService indexService = createIndex("test", createIndexRequest); + + MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest() + .type("other_type") + .indices(new Index[] {indexService.index()}) + .source(Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("other_type").endObject() + .endObject())); + ClusterStateTaskExecutor.ClusterTasksResult result = + mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request)); + assertThat(result.executionResults.size(), equalTo(1)); + + ClusterStateTaskExecutor.TaskResult taskResult = result.executionResults.values().iterator().next(); + assertFalse(taskResult.isSuccess()); + 
assertThat(taskResult.getFailure().getMessage(), containsString( + "Rejecting mapping update to [test] as the final mapping would have more than 1 type: ")); + } + + /** + * This test checks that the multi-type validation is done before we do any other kind of validation + * on the mapping that's added, see https://github.com/elastic/elasticsearch/issues/29313 + */ + public void testForbidMultipleTypesWithConflictingMappings() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("field1") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + + CreateIndexRequestBuilder createIndexRequest = client().admin().indices() + .prepareCreate("test") + .addMapping(MapperService.SINGLE_MAPPING_NAME, mapping); + IndexService indexService = createIndex("test", createIndexRequest); + + MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + String conflictingMapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("other_type") + .startObject("properties") + .startObject("field1") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + + PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest() + .type("other_type") + .indices(new Index[] {indexService.index()}) + .source(conflictingMapping); + ClusterStateTaskExecutor.ClusterTasksResult result = + mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request)); + assertThat(result.executionResults.size(), equalTo(1)); + + ClusterStateTaskExecutor.TaskResult taskResult = result.executionResults.values().iterator().next(); + assertFalse(taskResult.isSuccess()); + assertThat(taskResult.getFailure().getMessage(), 
containsString( + "Rejecting mapping update to [test] as the final mapping would have more than 1 type: ")); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index 5731b04be4c..c7e47e98de3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; @@ -41,15 +42,14 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class MockDiskUsagesIT extends ESIntegTestCase { @@ -184,9 +184,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase { final List realNodeNames = new ArrayList<>(); 
ClusterStateResponse resp = client().admin().cluster().prepareState().get(); - Iterator iter = resp.getState().getRoutingNodes().iterator(); - while (iter.hasNext()) { - RoutingNode node = iter.next(); + for (RoutingNode node : resp.getState().getRoutingNodes()) { realNodeNames.add(node.nodeId()); logger.info("--> node {} has {} shards", node.nodeId(), resp.getState().getRoutingNodes().node(node.nodeId()).numberOfOwningShards()); @@ -202,10 +200,10 @@ public class MockDiskUsagesIT extends ESIntegTestCase { cis.setN3Usage(realNodeNames.get(2), new DiskUsage(nodes.get(2), "n3", "_na_", 100, 3)); // Wait until index "test" is blocked - assertBusy(() -> { - assertBlocked(client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar"), - IndexMetaData.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK); - }); + assertBusy(() -> assertBlocked(client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource("foo", "bar"), + IndexMetaData.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK)); + + assertFalse(client().admin().cluster().prepareHealth("test").setWaitForEvents(Priority.LANGUID).get().isTimedOut()); // Cannot add further documents assertBlocked(client().prepareIndex().setIndex("test").setType("doc").setId("2").setSource("foo", "bar"), diff --git a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 32cc6785d57..88aa5eea2ef 100644 --- a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.network; -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.Table; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; @@ -35,9 +33,6 @@ 
import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.NullDispatcher; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.plugins.NetworkPlugin; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.cat.AbstractCatAction; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -46,7 +41,6 @@ import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -92,37 +86,6 @@ public class NetworkModuleTests extends ESTestCase { } } - - static class FakeRestHandler extends BaseRestHandler { - FakeRestHandler() { - super(null); - } - @Override - public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { return channel -> {}; } - @Override - public String getName() { - return "FakeRestHandler"; - } - } - - static class FakeCatRestHandler extends AbstractCatAction { - FakeCatRestHandler() { - super(null); - } - @Override - protected RestChannelConsumer doCatRequest(RestRequest request, NodeClient client) { return channel -> {}; } - @Override - protected void documentation(StringBuilder sb) {} - @Override - protected Table getTableWithHeader(RestRequest request) { - return null; - } - @Override - public String getName() { - return "FakeCatRestHandler"; - } - } - public void testRegisterTransport() { Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom").build(); Supplier custom = () -> null; // content doesn't matter we check reference equality diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java 
b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 069c52a96e3..40f9c73592d 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.env; import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.util.Constants; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LuceneTestCase; @@ -389,6 +390,7 @@ public class NodeEnvironmentTests extends ESTestCase { } public void testCustomDataPaths() throws Exception { + assumeFalse("Fails on Windows, see https://github.com/elastic/elasticsearch/issues/45333", Constants.WINDOWS); String[] dataPaths = tmpPaths(); NodeEnvironment env = newNodeEnvironment(dataPaths, "/tmp", Settings.EMPTY); diff --git a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java index 110a27ff551..4e82a77ce43 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java @@ -55,7 +55,7 @@ public class CombinedDeletionPolicyTests extends ESTestCase { final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, NO_OPS_PERFORMED, extraRetainedOps, () -> RetentionLeases.EMPTY); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get); + CombinedDeletionPolicy indexPolicy = newCombinedDeletionPolicy(translogPolicy, softDeletesPolicy, globalCheckpoint); final LongArrayList maxSeqNoList = new LongArrayList(); final LongArrayList translogGenList = new LongArrayList(); @@ -102,7 +102,7 @@ public 
class CombinedDeletionPolicyTests extends ESTestCase { new SoftDeletesPolicy(globalCheckpoint::get, -1, extraRetainedOps, () -> RetentionLeases.EMPTY); final UUID translogUUID = UUID.randomUUID(); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get); + CombinedDeletionPolicy indexPolicy = newCombinedDeletionPolicy(translogPolicy, softDeletesPolicy, globalCheckpoint); long lastMaxSeqNo = between(1, 1000); long lastCheckpoint = randomLongBetween(-1, lastMaxSeqNo); long lastTranslogGen = between(1, 20); @@ -182,7 +182,7 @@ public class CombinedDeletionPolicyTests extends ESTestCase { final AtomicLong globalCheckpoint = new AtomicLong(randomNonNegativeLong()); final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, 0, () -> RetentionLeases.EMPTY); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get); + CombinedDeletionPolicy indexPolicy = newCombinedDeletionPolicy(translogPolicy, softDeletesPolicy, globalCheckpoint); final int invalidCommits = between(1, 10); final List commitList = new ArrayList<>(); @@ -217,7 +217,7 @@ public class CombinedDeletionPolicyTests extends ESTestCase { final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, 0, () -> RetentionLeases.EMPTY); final UUID translogUUID = UUID.randomUUID(); final TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); - CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get); + CombinedDeletionPolicy indexPolicy = newCombinedDeletionPolicy(translogPolicy, softDeletesPolicy, globalCheckpoint); final List commitList = new ArrayList<>(); int totalCommits = 
between(2, 20); long lastMaxSeqNo = between(1, 1000); @@ -254,6 +254,17 @@ public class CombinedDeletionPolicyTests extends ESTestCase { } } + private CombinedDeletionPolicy newCombinedDeletionPolicy(TranslogDeletionPolicy translogPolicy, SoftDeletesPolicy softDeletesPolicy, + AtomicLong globalCheckpoint) { + return new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get) + { + @Override + protected int getDocCountOfCommit(IndexCommit indexCommit) { + return between(0, 1000); + } + }; + } + IndexCommit mockIndexCommit(long localCheckpoint, long maxSeqNo, UUID translogUUID, long translogGen) throws IOException { final Map userData = new HashMap<>(); userData.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(localCheckpoint)); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 1436bf18ae7..d1982358d2c 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -134,6 +134,7 @@ import org.elasticsearch.index.shard.ShardUtils; import org.elasticsearch.index.store.FsDirectoryFactory; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.SnapshotMatchers; +import org.elasticsearch.index.translog.TestTranslog; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -184,6 +185,7 @@ import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANS import static org.elasticsearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA; +import static 
org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; @@ -5938,4 +5940,34 @@ public class InternalEngineTests extends EngineTestCase { assertNotNull(engine.failedEngine.get()); } } + + /** + * We can trim translog on primary promotion and peer recovery based on the fact we add operations with either + * REPLICA or PEER_RECOVERY origin to translog although they already exist in the engine (i.e. hasProcessed() == true). + * If we decide not to add those already-processed operations to translog, we need to study carefully the consequence + * of the translog trimming in these two places. + */ + public void testAlwaysRecordReplicaOrPeerRecoveryOperationsToTranslog() throws Exception { + List operations = generateHistoryOnReplica(between(1, 100), randomBoolean(), randomBoolean(), randomBoolean()); + applyOperations(engine, operations); + Set seqNos = operations.stream().map(Engine.Operation::seqNo).collect(Collectors.toSet()); + try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) { + assertThat(snapshot.totalOperations(), equalTo(operations.size())); + assertThat(TestTranslog.drainSnapshot(snapshot, false).stream().map(Translog.Operation::seqNo).collect(Collectors.toSet()), + equalTo(seqNos)); + } + primaryTerm.set(randomLongBetween(primaryTerm.get(), Long.MAX_VALUE)); + engine.rollTranslogGeneration(); + engine.trimOperationsFromTranslog(primaryTerm.get(), NO_OPS_PERFORMED); // trim everything in translog + try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) { + assertThat(snapshot.totalOperations(), equalTo(operations.size())); + assertNull(snapshot.next()); + } + applyOperations(engine, operations); + try (Translog.Snapshot snapshot = 
getTranslog(engine).newSnapshot()) { + assertThat(snapshot.totalOperations(), equalTo(operations.size() * 2)); + assertThat(TestTranslog.drainSnapshot(snapshot, false).stream().map(Translog.Operation::seqNo).collect(Collectors.toSet()), + equalTo(seqNos)); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 8e03c12cfbe..4478f2f4640 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -57,7 +57,6 @@ import java.util.concurrent.ExecutionException; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.startsWith; public class MapperServiceTests extends ESSingleNodeTestCase { @@ -298,36 +297,6 @@ public class MapperServiceTests extends ESSingleNodeTestCase { assertEquals("Limit of total fields [" + numberOfNonAliasFields + "] in index [test2] has been exceeded", e.getMessage()); } - public void testForbidMultipleTypes() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); - MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - - String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> mapperService.merge("type2", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)); - assertThat(e.getMessage(), startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: ")); - } - - /** - * This test checks that the multi-type validation is done before we do 
any other kind of validation on the mapping that's added, - * see https://github.com/elastic/elasticsearch/issues/29313 - */ - public void testForbidMultipleTypesWithConflictingMappings() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field1").field("type", "integer_range") - .endObject().endObject().endObject().endObject()); - MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - - String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2") - .startObject("properties").startObject("field1").field("type", "integer") - .endObject().endObject().endObject().endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> mapperService.merge("type2", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)); - assertThat(e.getMessage(), startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: ")); - } - public void testDefaultMappingIsRejectedOn7() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_default_").endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index f05ddce567a..76ca0c66a37 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -504,9 +504,9 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC recoveryStart.countDown(); return new 
RecoveryTarget(indexShard, node, recoveryListener) { @Override - public void finalizeRecovery(long globalCheckpoint, ActionListener listener) { + public void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionListener listener) { recoveryDone.set(true); - super.finalizeRecovery(globalCheckpoint, listener); + super.finalizeRecovery(globalCheckpoint, trimAboveSeqNo, listener); } }; }); @@ -868,13 +868,13 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC } @Override - public void finalizeRecovery(long globalCheckpoint, ActionListener listener) { + public void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionListener listener) { if (hasBlocked() == false) { // it maybe that not ops have been transferred, block now blockIfNeeded(RecoveryState.Stage.TRANSLOG); } blockIfNeeded(RecoveryState.Stage.FINALIZE); - super.finalizeRecovery(globalCheckpoint, listener); + super.finalizeRecovery(globalCheckpoint, trimAboveSeqNo, listener); } } diff --git a/server/src/test/java/org/elasticsearch/index/seqno/PeerRecoveryRetentionLeaseExpiryTests.java b/server/src/test/java/org/elasticsearch/index/seqno/PeerRecoveryRetentionLeaseExpiryTests.java index 22d4f5e86f9..fe2d8f27aa3 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/PeerRecoveryRetentionLeaseExpiryTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/PeerRecoveryRetentionLeaseExpiryTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.engine.SafeCommitInfo; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.IndexSettingsModule; import org.junit.Before; @@ -37,6 +38,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import java.util.stream.Stream; +import static 
org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -48,6 +50,7 @@ public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTes private ReplicationTracker replicationTracker; private AtomicLong currentTimeMillis; private Settings settings; + private SafeCommitInfo safeCommitInfo; @Before public void setUpReplicationTracker() throws InterruptedException { @@ -63,6 +66,8 @@ public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTes settings = Settings.EMPTY; } + safeCommitInfo = null; // must be set in each test + final long primaryTerm = randomLongBetween(1, Long.MAX_VALUE); replicationTracker = new ReplicationTracker( new ShardId("test", "_na", 0), @@ -72,7 +77,8 @@ public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> { }, currentTimeMillis::get, - (leases, listener) -> { }); + (leases, listener) -> { }, + () -> safeCommitInfo); replicationTracker.updateFromMaster(1L, Collections.singleton(primaryAllocationId.getId()), routingTable(Collections.emptySet(), primaryAllocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); @@ -109,6 +115,7 @@ public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTes } currentTimeMillis.set(currentTimeMillis.get() + randomLongBetween(0, Long.MAX_VALUE - currentTimeMillis.get())); + safeCommitInfo = randomSafeCommitInfo(); final Tuple retentionLeases = replicationTracker.getRetentionLeases(true); assertFalse(retentionLeases.v1()); @@ -121,11 +128,14 @@ public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTes public void testPeerRecoveryRetentionLeasesForUnassignedCopiesDoNotExpireImmediatelyIfShardsNotAllStarted() { final String unknownNodeId = randomAlphaOfLength(10); - 
replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, randomCheckpoint(), EMPTY_LISTENER); + final long globalCheckpoint = randomNonNegativeLong(); // not NO_OPS_PERFORMED since this always results in file-based recovery + replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, globalCheckpoint, EMPTY_LISTENER); currentTimeMillis.set(currentTimeMillis.get() + randomLongBetween(0, IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.get(settings).millis())); + safeCommitInfo = randomSafeCommitInfoSuitableForOpsBasedRecovery(globalCheckpoint); + final Tuple retentionLeases = replicationTracker.getRetentionLeases(true); assertFalse("should not have expired anything", retentionLeases.v1()); @@ -142,12 +152,15 @@ public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTes } final String unknownNodeId = randomAlphaOfLength(10); - replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, randomCheckpoint(), EMPTY_LISTENER); + final long globalCheckpoint = randomCheckpoint(); + replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, globalCheckpoint, EMPTY_LISTENER); currentTimeMillis.set(randomLongBetween( currentTimeMillis.get() + IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.get(settings).millis() + 1, Long.MAX_VALUE)); + safeCommitInfo = randomSafeCommitInfoSuitableForOpsBasedRecovery(globalCheckpoint); + final Tuple retentionLeases = replicationTracker.getRetentionLeases(true); assertTrue("should have expired something", retentionLeases.v1()); @@ -167,6 +180,7 @@ public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTes (usually() ? 
randomLongBetween(0, IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.get(settings).millis()) : randomLongBetween(0, Long.MAX_VALUE - currentTimeMillis.get()))); + safeCommitInfo = randomSafeCommitInfo(); final Tuple retentionLeases = replicationTracker.getRetentionLeases(true); assertTrue(retentionLeases.v1()); @@ -176,4 +190,41 @@ public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTes assertThat(leaseIds, equalTo(replicationTracker.routingTable.shards().stream() .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId).collect(Collectors.toSet()))); } + + public void testPeerRecoveryRetentionLeasesForUnassignedCopiesExpireIfRetainingTooMuchHistory() { + if (randomBoolean()) { + startReplica(); + } + + final String unknownNodeId = randomAlphaOfLength(10); + final long globalCheckpoint = randomValueOtherThan(SequenceNumbers.NO_OPS_PERFORMED, this::randomCheckpoint); + replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, globalCheckpoint, EMPTY_LISTENER); + + safeCommitInfo = randomSafeCommitInfoSuitableForFileBasedRecovery(globalCheckpoint); + + final Tuple retentionLeases = replicationTracker.getRetentionLeases(true); + assertTrue("should have expired something", retentionLeases.v1()); + + final Set leaseIds = retentionLeases.v2().leases().stream().map(RetentionLease::id).collect(Collectors.toSet()); + assertThat(leaseIds, hasSize(2)); + assertThat(leaseIds, equalTo(replicationTracker.routingTable.shards().stream() + .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId).collect(Collectors.toSet()))); + } + + private SafeCommitInfo randomSafeCommitInfo() { + return randomBoolean() ? 
SafeCommitInfo.EMPTY : new SafeCommitInfo( + randomFrom(randomNonNegativeLong(), (long) randomIntBetween(0, Integer.MAX_VALUE)), + randomIntBetween(0, Integer.MAX_VALUE)); + } + + private SafeCommitInfo randomSafeCommitInfoSuitableForOpsBasedRecovery(long globalCheckpoint) { + // simulate a safe commit that is behind the given global checkpoint, so that no files need to be transferrred + final long localCheckpoint = randomLongBetween(NO_OPS_PERFORMED, globalCheckpoint); + return new SafeCommitInfo(localCheckpoint, between(0, Math.toIntExact(Math.min(localCheckpoint + 1, Integer.MAX_VALUE)))); + } + + private SafeCommitInfo randomSafeCommitInfoSuitableForFileBasedRecovery(long globalCheckpoint) { + // simulate a later safe commit containing no documents, which is always better to transfer than any ops + return new SafeCommitInfo(randomLongBetween(globalCheckpoint + 1, Long.MAX_VALUE), 0); + } } diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java index 7611fad5a7e..bdf7acf478b 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java @@ -70,7 +70,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -111,7 +112,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), 
Collections.singleton(allocationId.getId()), @@ -139,7 +141,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -174,7 +177,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes .stream() .collect(Collectors.toMap(RetentionLease::id, RetentionLease::retainingSequenceNumber)), equalTo(retainingSequenceNumbers)); - }); + }, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); reference.set(replicationTracker); replicationTracker.updateFromMaster( randomNonNegativeLong(), @@ -210,7 +214,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -266,7 +271,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes assertFalse(Thread.holdsLock(replicationTrackerRef.get())); assertTrue(synced.compareAndSet(false, true)); listener.onResponse(new ReplicationResponse()); - }); + }, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTrackerRef.set(replicationTracker); replicationTracker.updateFromMaster( randomNonNegativeLong(), @@ -309,7 +315,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> { }); + (leases, listener) -> { }, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -331,7 +338,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes 
UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> { }); + (leases, listener) -> { }, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -357,7 +365,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -392,7 +401,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes .stream() .collect(Collectors.toMap(RetentionLease::id, RetentionLease::retainingSequenceNumber)), equalTo(retainingSequenceNumbers)); - }); + }, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); reference.set(replicationTracker); replicationTracker.updateFromMaster( randomNonNegativeLong(), @@ -445,7 +455,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, currentTimeMillis::get, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -519,7 +530,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -572,7 +584,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), 
Collections.singleton(allocationId.getId()), @@ -605,7 +618,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), @@ -653,7 +667,8 @@ public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTes UNASSIGNED_SEQ_NO, value -> {}, () -> 0L, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java index 5f035a3604f..cc32d5198c8 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.engine.SafeCommitInfo; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -32,6 +33,7 @@ import org.elasticsearch.test.IndexSettingsModule; import java.util.Set; import java.util.function.LongConsumer; import java.util.function.LongSupplier; +import java.util.function.Supplier; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -49,9 +51,12 @@ public abstract class ReplicationTrackerTestCase extends ESTestCase { UNASSIGNED_SEQ_NO, updatedGlobalCheckpoint, 
currentTimeMillisSupplier, - (leases, listener) -> {}); + (leases, listener) -> {}, + OPS_BASED_RECOVERY_ALWAYS_REASONABLE); } + static final Supplier OPS_BASED_RECOVERY_ALWAYS_REASONABLE = () -> SafeCommitInfo.EMPTY; + static String nodeIdFromAllocationId(final AllocationId allocationId) { return "n-" + allocationId.getId().substring(0, 8); } diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java index afbd560758c..e7d68baf265 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java @@ -694,10 +694,10 @@ public class ReplicationTrackerTests extends ReplicationTrackerTestCase { final long globalCheckpoint = UNASSIGNED_SEQ_NO; final BiConsumer> onNewRetentionLease = (leases, listener) -> {}; - ReplicationTracker oldPrimary = new ReplicationTracker( - shardId, aId.getId(), indexSettings, primaryTerm, globalCheckpoint, onUpdate, () -> 0L, onNewRetentionLease); - ReplicationTracker newPrimary = new ReplicationTracker( - shardId, aId.getRelocationId(), indexSettings, primaryTerm, globalCheckpoint, onUpdate, () -> 0L, onNewRetentionLease); + ReplicationTracker oldPrimary = new ReplicationTracker(shardId, aId.getId(), indexSettings, primaryTerm, globalCheckpoint, + onUpdate, () -> 0L, onNewRetentionLease, OPS_BASED_RECOVERY_ALWAYS_REASONABLE); + ReplicationTracker newPrimary = new ReplicationTracker(shardId, aId.getRelocationId(), indexSettings, primaryTerm, globalCheckpoint, + onUpdate, () -> 0L, onNewRetentionLease, OPS_BASED_RECOVERY_ALWAYS_REASONABLE); Set allocationIds = new HashSet<>(Arrays.asList(oldPrimary.shardAllocationId, newPrimary.shardAllocationId)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 
215468d4570..b3799e3fe76 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -1065,7 +1065,7 @@ public class IndexShardTests extends IndexShardTestCase { onFailureException.get(), hasToString(containsString("operation primary term [" + oldPrimaryTerm + "] is too old"))); } - closeShards(indexShard); + closeShard(indexShard, false); // skip asserting translog and Lucene as we rolled back Lucene but did not execute resync } public void testAcquireReplicaPermitAdvanceMaxSeqNoOfUpdates() throws Exception { @@ -2767,8 +2767,8 @@ public class IndexShardTests extends IndexShardTestCase { } @Override - public void finalizeRecovery(long globalCheckpoint, ActionListener listener) { - super.finalizeRecovery(globalCheckpoint, + public void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionListener listener) { + super.finalizeRecovery(globalCheckpoint, trimAboveSeqNo, ActionListener.wrap( r -> { assertListenerCalled.accept(replica); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index ad791702ffe..f169263a6cd 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.recovery; import org.apache.lucene.analysis.TokenStream; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -33,6 +34,7 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import 
org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -75,6 +77,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.flush.SyncedFlushUtil; import org.elasticsearch.indices.recovery.RecoveryState.Stage; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.node.RecoverySettingsChunkSizePlugin; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; @@ -101,6 +104,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -117,6 +121,8 @@ import java.util.stream.StreamSupport; import static java.util.Collections.singletonMap; import static java.util.stream.Collectors.toList; +import static org.elasticsearch.action.DocWriteResponse.Result.CREATED; +import static org.elasticsearch.action.DocWriteResponse.Result.UPDATED; import static org.elasticsearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -125,6 +131,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; @@ -1206,6 +1213,109 @@ public class IndexRecoveryIT 
extends ESIntegTestCase { assertThat(recoveryState.getIndex().totalFileCount(), greaterThan(0)); } + public void testUsesFileBasedRecoveryIfOperationsBasedRecoveryWouldBeUnreasonable() throws Exception { + internalCluster().ensureAtLeastNumDataNodes(2); + + String indexName = "test-index"; + final Settings.Builder settings = Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "12h") + .put(IndexService.RETENTION_LEASE_SYNC_INTERVAL_SETTING.getKey(), "100ms"); + + final double reasonableOperationsBasedRecoveryProportion; + if (randomBoolean()) { + reasonableOperationsBasedRecoveryProportion = randomDoubleBetween(0.05, 0.99, true); + settings.put(IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING.getKey(), + reasonableOperationsBasedRecoveryProportion); + } else { + reasonableOperationsBasedRecoveryProportion + = IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING.get(Settings.EMPTY); + } + logger.info("--> performing ops-based recoveries up to [{}%] of docs", reasonableOperationsBasedRecoveryProportion * 100.0); + + createIndex(indexName, settings.build()); + indexRandom(randomBoolean(), false, randomBoolean(), IntStream.range(0, between(0, 100)) + .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)).collect(toList())); + ensureGreen(indexName); + + flush(indexName); + // wait for all history to be discarded + assertBusy(() -> { + for (ShardStats shardStats : client().admin().indices().prepareStats(indexName).get().getShards()) { + final long maxSeqNo = shardStats.getSeqNoStats().getMaxSeqNo(); + assertTrue(shardStats.getRetentionLeaseStats().retentionLeases() + " should discard history up to " + maxSeqNo, + shardStats.getRetentionLeaseStats().retentionLeases().leases().stream().allMatch( + l -> l.retainingSequenceNumber() == maxSeqNo + 
1)); + } + }); + flush(indexName); // ensure that all operations are in the safe commit + + final ShardStats shardStats = client().admin().indices().prepareStats(indexName).get().getShards()[0]; + final long docCount = shardStats.getStats().docs.getCount(); + assertThat(shardStats.getStats().docs.getDeleted(), equalTo(0L)); + assertThat(shardStats.getSeqNoStats().getMaxSeqNo() + 1, equalTo(docCount)); + + final ShardId shardId = new ShardId(resolveIndex(indexName), 0); + final DiscoveryNodes discoveryNodes = clusterService().state().nodes(); + final IndexShardRoutingTable indexShardRoutingTable = clusterService().state().routingTable().shardRoutingTable(shardId); + + final ShardRouting replicaShardRouting = indexShardRoutingTable.replicaShards().get(0); + assertTrue("should have lease for " + replicaShardRouting, + client().admin().indices().prepareStats(indexName).get().getShards()[0].getRetentionLeaseStats() + .retentionLeases().contains(ReplicationTracker.getPeerRecoveryRetentionLeaseId(replicaShardRouting))); + internalCluster().restartNode(discoveryNodes.get(replicaShardRouting.currentNodeId()).getName(), + new InternalTestCluster.RestartCallback() { + @Override + public Settings onNodeStopped(String nodeName) throws Exception { + assertFalse(client().admin().cluster().prepareHealth() + .setWaitForNodes(Integer.toString(discoveryNodes.getSize() - 1)) + .setWaitForEvents(Priority.LANGUID).get().isTimedOut()); + + final int newDocCount = Math.toIntExact(Math.round(Math.ceil( + (1 + Math.ceil(docCount * reasonableOperationsBasedRecoveryProportion)) + / (1 - reasonableOperationsBasedRecoveryProportion)))); + + /* + * newDocCount >= (ceil(docCount * p) + 1) / (1-p) + * + * ==> 0 <= newDocCount * (1-p) - ceil(docCount * p) - 1 + * = newDocCount - (newDocCount * p + ceil(docCount * p) + 1) + * < newDocCount - (ceil(newDocCount * p) + ceil(docCount * p)) + * <= newDocCount - ceil(newDocCount * p + docCount * p) + * + * ==> docCount < newDocCount + docCount - 
ceil((newDocCount + docCount) * p) + * == localCheckpoint + 1 - ceil((newDocCount + docCount) * p) + * == firstReasonableSeqNo + * + * The replica has docCount docs, i.e. has operations with seqnos [0..docCount-1], so a seqno-based recovery will start + * from docCount < firstReasonableSeqNo + * + * ==> it is unreasonable to recover the replica using a seqno-based recovery + */ + + indexRandom(randomBoolean(), randomBoolean(), randomBoolean(), IntStream.range(0, newDocCount) + .mapToObj(n -> client().prepareIndex(indexName, "_doc").setSource("num", n)).collect(toList())); + + flush(indexName); + + assertBusy(() -> assertFalse("should no longer have lease for " + replicaShardRouting, + client().admin().indices().prepareStats(indexName).get().getShards()[0].getRetentionLeaseStats() + .retentionLeases().contains(ReplicationTracker.getPeerRecoveryRetentionLeaseId(replicaShardRouting)))); + + return super.onNodeStopped(nodeName); + } + }); + + ensureGreen(indexName); + + //noinspection OptionalGetWithoutIsPresent because it fails the test if absent + final RecoveryState recoveryState = client().admin().indices().prepareRecoveries(indexName).get() + .shardRecoveryStates().get(indexName).stream().filter(rs -> rs.getPrimary() == false).findFirst().get(); + assertThat(recoveryState.getIndex().totalFileCount(), greaterThan(0)); + } + public void testDoesNotCopyOperationsInSafeCommit() throws Exception { internalCluster().ensureAtLeastNumDataNodes(2); @@ -1326,4 +1436,56 @@ public class IndexRecoveryIT extends ESIntegTestCase { assertThat(shardStats.getSeqNoStats().getGlobalCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); } } + public void testPeerRecoveryTrimsLocalTranslog() throws Exception { + internalCluster().startNode(); + List dataNodes = internalCluster().startDataOnlyNodes(2); + String indexName = "test-index"; + createIndex(indexName, Settings.builder() + .put("index.number_of_shards", 1).put("index.number_of_replicas", 1) + 
.put("index.routing.allocation.include._name", String.join(",", dataNodes)).build()); + ensureGreen(indexName); + ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + DiscoveryNode nodeWithOldPrimary = clusterState.nodes().get(clusterState.routingTable() + .index(indexName).shard(0).primaryShard().currentNodeId()); + MockTransportService transportService = (MockTransportService) internalCluster() + .getInstance(TransportService.class, nodeWithOldPrimary.getName()); + CountDownLatch readyToRestartNode = new CountDownLatch(1); + AtomicBoolean stopped = new AtomicBoolean(); + transportService.addSendBehavior((connection, requestId, action, request, options) -> { + if (action.equals("indices:data/write/bulk[s][r]") && randomInt(100) < 5) { + throw new NodeClosedException(nodeWithOldPrimary); + } + // prevent the primary from marking the replica as stale so the replica can get promoted. + if (action.equals("internal:cluster/shard/failure")) { + stopped.set(true); + readyToRestartNode.countDown(); + throw new NodeClosedException(nodeWithOldPrimary); + } + connection.sendRequest(requestId, action, request, options); + }); + Thread[] indexers = new Thread[randomIntBetween(1, 8)]; + for (int i = 0; i < indexers.length; i++) { + indexers[i] = new Thread(() -> { + while (stopped.get() == false) { + try { + IndexResponse response = client().prepareIndex(indexName, "_doc") + .setSource(Collections.singletonMap("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON) + .get(); + assertThat(response.getResult(), isOneOf(CREATED, UPDATED)); + } catch (ElasticsearchException ignored) { + } + } + }); + } + for (Thread indexer : indexers) { + indexer.start(); + } + readyToRestartNode.await(); + transportService.clearAllRules(); + internalCluster().restartNode(nodeWithOldPrimary.getName(), new InternalTestCluster.RestartCallback()); + for (Thread indexer : indexers) { + indexer.join(); + } + ensureGreen(indexName); + } } diff 
--git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 51b45890765..e14dcb02390 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -782,7 +782,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { } @Override - public void finalizeRecovery(long globalCheckpoint, ActionListener listener) { + public void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionListener listener) { } @Override diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index fc235392077..3b338ff824f 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; @@ -41,6 +42,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.engine.DocIdSeqNoAndSource; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; @@ -461,4 +463,44 @@ public class RecoveryTests 
extends ESIndexLevelReplicationTestCase { closeShards(replica); } } + + public void testRecoveryTrimsLocalTranslog() throws Exception { + try (ReplicationGroup shards = createGroup(between(1, 2))) { + shards.startAll(); + IndexShard oldPrimary = shards.getPrimary(); + shards.indexDocs(scaledRandomIntBetween(1, 100)); + if (randomBoolean()) { + shards.flush(); + } + int inflightDocs = scaledRandomIntBetween(1, 100); + for (int i = 0; i < inflightDocs; i++) { + final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_" + i).source("{}", XContentType.JSON); + final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); + for (IndexShard replica : randomSubsetOf(shards.getReplicas())) { + indexOnReplica(bulkShardRequest, shards, replica); + } + if (rarely()) { + shards.flush(); + } + } + shards.syncGlobalCheckpoint(); + shards.promoteReplicaToPrimary(randomFrom(shards.getReplicas())).get(); + oldPrimary.close("demoted", false); + oldPrimary.store().close(); + oldPrimary = shards.addReplicaWithExistingPath(oldPrimary.shardPath(), oldPrimary.routingEntry().currentNodeId()); + shards.recoverReplica(oldPrimary); + for (IndexShard shard : shards) { + assertConsistentHistoryBetweenTranslogAndLucene(shard); + } + final List docsAfterRecovery = getDocIdAndSeqNos(shards.getPrimary()); + for (IndexShard shard : shards.getReplicas()) { + assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery)); + } + shards.promoteReplicaToPrimary(oldPrimary).get(); + for (IndexShard shard : shards) { + assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery)); + assertConsistentHistoryBetweenTranslogAndLucene(shard); + } + } + } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 3bb5bc44504..c44fc1be605 100644 --- 
a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -290,7 +290,7 @@ public class IngestServiceTests extends ESTestCase { ingestService.validatePipeline(Collections.singletonMap(discoveryNode, ingestInfo), putRequest); } - public void testHasProcessor() throws Exception { + public void testGetProcessorsInPipeline() throws Exception { IngestService ingestService = createWithProcessors(); String id = "_id"; Pipeline pipeline = ingestService.getPipeline(id); @@ -307,15 +307,19 @@ public class IngestServiceTests extends ESTestCase { pipeline = ingestService.getPipeline(id); assertThat(pipeline, notNullValue()); - assertTrue(ingestService.hasProcessor(id, Processor.class)); - assertTrue(ingestService.hasProcessor(id, WrappingProcessorImpl.class)); - assertTrue(ingestService.hasProcessor(id, WrappingProcessor.class)); - assertTrue(ingestService.hasProcessor(id, FakeProcessor.class)); + assertThat(ingestService.getProcessorsInPipeline(id, Processor.class).size(), equalTo(3)); + assertThat(ingestService.getProcessorsInPipeline(id, WrappingProcessorImpl.class).size(), equalTo(1)); + assertThat(ingestService.getProcessorsInPipeline(id, WrappingProcessor.class).size(), equalTo(1)); + assertThat(ingestService.getProcessorsInPipeline(id, FakeProcessor.class).size(), equalTo(2)); - assertFalse(ingestService.hasProcessor(id, ConditionalProcessor.class)); + assertThat(ingestService.getProcessorsInPipeline(id, ConditionalProcessor.class).size(), equalTo(0)); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> ingestService.getProcessorsInPipeline("fakeID", Processor.class)); + assertThat("pipeline with id [fakeID] does not exist", equalTo(e.getMessage())); } - public void testHasProcessorComplexConditional() throws Exception { + public void testGetProcessorsInPipelineComplexConditional() throws Exception { LongSupplier relativeTimeProvider = 
mock(LongSupplier.class); String scriptName = "conditionalScript"; ScriptService scriptService = new ScriptService(Settings.builder().build(), @@ -365,12 +369,12 @@ public class IngestServiceTests extends ESTestCase { pipeline = ingestService.getPipeline(id); assertThat(pipeline, notNullValue()); - assertTrue(ingestService.hasProcessor(id, Processor.class)); - assertTrue(ingestService.hasProcessor(id, WrappingProcessor.class)); - assertTrue(ingestService.hasProcessor(id, FakeProcessor.class)); - assertTrue(ingestService.hasProcessor(id, ConditionalProcessor.class)); + assertThat(ingestService.getProcessorsInPipeline(id, Processor.class).size(), equalTo(3)); + assertThat(ingestService.getProcessorsInPipeline(id, WrappingProcessor.class).size(), equalTo(2)); + assertThat(ingestService.getProcessorsInPipeline(id, FakeProcessor.class).size(), equalTo(1)); + assertThat(ingestService.getProcessorsInPipeline(id, ConditionalProcessor.class).size(), equalTo(2)); - assertFalse(ingestService.hasProcessor(id, WrappingProcessorImpl.class)); + assertThat(ingestService.getProcessorsInPipeline(id, WrappingProcessorImpl.class).size(), equalTo(0)); } public void testCrud() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index 68e3c8416b9..66badef0cef 100644 --- a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Table; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -45,7 +44,7 @@ public class 
BaseRestHandlerTests extends ESTestCase { public void testOneUnconsumedParameters() throws Exception { final AtomicBoolean executed = new AtomicBoolean(); - BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) { + BaseRestHandler handler = new BaseRestHandler() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { request.param("consumed"); @@ -71,7 +70,7 @@ public class BaseRestHandlerTests extends ESTestCase { public void testMultipleUnconsumedParameters() throws Exception { final AtomicBoolean executed = new AtomicBoolean(); - BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) { + BaseRestHandler handler = new BaseRestHandler() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { request.param("consumed"); @@ -98,7 +97,7 @@ public class BaseRestHandlerTests extends ESTestCase { public void testUnconsumedParametersDidYouMean() throws Exception { final AtomicBoolean executed = new AtomicBoolean(); - BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) { + BaseRestHandler handler = new BaseRestHandler() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { request.param("consumed"); @@ -145,7 +144,7 @@ public class BaseRestHandlerTests extends ESTestCase { public void testUnconsumedResponseParameters() throws Exception { final AtomicBoolean executed = new AtomicBoolean(); - BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) { + BaseRestHandler handler = new BaseRestHandler() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { request.param("consumed"); @@ -174,7 +173,7 @@ public class BaseRestHandlerTests extends ESTestCase { public void testDefaultResponseParameters() throws Exception { final AtomicBoolean executed = new AtomicBoolean(); - BaseRestHandler handler = new 
BaseRestHandler(Settings.EMPTY) { + BaseRestHandler handler = new BaseRestHandler() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { return channel -> executed.set(true); @@ -199,7 +198,7 @@ public class BaseRestHandlerTests extends ESTestCase { public void testCatResponseParameters() throws Exception { final AtomicBoolean executed = new AtomicBoolean(); - AbstractCatAction handler = new AbstractCatAction(Settings.EMPTY) { + AbstractCatAction handler = new AbstractCatAction() { @Override protected RestChannelConsumer doCatRequest(RestRequest request, NodeClient client) { return channel -> executed.set(true); @@ -238,7 +237,7 @@ public class BaseRestHandlerTests extends ESTestCase { public void testConsumedBody() throws Exception { final AtomicBoolean executed = new AtomicBoolean(); - final BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) { + final BaseRestHandler handler = new BaseRestHandler() { @Override protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { request.content(); @@ -264,7 +263,7 @@ public class BaseRestHandlerTests extends ESTestCase { public void testUnconsumedNoBody() throws Exception { final AtomicBoolean executed = new AtomicBoolean(); - final BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) { + final BaseRestHandler handler = new BaseRestHandler() { @Override protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { return channel -> executed.set(true); @@ -285,7 +284,7 @@ public class BaseRestHandlerTests extends ESTestCase { public void testUnconsumedBody() throws IOException { final AtomicBoolean executed = new AtomicBoolean(); - final BaseRestHandler handler = new BaseRestHandler(Settings.EMPTY) { + final BaseRestHandler handler = new BaseRestHandler() { @Override protected RestChannelConsumer prepareRequest(final RestRequest 
request, final NodeClient client) throws IOException { return channel -> executed.set(true); diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index ef8cd3e5490..4515897df30 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -111,7 +111,7 @@ public class RestControllerTests extends ESTestCase { restHeaders.put("header.3", Collections.singletonList("false")); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); final RestController spyRestController = spy(restController); - when(spyRestController.getAllHandlers(fakeRequest)) + when(spyRestController.getAllHandlers(null, fakeRequest.rawPath())) .thenReturn(new Iterator() { @Override public boolean hasNext() { diff --git a/server/src/test/java/org/elasticsearch/rest/RestRequestTests.java b/server/src/test/java/org/elasticsearch/rest/RestRequestTests.java index 8a2994a6981..b0d9847c947 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestRequestTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestRequestTests.java @@ -88,9 +88,10 @@ public class RestRequestTests extends ESTestCase { final HttpRequest httpRequest = mock(HttpRequest.class); when (httpRequest.uri()).thenReturn(""); when (httpRequest.content()).thenReturn(new BytesArray(new byte[1])); + when (httpRequest.getHeaders()).thenReturn( + Collections.singletonMap("Content-Type", Collections.singletonList(randomFrom("application/json", "application/x-ndjson")))); final RestRequest request = RestRequest.request(mock(NamedXContentRegistry.class), httpRequest, mock(HttpChannel.class)); - request.setXContentType(XContentType.JSON); assertFalse(request.isContentConsumed()); try { consumer.accept(request); diff --git 
a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionActionTests.java index 2dc6a5b7136..c6864979223 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionActionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; @@ -35,7 +34,7 @@ public class RestAddVotingConfigExclusionActionTests extends RestActionTestCase @Before public void setupAction() { - action = new RestAddVotingConfigExclusionAction(Settings.EMPTY, controller()); + action = new RestAddVotingConfigExclusionAction(controller()); } public void testResolveVotingConfigExclusionsRequest() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java index 330dee49f9f..93203b774ec 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; @@ -45,8 +44,8 @@ public class 
RestNodesStatsActionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); UsageService usageService = new UsageService(); - action = new RestNodesStatsAction(Settings.EMPTY, - new RestController(Collections.emptySet(), null, null, null, usageService)); + action = new RestNodesStatsAction( + new RestController(Collections.emptySet(), null, null, null, usageService)); } public void testUnrecognizedMetric() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 70e8903b0c3..3956178587d 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -95,7 +94,7 @@ public class RestAnalyzeActionTests extends ESTestCase { } public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() { - RestAnalyzeAction action = new RestAnalyzeAction(Settings.EMPTY, mock(RestController.class)); + RestAnalyzeAction action = new RestAnalyzeAction(mock(RestController.class)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) .withContent(new BytesArray("{invalid_json}"), XContentType.JSON).build(); IOException e = expectThrows(IOException.class, () -> action.handleRequest(request, null, null)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexActionTests.java 
b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexActionTests.java index f37f36a8d06..7e63f36862f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexActionTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -42,7 +41,7 @@ public class RestCreateIndexActionTests extends RestActionTestCase { @Before public void setupAction() { - action = new RestCreateIndexAction(Settings.EMPTY, controller()); + action = new RestCreateIndexAction(controller()); } public void testIncludeTypeName() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java index 915f8597aa4..fab3f2a8a7e 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java @@ -37,7 +37,7 @@ public class RestGetFieldMappingActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestGetFieldMappingAction(Settings.EMPTY, controller()); + new RestGetFieldMappingAction(controller()); } public void testIncludeTypeName() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java index 
3490e8f2c88..f35425332f3 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.rest.FakeRestRequest; @@ -47,7 +46,7 @@ public class RestGetIndicesActionTests extends RestActionTestCase { .withParams(params) .build(); - RestGetIndicesAction handler = new RestGetIndicesAction(Settings.EMPTY, mock(RestController.class)); + RestGetIndicesAction handler = new RestGetIndicesAction(mock(RestController.class)); handler.prepareRequest(request, mock(NodeClient.class)); assertWarnings(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE); @@ -71,7 +70,7 @@ public class RestGetIndicesActionTests extends RestActionTestCase { .withParams(params) .build(); - RestGetIndicesAction handler = new RestGetIndicesAction(Settings.EMPTY, mock(RestController.class)); + RestGetIndicesAction handler = new RestGetIndicesAction(mock(RestController.class)); handler.prepareRequest(request, mock(NodeClient.class)); } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java index 7ce32e371de..624491b2e42 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java @@ -40,7 +40,7 @@ public class RestGetMappingActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestGetMappingAction(Settings.EMPTY, controller()); + new 
RestGetMappingAction(controller()); } public void testTypeExistsDeprecation() throws Exception { @@ -51,7 +51,7 @@ public class RestGetMappingActionTests extends RestActionTestCase { .withParams(params) .build(); - RestGetMappingAction handler = new RestGetMappingAction(Settings.EMPTY, mock(RestController.class)); + RestGetMappingAction handler = new RestGetMappingAction(mock(RestController.class)); handler.prepareRequest(request, mock(NodeClient.class)); assertWarnings("Type exists requests are deprecated, as types have been deprecated."); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java index 1eda721f53b..bdd3892e381 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; @@ -43,8 +42,8 @@ public class RestIndicesStatsActionTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); UsageService usageService = new UsageService(); - action = new RestIndicesStatsAction(Settings.EMPTY, - new RestController(Collections.emptySet(), null, null, null, usageService)); + action = new RestIndicesStatsAction( + new RestController(Collections.emptySet(), null, null, null, usageService)); } public void testUnrecognizedMetric() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java 
b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java index 2a5e5db92d4..c81a0a53b4b 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -42,7 +41,7 @@ public class RestPutIndexTemplateActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestPutIndexTemplateAction(Settings.EMPTY, controller()); + action = new RestPutIndexTemplateAction(controller()); } public void testIncludeTypeName() throws IOException { @@ -68,7 +67,7 @@ public class RestPutIndexTemplateActionTests extends RestActionTestCase { .withPath("/_template/_some_template") .withContent(BytesReference.bytes(typedContent), XContentType.JSON) .build(); - action.prepareRequest(request, mock(NodeClient.class)); + action.prepareRequest(request, mock(NodeClient.class)); assertWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE); } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingActionTests.java index daa69c20007..8df69145c2f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingActionTests.java @@ -37,7 +37,7 @@ public class RestPutMappingActionTests extends 
RestActionTestCase { @Before public void setUpAction() { - new RestPutMappingAction(Settings.EMPTY, controller()); + new RestPutMappingAction(controller()); } public void testIncludeTypeName() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java index b9da177b0b8..c989ee0d2f4 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.rest.RestController; import org.elasticsearch.test.ESTestCase; @@ -38,7 +37,7 @@ public class RestResizeHandlerTests extends ESTestCase { public void testShrinkCopySettingsDeprecated() throws IOException { final RestResizeHandler.RestShrinkIndexAction handler = - new RestResizeHandler.RestShrinkIndexAction(Settings.EMPTY, mock(RestController.class)); + new RestResizeHandler.RestShrinkIndexAction(mock(RestController.class)); for (final String copySettings : new String[]{null, "", "true", "false"}) { runTestResizeCopySettingsDeprecated(handler, "shrink", copySettings); } @@ -46,7 +45,7 @@ public class RestResizeHandlerTests extends ESTestCase { public void testSplitCopySettingsDeprecated() throws IOException { final RestResizeHandler.RestSplitIndexAction handler = - new RestResizeHandler.RestSplitIndexAction(Settings.EMPTY, mock(RestController.class)); + new RestResizeHandler.RestSplitIndexAction(mock(RestController.class)); for (final String copySettings : new String[]{null, "", "true", "false"}) { runTestResizeCopySettingsDeprecated(handler, "split", copySettings); } diff --git 
a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java index 0e5dada0f7c..86dfc2a86fb 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -61,7 +61,7 @@ public class RestValidateQueryActionTests extends AbstractSearchTestCase { private static UsageService usageService = new UsageService(); private static RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService); - private static RestValidateQueryAction action = new RestValidateQueryAction(Settings.EMPTY, controller); + private static RestValidateQueryAction action = new RestValidateQueryAction(controller); /** * Configures {@link NodeClient} to stub {@link ValidateQueryAction} transport action. 
diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java index 37dc84c126e..776129eb1d5 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java @@ -128,7 +128,7 @@ public class RestIndicesActionTests extends ESTestCase { } final RestController restController = new RestController(Collections.emptySet(), null, null, null, new UsageService()); - final RestIndicesAction action = new RestIndicesAction(Settings.EMPTY, restController); + final RestIndicesAction action = new RestIndicesAction(restController); final Table table = action.buildTable(new FakeRestRequest(), indicesSettings, indicesHealths, indicesStats, indicesMetaDatas); // now, verify the table is correct diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java index b9346e1c71a..bbdb098a42d 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -48,8 +47,8 @@ public class RestNodesActionTests extends ESTestCase { @Before public void setUpAction() { UsageService usageService = new UsageService(); - action = new RestNodesAction(Settings.EMPTY, - new RestController(Collections.emptySet(), null, null, null, 
usageService)); + action = new RestNodesAction( + new RestController(Collections.emptySet(), null, null, null, usageService)); } public void testBuildTableDoesNotThrowGivenNullNodeInfoAndStats() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java index 7bfa50ff2b7..30e85230a4f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -56,7 +56,7 @@ public class RestRecoveryActionTests extends ESTestCase { final Settings settings = Settings.EMPTY; UsageService usageService = new UsageService(); final RestController restController = new RestController(Collections.emptySet(), null, null, null, usageService); - final RestCatRecoveryAction action = new RestCatRecoveryAction(settings, restController); + final RestCatRecoveryAction action = new RestCatRecoveryAction(restController); final int totalShards = randomIntBetween(1, 32); final int successfulShards = Math.max(0, totalShards - randomIntBetween(1, 2)); final int failedShards = totalShards - successfulShards; diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java index 0b485af7df2..d95af547e8d 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestDeleteActionTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.document; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.test.rest.RestActionTestCase; @@ -30,7 +29,7 @@ public class RestDeleteActionTests extends RestActionTestCase { 
@Before public void setUpAction() { - new RestDeleteAction(Settings.EMPTY, controller()); + new RestDeleteAction(controller()); } public void testTypeInPath() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java index 44781443ec4..494e5cf9fb2 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetActionTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.document; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; @@ -29,7 +28,7 @@ public class RestGetActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestGetAction(Settings.EMPTY, controller()); + new RestGetAction(controller()); } public void testTypeInPathWithGet() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java index 53d78c7d03e..a42cfce31b5 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest.Method; @@ -52,7 +51,7 @@ public class RestGetSourceActionTests extends RestActionTestCase 
{ @Before public void setUpAction() { - new RestGetSourceAction(Settings.EMPTY, controller()); + new RestGetSourceAction(controller()); } @AfterClass diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java index 1f45f5265ac..2fd0ce25805 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java @@ -34,7 +34,7 @@ public class RestIndexActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestIndexAction(Settings.EMPTY, controller()); + action = new RestIndexAction(controller()); } public void testTypeInPath() { @@ -69,7 +69,7 @@ public class RestIndexActionTests extends RestActionTestCase { public void testCreateOpTypeValidation() { Settings settings = settings(Version.CURRENT).build(); - RestIndexAction.CreateHandler create = action.new CreateHandler(settings); + RestIndexAction.CreateHandler create = action.new CreateHandler(); String opType = randomFrom("CREATE", null); create.validateOpType(opType); diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java index dd98089246b..ed9756e4067 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentType; @@ -38,7 +37,7 @@ public class RestMultiTermVectorsActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestMultiTermVectorsAction(Settings.EMPTY, controller()); + new RestMultiTermVectorsAction(controller()); } public void testTypeInPath() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java index d93f7749f63..fbe1ace7383 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -36,7 +35,7 @@ public class RestTermVectorsActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestTermVectorsAction(Settings.EMPTY, controller()); + new RestTermVectorsAction(controller()); } public void testTypeInPath() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java index 119057a66d9..639cbcde562 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestUpdateActionTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; 
-import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.RestRequest; @@ -43,7 +42,7 @@ public class RestUpdateActionTests extends RestActionTestCase { @Before public void setUpAction() { - action = new RestUpdateAction(Settings.EMPTY, controller()); + action = new RestUpdateAction(controller()); } public void testTypeInPath() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java index c6af3d12e29..26679c14340 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestCountActionTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.search; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.test.rest.RestActionTestCase; @@ -33,7 +32,7 @@ public class RestCountActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestCountAction(Settings.EMPTY, controller()); + new RestCountAction(controller()); } public void testTypeInPath() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java index 1c7f266fe99..c2f5acd1e1f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.search; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.rest.FakeRestRequest; import 
org.elasticsearch.test.rest.RestActionTestCase; @@ -29,7 +28,7 @@ public class RestExplainActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestExplainAction(Settings.EMPTY, controller()); + new RestExplainAction(controller()); } public void testTypeInPath() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 522d04b37c6..0448ed666ad 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.search; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.rest.RestActionTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -32,7 +31,7 @@ public class RestSearchActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestSearchAction(Settings.EMPTY, controller()); + new RestSearchAction(controller()); } public void testTypeInPath() { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 57020901aae..fa4bce9a4e9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -108,7 +108,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati assertThat(result, equalTo(2)); } - + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/39497") public void testReduceRandom() { super.testReduceRandom(); } 
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java index 7feeecedd99..97e5fac24dc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java @@ -111,7 +111,7 @@ public class BucketScriptAggregatorTests extends AggregatorTestCase { indexWriter.close(); try (IndexReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + IndexSearcher indexSearcher = newIndexSearcher(indexReader); InternalFilters filters; filters = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); diff --git a/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java b/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java index 905bddbcf16..8642b264814 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.scroll; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -45,7 +44,7 @@ import static org.mockito.Mockito.verify; public class RestClearScrollActionTests extends ESTestCase { public void testParseClearScrollRequestWithInvalidJsonThrowsException() throws Exception { - RestClearScrollAction action = new RestClearScrollAction(Settings.EMPTY, 
mock(RestController.class)); + RestClearScrollAction action = new RestClearScrollAction(mock(RestController.class)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) .withContent(new BytesArray("{invalid_json}"), XContentType.JSON).build(); Exception e = expectThrows(IllegalArgumentException.class, () -> action.prepareRequest(request, null)); @@ -56,7 +55,7 @@ public class RestClearScrollActionTests extends ESTestCase { NodeClient nodeClient = mock(NodeClient.class); doNothing().when(nodeClient).searchScroll(any(), any()); - RestClearScrollAction action = new RestClearScrollAction(Settings.EMPTY, mock(RestController.class)); + RestClearScrollAction action = new RestClearScrollAction(mock(RestController.class)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) .withParams(Collections.singletonMap("scroll_id", "QUERY_STRING")) .withContent(new BytesArray("{\"scroll_id\": [\"BODY\"]}"), XContentType.JSON).build(); diff --git a/server/src/test/java/org/elasticsearch/search/scroll/RestSearchScrollActionTests.java b/server/src/test/java/org/elasticsearch/search/scroll/RestSearchScrollActionTests.java index 078eab68d04..b90b01d4b7b 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/RestSearchScrollActionTests.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/RestSearchScrollActionTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.scroll; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -45,7 +44,7 @@ import static org.mockito.Mockito.verify; public class RestSearchScrollActionTests extends ESTestCase { public void testParseSearchScrollRequestWithInvalidJsonThrowsException() throws 
Exception { - RestSearchScrollAction action = new RestSearchScrollAction(Settings.EMPTY, mock(RestController.class)); + RestSearchScrollAction action = new RestSearchScrollAction(mock(RestController.class)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) .withContent(new BytesArray("{invalid_json}"), XContentType.JSON).build(); Exception e = expectThrows(IllegalArgumentException.class, () -> action.prepareRequest(request, null)); @@ -56,7 +55,7 @@ public class RestSearchScrollActionTests extends ESTestCase { NodeClient nodeClient = mock(NodeClient.class); doNothing().when(nodeClient).searchScroll(any(), any()); - RestSearchScrollAction action = new RestSearchScrollAction(Settings.EMPTY, mock(RestController.class)); + RestSearchScrollAction action = new RestSearchScrollAction(mock(RestController.class)); Map params = new HashMap<>(); params.put("scroll_id", "QUERY_STRING"); params.put("scroll", "1000m"); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 22c19765fed..a751ec4b065 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -775,6 +775,7 @@ public class SuggestSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(indexSettings()) .put(IndexSettings.MAX_SHINGLE_DIFF_SETTING.getKey(), 4) + .put("index.refresh_interval", -1) // prevents occasional scoring glitches due to multi segments .put("index.analysis.analyzer.suggest.tokenizer", "standard") .putList("index.analysis.analyzer.suggest.filter", "lowercase", "shingler") .put("index.analysis.filter.shingler.type", "shingle") diff --git a/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java 
b/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index eaef8c6a6c9..b86627e816d 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -766,7 +766,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest ).get(); NodeClient nodeClient = internalCluster().getInstance(NodeClient.class); - RestGetRepositoriesAction getRepoAction = new RestGetRepositoriesAction(nodeSettings, mock(RestController.class), + RestGetRepositoriesAction getRepoAction = new RestGetRepositoriesAction(mock(RestController.class), internalCluster().getInstance(SettingsFilter.class)); RestRequest getRepoRequest = new FakeRestRequest(); getRepoRequest.params().put("repository", "test-repo"); @@ -789,7 +789,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest throw getRepoError.get(); } - RestClusterStateAction clusterStateAction = new RestClusterStateAction(nodeSettings, mock(RestController.class), + RestClusterStateAction clusterStateAction = new RestClusterStateAction(mock(RestController.class), internalCluster().getInstance(SettingsFilter.class)); RestRequest clusterStateRequest = new FakeRestRequest(); final CountDownLatch clusterStateLatch = new CountDownLatch(1); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index d537e8a7cf0..c47ec0e9b73 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -25,24 +25,28 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RequestValidators; +import 
org.elasticsearch.action.StepListener; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.TransportClusterStateAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import 
org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -53,6 +57,7 @@ import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.index.IndexRequest; @@ -61,6 +66,7 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchExecutionStatsCollector; import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; @@ -69,6 +75,7 @@ import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.node.NodeClient; @@ -181,7 +188,6 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -244,68 +250,68 @@ public class SnapshotResiliencyTests extends ESTestCase { final int shards = randomIntBetween(1, 10); final int documents = 
randomIntBetween(0, 100); - TestClusterNode masterNode = + + final StepListener createRepositoryListener = new StepListener<>(); + + final TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); - final AtomicBoolean createdSnapshot = new AtomicBoolean(); - final AtomicBoolean snapshotRestored = new AtomicBoolean(); - final AtomicBoolean documentCountVerified = new AtomicBoolean(); masterNode.client.admin().cluster().preparePutRepository(repoName) .setType(FsRepository.TYPE).setSettings(Settings.builder().put("location", randomAlphaOfLength(10))) - .execute( - assertNoFailureListener( - () -> masterNode.client.admin().indices().create( - new CreateIndexRequest(index).waitForActiveShards(ActiveShardCount.ALL) - .settings(defaultIndexSettings(shards)), - assertNoFailureListener( - () -> { - final Runnable afterIndexing = () -> - masterNode.client.admin().cluster().prepareCreateSnapshot(repoName, snapshotName) - .setWaitForCompletion(true).execute(assertNoFailureListener(() -> { - createdSnapshot.set(true); - masterNode.client.admin().indices().delete( - new DeleteIndexRequest(index), - assertNoFailureListener(() -> masterNode.client.admin().cluster().restoreSnapshot( - new RestoreSnapshotRequest(repoName, snapshotName).waitForCompletion(true), - assertNoFailureListener(restoreSnapshotResponse -> { - snapshotRestored.set(true); - assertEquals(shards, restoreSnapshotResponse.getRestoreInfo().totalShards()); - masterNode.client.search( - new SearchRequest(index).source( - new SearchSourceBuilder().size(0).trackTotalHits(true) - ), - assertNoFailureListener(r -> { - assertEquals( - (long) documents, - Objects.requireNonNull(r.getHits().getTotalHits()).value - ); - documentCountVerified.set(true); - })); - }) - ))); - })); - final AtomicInteger countdown = new AtomicInteger(documents); - for (int i = 0; i < documents; ++i) { - masterNode.client.bulk( - new BulkRequest().add(new 
IndexRequest(index).source( - Collections.singletonMap("foo", "bar" + i))) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), - assertNoFailureListener( - bulkResponse -> { - assertFalse( - "Failures in bulkresponse: " + bulkResponse.buildFailureMessage(), - bulkResponse.hasFailures()); - if (countdown.decrementAndGet() == 0) { - afterIndexing.run(); - } - })); - } - if (documents == 0) { - afterIndexing.run(); - } - })))); + .execute(createRepositoryListener); + + final StepListener createIndexResponseStepListener = new StepListener<>(); + createRepositoryListener.whenComplete(acknowledgedResponse -> masterNode.client.admin().indices().create( + new CreateIndexRequest(index).waitForActiveShards(ActiveShardCount.ALL).settings(defaultIndexSettings(shards)), + createIndexResponseStepListener), SnapshotResiliencyTests::rethrowAssertion); + + final StepListener createSnapshotResponseListener = new StepListener<>(); + createIndexResponseStepListener.whenComplete(createIndexResponse -> { + final Runnable afterIndexing = () -> masterNode.client.admin().cluster().prepareCreateSnapshot(repoName, snapshotName) + .setWaitForCompletion(true).execute(createSnapshotResponseListener); + if (documents == 0) { + afterIndexing.run(); + } else { + final BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int i = 0; i < documents; ++i) { + bulkRequest.add(new IndexRequest(index).source(Collections.singletonMap("foo", "bar" + i))); + } + final StepListener bulkResponseStepListener = new StepListener<>(); + masterNode.client.bulk(bulkRequest, bulkResponseStepListener); + bulkResponseStepListener.whenComplete(bulkResponse -> { + assertFalse("Failures in bulk response: " + bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); + assertEquals(documents, bulkResponse.getItems().length); + afterIndexing.run(); + }, SnapshotResiliencyTests::rethrowAssertion); + } + }, SnapshotResiliencyTests::rethrowAssertion); + + final 
StepListener deleteIndexListener = new StepListener<>(); + + createSnapshotResponseListener.whenComplete( + createSnapshotResponse -> masterNode.client.admin().indices().delete(new DeleteIndexRequest(index), deleteIndexListener), + SnapshotResiliencyTests::rethrowAssertion); + + final StepListener restoreSnapshotResponseListener = new StepListener<>(); + deleteIndexListener.whenComplete(ignored -> masterNode.client.admin().cluster().restoreSnapshot( + new RestoreSnapshotRequest(repoName, snapshotName).waitForCompletion(true), restoreSnapshotResponseListener), + SnapshotResiliencyTests::rethrowAssertion); + + final StepListener searchResponseListener = new StepListener<>(); + restoreSnapshotResponseListener.whenComplete(restoreSnapshotResponse -> { + assertEquals(shards, restoreSnapshotResponse.getRestoreInfo().totalShards()); + masterNode.client.search( + new SearchRequest(index).source(new SearchSourceBuilder().size(0).trackTotalHits(true)), searchResponseListener); + }, SnapshotResiliencyTests::rethrowAssertion); + + final AtomicBoolean documentCountVerified = new AtomicBoolean(); + searchResponseListener.whenComplete(r -> { + assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value); + documentCountVerified.set(true); + }, SnapshotResiliencyTests::rethrowAssertion); + runUntil(documentCountVerified::get, TimeUnit.MINUTES.toMillis(5L)); - assertTrue(createdSnapshot.get()); - assertTrue(snapshotRestored.get()); + assertNotNull(createSnapshotResponseListener.result()); + assertNotNull(restoreSnapshotResponseListener.result()); assertTrue(documentCountVerified.get()); SnapshotsInProgress finalSnapshotsInProgress = masterNode.clusterService.state().custom(SnapshotsInProgress.TYPE); assertFalse(finalSnapshotsInProgress.entries().stream().anyMatch(entry -> entry.state().completed() == false)); @@ -614,6 +620,10 @@ public class SnapshotResiliencyTests extends ESTestCase { .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0).build(); } 
+ private static void rethrowAssertion(Exception e) { + throw new AssertionError(e); + } + private static ActionListener assertNoFailureListener(Consumer consumer) { return new ActionListener() { @Override diff --git a/server/src/test/java/org/elasticsearch/update/UpdateIT.java b/server/src/test/java/org/elasticsearch/update/UpdateIT.java index 1a0df1c4a10..0bd5851e35b 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -292,7 +292,8 @@ public class UpdateIT extends ESIntegTestCase { } public void testUpdate() throws Exception { - createTestIndex(); + assertAcked(prepareCreate("test").addAlias(new Alias("alias").writeIndex(true))); + assertAcked(prepareCreate("test2").addAlias(new Alias("alias"))); ensureGreen(); Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field")); diff --git a/server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java b/server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java index c38030afbe9..5a38ca8ca4e 100644 --- a/server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java +++ b/server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java @@ -23,14 +23,12 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.usage.NodeUsage; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; -import java.io.IOException; import java.net.InetAddress; import java.util.Map; @@ -44,14 +42,13 @@ public class UsageServiceTests extends ESTestCase { public void testRestUsage() throws Exception 
{ DiscoveryNode discoveryNode = new DiscoveryNode("foo", new TransportAddress(InetAddress.getByName("localhost"), 12345), Version.CURRENT); - Settings settings = Settings.EMPTY; RestRequest restRequest = new FakeRestRequest(); - BaseRestHandler handlerA = new MockRestHandler("a", settings); - BaseRestHandler handlerB = new MockRestHandler("b", settings); - BaseRestHandler handlerC = new MockRestHandler("c", settings); - BaseRestHandler handlerD = new MockRestHandler("d", settings); - BaseRestHandler handlerE = new MockRestHandler("e", settings); - BaseRestHandler handlerF = new MockRestHandler("f", settings); + BaseRestHandler handlerA = new MockRestHandler("a"); + BaseRestHandler handlerB = new MockRestHandler("b"); + BaseRestHandler handlerC = new MockRestHandler("c"); + BaseRestHandler handlerD = new MockRestHandler("d"); + BaseRestHandler handlerE = new MockRestHandler("e"); + BaseRestHandler handlerF = new MockRestHandler("f"); UsageService usageService = new UsageService(); usageService.addRestHandler(handlerA); usageService.addRestHandler(handlerB); @@ -94,8 +91,7 @@ public class UsageServiceTests extends ESTestCase { private String name; - protected MockRestHandler(String name, Settings settings) { - super(settings); + protected MockRestHandler(String name) { this.name = name; } @@ -105,7 +101,7 @@ public class UsageServiceTests extends ESTestCase { } @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { return channel -> { }; } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 2c54189e20c..2b5f29d192a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java 
@@ -115,7 +115,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -132,11 +131,13 @@ import java.util.stream.Collectors; import static java.util.Collections.emptyList; import static java.util.Collections.shuffle; +import static org.elasticsearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA; import static org.elasticsearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; public abstract class EngineTestCase extends ESTestCase { @@ -674,7 +675,8 @@ public abstract class EngineTestCase extends ESTestCase { SequenceNumbers.NO_OPS_PERFORMED, update -> {}, () -> 0L, - (leases, listener) -> listener.onResponse(new ReplicationResponse())); + (leases, listener) -> listener.onResponse(new ReplicationResponse()), + () -> SafeCommitInfo.EMPTY); globalCheckpointSupplier = replicationTracker; retentionLeasesSupplier = replicationTracker::getRetentionLeases; } else { @@ -849,14 +851,15 @@ public abstract class EngineTestCase extends ESTestCase { switch (opType) { case INDEX: operations.add(new Engine.Index(EngineTestCase.newUid(doc), doc, seqNo, primaryTerm.get(), - i, null, Engine.Operation.Origin.REPLICA, startTime, -1, true, SequenceNumbers.UNASSIGNED_SEQ_NO, 0)); + i, null, randomFrom(REPLICA, PEER_RECOVERY), startTime, -1, true, SequenceNumbers.UNASSIGNED_SEQ_NO, 0)); break; case DELETE: operations.add(new Engine.Delete(doc.type(), doc.id(), EngineTestCase.newUid(doc), seqNo, primaryTerm.get(), - i, null, 
Engine.Operation.Origin.REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0)); + i, null, randomFrom(REPLICA, PEER_RECOVERY), startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0)); break; case NO_OP: - operations.add(new Engine.NoOp(seqNo, primaryTerm.get(), Engine.Operation.Origin.REPLICA, startTime, "test-" + i)); + operations.add(new Engine.NoOp(seqNo, primaryTerm.get(), + randomFrom(REPLICA, PEER_RECOVERY), startTime, "test-" + i)); break; default: throw new IllegalStateException("Unknown operation type [" + opType + "]"); @@ -1055,8 +1058,7 @@ public abstract class EngineTestCase extends ESTestCase { */ public static List readAllOperationsInLucene(Engine engine, MapperService mapper) throws IOException { final List operations = new ArrayList<>(); - long maxSeqNo = Math.max(0, ((InternalEngine)engine).getLocalCheckpointTracker().getMaxSeqNo()); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapper, 0, maxSeqNo, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapper, 0, Long.MAX_VALUE, false)) { Translog.Operation op; while ((op = snapshot.next()) != null){ operations.add(op); @@ -1074,18 +1076,19 @@ public abstract class EngineTestCase extends ESTestCase { return; } final long maxSeqNo = ((InternalEngine) engine).getLocalCheckpointTracker().getMaxSeqNo(); - if (maxSeqNo < 0) { - return; // nothing to check - } - final Map translogOps = new HashMap<>(); + final List translogOps = new ArrayList<>(); try (Translog.Snapshot snapshot = EngineTestCase.getTranslog(engine).newSnapshot()) { Translog.Operation op; while ((op = snapshot.next()) != null) { - translogOps.put(op.seqNo(), op); + assertThat("translog operation [" + op + "] > max_seq_no[" + maxSeqNo + "]", op.seqNo(), lessThanOrEqualTo(maxSeqNo)); + translogOps.add(op); } } final Map luceneOps = readAllOperationsInLucene(engine, mapper).stream() .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity())); + for (Translog.Operation op : 
luceneOps.values()) { + assertThat("lucene operation [" + op + "] > max_seq_no[" + maxSeqNo + "]", op.seqNo(), lessThanOrEqualTo(maxSeqNo)); + } final long globalCheckpoint = EngineTestCase.getTranslog(engine).getLastSyncedGlobalCheckpoint(); final long retainedOps = engine.config().getIndexSettings().getSoftDeleteRetentionOperations(); final long seqNoForRecovery; @@ -1093,10 +1096,10 @@ public abstract class EngineTestCase extends ESTestCase { seqNoForRecovery = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; } final long minSeqNoToRetain = Math.min(seqNoForRecovery, globalCheckpoint + 1 - retainedOps); - for (Translog.Operation translogOp : translogOps.values()) { + for (Translog.Operation translogOp : translogOps) { final Translog.Operation luceneOp = luceneOps.get(translogOp.seqNo()); if (luceneOp == null) { - if (minSeqNoToRetain <= translogOp.seqNo() && translogOp.seqNo() <= maxSeqNo) { + if (minSeqNoToRetain <= translogOp.seqNo()) { fail("Operation not found seq# [" + translogOp.seqNo() + "], global checkpoint [" + globalCheckpoint + "], " + "retention policy [" + retainedOps + "], maxSeqNo [" + maxSeqNo + "], translog op [" + translogOp + "]"); } else { diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 4e505ad13b9..f8e1ca41459 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -814,7 +814,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase /** * indexes the given requests on the supplied primary, modifying it for replicas */ - BulkShardRequest indexOnPrimary(IndexRequest request, IndexShard primary) throws Exception { + 
public BulkShardRequest indexOnPrimary(IndexRequest request, IndexShard primary) throws Exception { return executeReplicationRequestOnPrimary(primary, request); } @@ -828,7 +828,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase /** * indexes the given requests on the supplied replica shard */ - void indexOnReplica(BulkShardRequest request, ReplicationGroup group, IndexShard replica) throws Exception { + public void indexOnReplica(BulkShardRequest request, ReplicationGroup group, IndexShard replica) throws Exception { indexOnReplica(request, group, replica, group.primary.getPendingPrimaryTerm()); } diff --git a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AsyncRecoveryTarget.java b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AsyncRecoveryTarget.java index 3e1ae3aa2ed..1031a4bb7fb 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AsyncRecoveryTarget.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AsyncRecoveryTarget.java @@ -50,8 +50,8 @@ public class AsyncRecoveryTarget implements RecoveryTargetHandler { } @Override - public void finalizeRecovery(long globalCheckpoint, ActionListener listener) { - executor.execute(() -> target.finalizeRecovery(globalCheckpoint, listener)); + public void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionListener listener) { + executor.execute(() -> target.finalizeRecovery(globalCheckpoint, trimAboveSeqNo, listener)); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index e31096050b3..696fc350175 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -243,6 +243,9 @@ public abstract class ESTestCase extends LuceneTestCase { // Enable Netty leak detection and monitor logger for 
logged leak errors System.setProperty("io.netty.leakDetection.level", "paranoid"); + + // Disable direct buffer pooling + System.setProperty("io.netty.allocator.numDirectArenas", "0"); } protected final Logger logger = LogManager.getLogger(getClass()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java index fdb623d1d1e..246dac18ef8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.plugins.Plugin; import java.util.Arrays; @@ -51,6 +52,7 @@ public final class InternalSettingsPlugin extends Plugin { TRANSLOG_RETENTION_CHECK_INTERVAL_SETTING, IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING, IndexService.RETENTION_LEASE_SYNC_INTERVAL_SETTING, + IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING, IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING ); } diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle index cc4b48ed6a6..a44a18f858c 100644 --- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle +++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle @@ -38,8 +38,8 @@ task writeJavaPolicy { task "follow-cluster"(type: RestIntegTestTask) { dependsOn 'writeJavaPolicy', "leader-cluster" - useCluster testClusters."leader-cluster" runner { + useCluster testClusters."leader-cluster" systemProperty 'java.security.policy', "file://${policyFile}" systemProperty 'tests.target_cluster', 'follow' nonInputProperties.systemProperty 
'tests.leader_host', "${-> testClusters."leader-cluster".getAllHttpSocketURI().get(0)}" diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index 2ac624195ca..20e86c1d81f 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -24,8 +24,8 @@ testClusters."leader-cluster" { task "middle-cluster"(type: RestIntegTestTask) { dependsOn "leader-cluster" - useCluster testClusters."leader-cluster" runner { + useCluster testClusters."leader-cluster" systemProperty 'tests.target_cluster', 'middle' nonInputProperties.systemProperty 'tests.leader_host', "${-> testClusters."leader-cluster".getAllHttpSocketURI().get(0)}" @@ -40,9 +40,9 @@ testClusters."middle-cluster" { task 'follow-cluster'(type: RestIntegTestTask) { dependsOn "leader-cluster", "middle-cluster" - useCluster testClusters."leader-cluster" - useCluster testClusters."middle-cluster" runner { + useCluster testClusters."leader-cluster" + useCluster testClusters."middle-cluster" systemProperty 'tests.target_cluster', 'follow' nonInputProperties.systemProperty 'tests.leader_host', "${-> testClusters."leader-cluster".getAllHttpSocketURI().get(0)}" diff --git a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle index dc3d5e8a935..77d0a61c976 100644 --- a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle +++ b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle @@ -21,8 +21,8 @@ testClusters.'leader-cluster' { task 'follow-cluster'(type: RestIntegTestTask) { dependsOn 'leader-cluster' - useCluster testClusters.'leader-cluster' runner { + useCluster testClusters.'leader-cluster' systemProperty 'tests.target_cluster', 'follow' nonInputProperties.systemProperty 'tests.leader_host', { "${testClusters.'follow-cluster'.getAllHttpSocketURI().get(0)}" } diff --git a/x-pack/plugin/ccr/qa/restart/build.gradle 
b/x-pack/plugin/ccr/qa/restart/build.gradle index 564b5f87e0b..9dd401d0093 100644 --- a/x-pack/plugin/ccr/qa/restart/build.gradle +++ b/x-pack/plugin/ccr/qa/restart/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.RestTestRunnerTask apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-test' @@ -20,8 +21,8 @@ testClusters.'leader-cluster' { task 'follow-cluster'(type: RestIntegTestTask) { dependsOn 'leader-cluster' - useCluster testClusters.'leader-cluster' runner { + useCluster testClusters.'leader-cluster' systemProperty 'tests.target_cluster', 'follow' nonInputProperties.systemProperty 'tests.leader_host', "${-> testClusters.'leader-cluster'.getAllHttpSocketURI().get(0)}" @@ -36,12 +37,11 @@ testClusters.'follow-cluster' { nameCustomization = { 'follow' } } -task followClusterRestartTest(type: Test) { +task followClusterRestartTest(type: RestTestRunnerTask) { dependsOn tasks.'follow-cluster' useCluster testClusters.'leader-cluster' useCluster testClusters.'follow-cluster' - maxParallelForks = 1 systemProperty 'tests.rest.load_packaged', 'false' systemProperty 'tests.target_cluster', 'follow-restart' doFirst { @@ -49,7 +49,6 @@ task followClusterRestartTest(type: Test) { nonInputProperties.systemProperty 'tests.leader_host', "${-> testClusters.'leader-cluster'.getAllHttpSocketURI().get(0)}" nonInputProperties.systemProperty 'tests.rest.cluster', "${-> testClusters.'follow-cluster'.getAllHttpSocketURI().join(",")}" } - outputs.doNotCacheIf "Caching of REST tests not implemented yet", { false } } check.dependsOn followClusterRestartTest diff --git a/x-pack/plugin/ccr/qa/security/build.gradle b/x-pack/plugin/ccr/qa/security/build.gradle index 03859867553..a2f1235f590 100644 --- a/x-pack/plugin/ccr/qa/security/build.gradle +++ b/x-pack/plugin/ccr/qa/security/build.gradle @@ -28,8 +28,8 @@ testClusters.'leader-cluster' { task 'follow-cluster'(type: 
RestIntegTestTask) { dependsOn 'leader-cluster' - useCluster testClusters.'leader-cluster' runner { + useCluster testClusters.'leader-cluster' systemProperty 'tests.target_cluster', 'follow' nonInputProperties.systemProperty 'tests.leader_host', "${-> testClusters.'leader-cluster'.getAllHttpSocketURI().get(0)}" } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index b736200a57f..cff4338386e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -250,20 +250,20 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E return Arrays.asList( // stats API - new RestFollowStatsAction(settings, restController), - new RestCcrStatsAction(settings, restController), - new RestFollowInfoAction(settings, restController), + new RestFollowStatsAction(restController), + new RestCcrStatsAction(restController), + new RestFollowInfoAction(restController), // follow APIs - new RestPutFollowAction(settings, restController), - new RestResumeFollowAction(settings, restController), - new RestPauseFollowAction(settings, restController), - new RestUnfollowAction(settings, restController), + new RestPutFollowAction(restController), + new RestResumeFollowAction(restController), + new RestPauseFollowAction(restController), + new RestUnfollowAction(restController), // auto-follow APIs - new RestDeleteAutoFollowPatternAction(settings, restController), - new RestPutAutoFollowPatternAction(settings, restController), - new RestGetAutoFollowPatternAction(settings, restController), + new RestDeleteAutoFollowPatternAction(restController), + new RestPutAutoFollowPatternAction(restController), + new RestGetAutoFollowPatternAction(restController), // forget follower API - new RestForgetFollowerAction(settings, restController)); + new 
RestForgetFollowerAction(restController)); } public List getNamedWriteables() { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java index 2407344b04d..08353212c2b 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestCcrStatsAction.java @@ -7,19 +7,15 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; -import java.io.IOException; - public class RestCcrStatsAction extends BaseRestHandler { - public RestCcrStatsAction(final Settings settings, final RestController controller) { - super(settings); + public RestCcrStatsAction(final RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_ccr/stats", this); } @@ -29,7 +25,7 @@ public class RestCcrStatsAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { final CcrStatsAction.Request request = new CcrStatsAction.Request(); return channel -> client.execute(CcrStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java index 
9bfd260ce63..ae63d2004a6 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java @@ -6,21 +6,17 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction.Request; -import java.io.IOException; - import static org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction.INSTANCE; public class RestDeleteAutoFollowPatternAction extends BaseRestHandler { - public RestDeleteAutoFollowPatternAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteAutoFollowPatternAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, "/_ccr/auto_follow/{name}", this); } @@ -30,7 +26,7 @@ public class RestDeleteAutoFollowPatternAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { Request request = new Request(restRequest.param("name")); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java index f2e256bf5f8..86aba9d31a3 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowInfoAction.java @@ -8,19 +8,15 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ccr.action.FollowInfoAction; -import java.io.IOException; - public class RestFollowInfoAction extends BaseRestHandler { - public RestFollowInfoAction(final Settings settings, final RestController controller) { - super(settings); + public RestFollowInfoAction(final RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/{index}/_ccr/info", this); } @@ -30,7 +26,7 @@ public class RestFollowInfoAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { final FollowInfoAction.Request request = new FollowInfoAction.Request(); request.setFollowerIndices(Strings.splitStringByCommaToArray(restRequest.param("index"))); return channel -> client.execute(FollowInfoAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java index 8da8b66d8c2..31c2f5daac8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestFollowStatsAction.java @@ -8,19 +8,15 @@ package org.elasticsearch.xpack.ccr.rest; import 
org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; -import java.io.IOException; - public class RestFollowStatsAction extends BaseRestHandler { - public RestFollowStatsAction(final Settings settings, final RestController controller) { - super(settings); + public RestFollowStatsAction(final RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/{index}/_ccr/stats", this); } @@ -30,7 +26,7 @@ public class RestFollowStatsAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) { final FollowStatsAction.StatsRequest request = new FollowStatsAction.StatsRequest(); request.setIndices(Strings.splitStringByCommaToArray(restRequest.param("index"))); return channel -> client.execute(FollowStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestForgetFollowerAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestForgetFollowerAction.java index dc39aea372d..a79ec34a652 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestForgetFollowerAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestForgetFollowerAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -22,8 +21,7 @@ import java.util.Objects; public class RestForgetFollowerAction extends BaseRestHandler { - public RestForgetFollowerAction(final Settings settings, final RestController restController) { - super(Objects.requireNonNull(settings)); + public RestForgetFollowerAction(final RestController restController) { Objects.requireNonNull(restController); restController.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/forget_follower", this); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java index 40858f7f326..0a7393dc8fc 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java @@ -6,21 +6,17 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction.Request; -import java.io.IOException; - import static org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction.INSTANCE; public class RestGetAutoFollowPatternAction extends BaseRestHandler { - public RestGetAutoFollowPatternAction(Settings settings, RestController controller) { - super(settings); + public RestGetAutoFollowPatternAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow/{name}", this); 
controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow", this); } @@ -31,7 +27,7 @@ public class RestGetAutoFollowPatternAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { Request request = new Request(); request.setName(restRequest.param("name")); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java index c7be6382fa7..ef80fe1d365 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java @@ -6,21 +6,17 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import java.io.IOException; - import static org.elasticsearch.xpack.core.ccr.action.PauseFollowAction.INSTANCE; import static org.elasticsearch.xpack.core.ccr.action.PauseFollowAction.Request; public class RestPauseFollowAction extends BaseRestHandler { - public RestPauseFollowAction(Settings settings, RestController controller) { - super(settings); + public RestPauseFollowAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/pause_follow", this); } @@ -30,7 +26,7 @@ public class RestPauseFollowAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest 
restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { Request request = new Request(restRequest.param("index")); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java index 957312ff78d..076c3cfb18e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -20,8 +19,7 @@ import static org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction public class RestPutAutoFollowPatternAction extends BaseRestHandler { - public RestPutAutoFollowPatternAction(Settings settings, RestController controller) { - super(settings); + public RestPutAutoFollowPatternAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_ccr/auto_follow/{name}", this); } @@ -36,7 +34,7 @@ public class RestPutAutoFollowPatternAction extends BaseRestHandler { return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } - static Request createRequest(RestRequest restRequest) throws IOException { + private static Request createRequest(RestRequest restRequest) throws IOException { try (XContentParser parser = restRequest.contentOrSourceParamParser()) { return Request.fromXContent(parser, restRequest.param("name")); } diff 
--git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java index d7a2edd21d2..8ad2002a0c4 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutFollowAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -21,8 +20,7 @@ import static org.elasticsearch.xpack.core.ccr.action.PutFollowAction.Request; public class RestPutFollowAction extends BaseRestHandler { - public RestPutFollowAction(Settings settings, RestController controller) { - super(settings); + public RestPutFollowAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/{index}/_ccr/follow", this); } @@ -37,7 +35,7 @@ public class RestPutFollowAction extends BaseRestHandler { return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } - static Request createRequest(RestRequest restRequest) throws IOException { + private static Request createRequest(RestRequest restRequest) throws IOException { try (XContentParser parser = restRequest.contentOrSourceParamParser()) { ActiveShardCount waitForActiveShards = ActiveShardCount.parseString(restRequest.param("wait_for_active_shards")); return Request.fromXContent(parser, restRequest.param("index"), waitForActiveShards); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java index 
ce2eab52e0c..29dcd029e99 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -20,8 +19,7 @@ import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request public class RestResumeFollowAction extends BaseRestHandler { - public RestResumeFollowAction(Settings settings, RestController controller) { - super(settings); + public RestResumeFollowAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/resume_follow", this); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java index 127d06eb751..99a7ddac0da 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestUnfollowAction.java @@ -7,21 +7,17 @@ package org.elasticsearch.xpack.ccr.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; -import java.io.IOException; - import static org.elasticsearch.xpack.core.ccr.action.UnfollowAction.INSTANCE; public class RestUnfollowAction extends BaseRestHandler { - public RestUnfollowAction(Settings 
settings, RestController controller) { - super(settings); + public RestUnfollowAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/{index}/_ccr/unfollow", this); } @@ -31,7 +27,7 @@ public class RestUnfollowAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { UnfollowAction.Request request = new UnfollowAction.Request(restRequest.param("index")); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/Licensing.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/Licensing.java index 6fe3282e72b..3ff79251caf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/Licensing.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/Licensing.java @@ -74,13 +74,13 @@ public class Licensing implements ActionPlugin { IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { List handlers = new ArrayList<>(); - handlers.add(new RestGetLicenseAction(settings, restController)); - handlers.add(new RestPutLicenseAction(settings, restController)); - handlers.add(new RestDeleteLicenseAction(settings, restController)); - handlers.add(new RestGetTrialStatus(settings, restController)); - handlers.add(new RestGetBasicStatus(settings, restController)); - handlers.add(new RestPostStartTrialLicense(settings, restController)); - handlers.add(new RestPostStartBasicLicense(settings, restController)); + handlers.add(new RestGetLicenseAction(restController)); + handlers.add(new RestPutLicenseAction(restController)); + handlers.add(new RestDeleteLicenseAction(restController)); + handlers.add(new 
RestGetTrialStatus(restController)); + handlers.add(new RestGetBasicStatus(restController)); + handlers.add(new RestPostStartTrialLicense(restController)); + handlers.add(new RestPostStartBasicLicense(restController)); return handlers; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java index 5383726adc3..6dbbea2dbaa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -24,8 +23,7 @@ public class RestDeleteLicenseAction extends XPackRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteLicenseAction.class)); - RestDeleteLicenseAction(Settings settings, RestController controller) { - super(settings); + RestDeleteLicenseAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, "/_license", this, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java index 0195b350b05..74355d83a28 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetBasicStatus.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.apache.logging.log4j.LogManager; import 
org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -21,8 +20,7 @@ public class RestGetBasicStatus extends XPackRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetBasicStatus.class)); - RestGetBasicStatus(Settings settings, RestController controller) { - super(settings); + RestGetBasicStatus(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/_license/basic_status", this, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java index 02809ae974c..89edeceabb9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; @@ -32,8 +31,7 @@ public class RestGetLicenseAction extends XPackRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetLicenseAction.class)); - RestGetLicenseAction(Settings settings, RestController controller) { - super(settings); + RestGetLicenseAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/_license", this, diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java index 20366328e50..2215ab7f8bf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetTrialStatus.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -21,8 +20,7 @@ public class RestGetTrialStatus extends XPackRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetTrialStatus.class)); - RestGetTrialStatus(Settings settings, RestController controller) { - super(settings); + RestGetTrialStatus(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/_license/trial_status", this, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java index 79e8849669c..18a9b7cdfc7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; 
@@ -23,8 +22,7 @@ public class RestPostStartBasicLicense extends XPackRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPostStartBasicLicense.class)); - RestPostStartBasicLicense(Settings settings, RestController controller) { - super(settings); + RestPostStartBasicLicense(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_license/start_basic", this, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java index a263d0d82c2..b65e176f931 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartTrialLicense.java @@ -8,7 +8,6 @@ package org.elasticsearch.license; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -27,8 +26,7 @@ public class RestPostStartTrialLicense extends XPackRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPostStartTrialLicense.class)); - RestPostStartTrialLicense(Settings settings, RestController controller) { - super(settings); + RestPostStartTrialLicense(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_license/start_trial", this, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java index 986dacb6877..0f6bbba3aab 
100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java @@ -8,15 +8,12 @@ package org.elasticsearch.license; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.XPackClient; import org.elasticsearch.xpack.core.rest.XPackRestHandler; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; @@ -24,8 +21,7 @@ public class RestPutLicenseAction extends XPackRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutLicenseAction.class)); - RestPutLicenseAction(Settings settings, RestController controller) { - super(settings); + RestPutLicenseAction(RestController controller) { // TODO: remove POST endpoint? 
// TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( @@ -43,7 +39,7 @@ public class RestPutLicenseAction extends XPackRestHandler { } @Override - public RestChannelConsumer doPrepareRequest(final RestRequest request, final XPackClient client) throws IOException { + public RestChannelConsumer doPrepareRequest(final RestRequest request, final XPackClient client) { if (request.hasContent() == false) { throw new IllegalArgumentException("The license must be provided in the request body"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 287e511d64c..6711bd96cb7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -147,6 +147,7 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState; import org.elasticsearch.xpack.core.ml.dataframe.analyses.DataFrameAnalysis; import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetection; +import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.Evaluation; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.AucRoc; @@ -199,8 +200,8 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl. 
import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExceptExpression; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.RoleMapperExpression; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import org.elasticsearch.xpack.core.security.transport.netty4.SecurityNetty4Transport; import org.elasticsearch.xpack.core.slm.SnapshotLifecycleMetadata; import org.elasticsearch.xpack.core.slm.action.DeleteSnapshotLifecycleAction; @@ -454,6 +455,7 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl MachineLearningFeatureSetUsage::new), // ML - Data frame analytics new NamedWriteableRegistry.Entry(DataFrameAnalysis.class, OutlierDetection.NAME.getPreferredName(), OutlierDetection::new), + new NamedWriteableRegistry.Entry(DataFrameAnalysis.class, Regression.NAME.getPreferredName(), Regression::new), // ML - Data frame evaluation new NamedWriteableRegistry.Entry(Evaluation.class, BinarySoftClassification.NAME.getPreferredName(), BinarySoftClassification::new), @@ -474,9 +476,9 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl new NamedWriteableRegistry.Entry(NamedDiff.class, TokenMetaData.TYPE, TokenMetaData::readDiffFrom), new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.SECURITY, SecurityFeatureSetUsage::new), // security : conditional privileges - new NamedWriteableRegistry.Entry(ConditionalClusterPrivilege.class, - ConditionalClusterPrivileges.ManageApplicationPrivileges.WRITEABLE_NAME, - 
ConditionalClusterPrivileges.ManageApplicationPrivileges::createFrom), + new NamedWriteableRegistry.Entry(ConfigurableClusterPrivilege.class, + ConfigurableClusterPrivileges.ManageApplicationPrivileges.WRITEABLE_NAME, + ConfigurableClusterPrivileges.ManageApplicationPrivileges::createFrom), // security : role-mappings new NamedWriteableRegistry.Entry(RoleMapperExpression.class, AllExpression.NAME, AllExpression::new), new NamedWriteableRegistry.Entry(RoleMapperExpression.class, AnyExpression.NAME, AnyExpression::new), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index 3898ee999cb..6b88dd95b3b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -293,9 +293,9 @@ public class XPackPlugin extends XPackClientPlugin implements ExtensiblePlugin, IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { List handlers = new ArrayList<>(); - handlers.add(new RestXPackInfoAction(settings, restController)); - handlers.add(new RestXPackUsageAction(settings, restController)); - handlers.add(new RestReloadAnalyzersAction(settings, restController)); + handlers.add(new RestXPackInfoAction(restController)); + handlers.add(new RestXPackUsageAction(restController)); + handlers.add(new RestReloadAnalyzersAction(restController)); handlers.addAll(licensing.getRestHandlers(settings, restController, clusterSettings, indexScopedSettings, settingsFilter, indexNameExpressionResolver, nodesInCluster)); return handlers; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformCheckpointStats.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformCheckpointStats.java index 1734bca7fcc..5a19bfd3000 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformCheckpointStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformCheckpointStats.java @@ -11,12 +11,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.core.indexing.IndexerState; import java.io.IOException; import java.util.Objects; @@ -30,10 +28,9 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona */ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentObject { - public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, null, 0L, 0L); + public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, 0L, 0L); private final long checkpoint; - private final IndexerState indexerState; private final DataFrameIndexerPosition position; private final DataFrameTransformProgress checkpointProgress; private final long timestampMillis; @@ -42,30 +39,26 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO static final ConstructingObjectParser LENIENT_PARSER = new ConstructingObjectParser<>( "data_frame_transform_checkpoint_stats", true, args -> { long 
checkpoint = args[0] == null ? 0L : (Long) args[0]; - IndexerState indexerState = (IndexerState) args[1]; - DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[2]; - DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[3]; - long timestamp = args[4] == null ? 0L : (Long) args[4]; - long timeUpperBound = args[5] == null ? 0L : (Long) args[5]; + DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[1]; + DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[2]; + long timestamp = args[3] == null ? 0L : (Long) args[3]; + long timeUpperBound = args[4] == null ? 0L : (Long) args[4]; - return new DataFrameTransformCheckpointStats(checkpoint, indexerState, position, checkpointProgress, timestamp, timeUpperBound); + return new DataFrameTransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound); }); static { LENIENT_PARSER.declareLong(optionalConstructorArg(), DataFrameField.CHECKPOINT); - LENIENT_PARSER.declareField(optionalConstructorArg(), p -> IndexerState.fromString(p.text()), DataFrameField.INDEXER_STATE, - ObjectParser.ValueType.STRING); LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameIndexerPosition.PARSER, DataFrameField.POSITION); LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameTransformProgress.PARSER, DataFrameField.CHECKPOINT_PROGRESS); LENIENT_PARSER.declareLong(optionalConstructorArg(), DataFrameField.TIMESTAMP_MILLIS); LENIENT_PARSER.declareLong(optionalConstructorArg(), DataFrameField.TIME_UPPER_BOUND_MILLIS); } - public DataFrameTransformCheckpointStats(final long checkpoint, final IndexerState indexerState, - final DataFrameIndexerPosition position, final DataFrameTransformProgress checkpointProgress, - final long timestampMillis, final long timeUpperBoundMillis) { + public DataFrameTransformCheckpointStats(final long checkpoint, final DataFrameIndexerPosition position, + final DataFrameTransformProgress 
checkpointProgress, final long timestampMillis, + final long timeUpperBoundMillis) { this.checkpoint = checkpoint; - this.indexerState = indexerState; this.position = position; this.checkpointProgress = checkpointProgress; this.timestampMillis = timestampMillis; @@ -75,11 +68,6 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO public DataFrameTransformCheckpointStats(StreamInput in) throws IOException { if (in.getVersion().onOrAfter(Version.V_7_4_0)) { this.checkpoint = in.readVLong(); - if (in.readBoolean()) { - this.indexerState = in.readEnum(IndexerState.class); - } else { - this.indexerState = null; - } if (in.readBoolean()) { this.position = new DataFrameIndexerPosition(in); } else { @@ -92,7 +80,6 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO } } else { this.checkpoint = 0; - this.indexerState = null; this.position = null; this.checkpointProgress = null; } @@ -104,10 +91,6 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO return checkpoint; } - public IndexerState getIndexerState() { - return indexerState; - } - public DataFrameIndexerPosition getPosition() { return position; } @@ -128,9 +111,6 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(DataFrameField.CHECKPOINT.getPreferredName(), checkpoint); - if (indexerState != null) { - builder.field(DataFrameField.INDEXER_STATE.getPreferredName(), indexerState.value()); - } if (position != null) { builder.field(DataFrameField.POSITION.getPreferredName(), position); } @@ -153,12 +133,6 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_7_4_0)) { out.writeVLong(checkpoint); - if (indexerState != null) { - 
out.writeBoolean(true); - out.writeEnum(indexerState); - } else { - out.writeBoolean(false); - } if (position != null) { out.writeBoolean(true); position.writeTo(out); @@ -178,7 +152,7 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO @Override public int hashCode() { - return Objects.hash(checkpoint, indexerState, position, checkpointProgress, timestampMillis, timeUpperBoundMillis); + return Objects.hash(checkpoint, position, checkpointProgress, timestampMillis, timeUpperBoundMillis); } @Override @@ -194,7 +168,6 @@ public class DataFrameTransformCheckpointStats implements Writeable, ToXContentO DataFrameTransformCheckpointStats that = (DataFrameTransformCheckpointStats) other; return this.checkpoint == that.checkpoint - && Objects.equals(this.indexerState, that.indexerState) && Objects.equals(this.position, that.position) && Objects.equals(this.checkpointProgress, that.checkpointProgress) && this.timestampMillis == that.timestampMillis diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfig.java index 77ebae93df1..fe31eaffbef 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfig.java @@ -447,6 +447,11 @@ public class DataFrameTransformConfig extends AbstractDiffable new DataFrameTransformStats((String) a[0], - (DataFrameTransformTaskState) a[1], + (State) a[1], (String) a[2], (NodeAttributes) a[3], (DataFrameIndexerTransformStats) a[4], @@ -60,7 +63,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { static { PARSER.declareString(constructorArg(), DataFrameField.ID); - PARSER.declareField(constructorArg(), p -> 
DataFrameTransformTaskState.fromString(p.text()), TASK_STATE_FIELD, + PARSER.declareField(constructorArg(), p -> DataFrameTransformStats.State.fromString(p.text()), STATE_FIELD, ObjectParser.ValueType.STRING); PARSER.declareString(optionalConstructorArg(), REASON_FIELD); PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE_FIELD, ObjectParser.ValueType.OBJECT); @@ -80,7 +83,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { public static DataFrameTransformStats stoppedStats(String id, DataFrameIndexerTransformStats indexerTransformStats) { return new DataFrameTransformStats(id, - DataFrameTransformTaskState.STOPPED, + State.STOPPED, null, null, indexerTransformStats, @@ -88,11 +91,11 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { } - public DataFrameTransformStats(String id, DataFrameTransformTaskState taskState, @Nullable String reason, + public DataFrameTransformStats(String id, State state, @Nullable String reason, @Nullable NodeAttributes node, DataFrameIndexerTransformStats stats, DataFrameTransformCheckpointingInfo checkpointingInfo) { this.id = Objects.requireNonNull(id); - this.taskState = Objects.requireNonNull(taskState); + this.state = Objects.requireNonNull(state); this.reason = reason; this.node = node; this.indexerStats = Objects.requireNonNull(stats); @@ -102,7 +105,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { public DataFrameTransformStats(StreamInput in) throws IOException { if (in.getVersion().onOrAfter(Version.V_7_4_0)) { this.id = in.readString(); - this.taskState = in.readEnum(DataFrameTransformTaskState.class); + this.state = in.readEnum(State.class); this.reason = in.readOptionalString(); if (in.readBoolean()) { this.node = new NodeAttributes(in); @@ -117,9 +120,9 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { // to do the best we can of reading from a DataFrameTransformStoredDoc 
object // (which is called DataFrameTransformStateAndStats in 7.2/7.3) this.id = in.readString(); - DataFrameTransformState state = new DataFrameTransformState(in); - this.taskState = state.getTaskState(); - this.reason = state.getReason(); + DataFrameTransformState transformState = new DataFrameTransformState(in); + this.state = State.fromComponents(transformState.getTaskState(), transformState.getIndexerState()); + this.reason = transformState.getReason(); this.node = null; this.indexerStats = new DataFrameIndexerTransformStats(in); this.checkpointingInfo = new DataFrameTransformCheckpointingInfo(in); @@ -130,7 +133,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(DataFrameField.ID.getPreferredName(), id); - builder.field(TASK_STATE_FIELD.getPreferredName(), taskState.value()); + builder.field(STATE_FIELD.getPreferredName(), state.value()); if (reason != null) { builder.field(REASON_FIELD.getPreferredName(), reason); } @@ -147,7 +150,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_7_4_0)) { out.writeString(id); - out.writeEnum(taskState); + out.writeEnum(state); out.writeOptionalString(reason); if (node != null) { out.writeBoolean(true); @@ -162,8 +165,9 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { // to do the best we can of writing to a DataFrameTransformStoredDoc object // (which is called DataFrameTransformStateAndStats in 7.2/7.3) out.writeString(id); - new DataFrameTransformState(taskState, - checkpointingInfo.getNext().getIndexerState(), + Tuple stateComponents = state.toComponents(); + new DataFrameTransformState(stateComponents.v1(), + stateComponents.v2(), checkpointingInfo.getNext().getPosition(), 
checkpointingInfo.getLast().getCheckpoint(), reason, @@ -176,7 +180,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { @Override public int hashCode() { - return Objects.hash(id, taskState, reason, node, indexerStats, checkpointingInfo); + return Objects.hash(id, state, reason, node, indexerStats, checkpointingInfo); } @Override @@ -192,7 +196,7 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { DataFrameTransformStats that = (DataFrameTransformStats) other; return Objects.equals(this.id, that.id) - && Objects.equals(this.taskState, that.taskState) + && Objects.equals(this.state, that.state) && Objects.equals(this.reason, that.reason) && Objects.equals(this.node, that.node) && Objects.equals(this.indexerStats, that.indexerStats) @@ -203,8 +207,8 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { return id; } - public DataFrameTransformTaskState getTaskState() { - return taskState; + public State getState() { + return state; } @Nullable @@ -233,4 +237,79 @@ public class DataFrameTransformStats implements Writeable, ToXContentObject { public String toString() { return Strings.toString(this); } + + public enum State implements Writeable { + + STARTED, INDEXING, ABORTING, STOPPING, STOPPED, FAILED; + + public static State fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } + + public static State fromStream(StreamInput in) throws IOException { + return in.readEnum(State.class); + } + + public static State fromComponents(DataFrameTransformTaskState taskState, IndexerState indexerState) { + + if (taskState == null || taskState == DataFrameTransformTaskState.STOPPED) { + return STOPPED; + } else if (taskState == DataFrameTransformTaskState.FAILED) { + return FAILED; + } else { + + // If we get here then the task state must be started, and that means we should have an indexer state + assert(taskState == DataFrameTransformTaskState.STARTED); + 
assert(indexerState != null); + + switch (indexerState) { + case STARTED: + return STARTED; + case INDEXING: + return INDEXING; + case STOPPING: + return STOPPING; + case STOPPED: + return STOPPED; + case ABORTING: + return ABORTING; + default: + throw new IllegalStateException("Unexpected indexer state enum value: " + indexerState); + } + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeEnum(this); + } + + public String value() { + return name().toLowerCase(Locale.ROOT); + } + + public Tuple toComponents() { + + switch (this) { + case STARTED: + return new Tuple<>(DataFrameTransformTaskState.STARTED, IndexerState.STARTED); + case INDEXING: + return new Tuple<>(DataFrameTransformTaskState.STARTED, IndexerState.INDEXING); + case ABORTING: + return new Tuple<>(DataFrameTransformTaskState.STARTED, IndexerState.ABORTING); + case STOPPING: + return new Tuple<>(DataFrameTransformTaskState.STARTED, IndexerState.STOPPING); + case STOPPED: + // This one is not deterministic, because an overall state of STOPPED could arise + // from either (STOPPED, null) or (STARTED, STOPPED). However, (STARTED, STOPPED) + // is a very short-lived state so it's reasonable to assume the other, especially + // as this method is only for mixed version cluster compatibility. 
+ return new Tuple<>(DataFrameTransformTaskState.STOPPED, null); + case FAILED: + return new Tuple<>(DataFrameTransformTaskState.FAILED, null); + default: + throw new IllegalStateException("Unexpected state enum value: " + this); + } + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/DataFrameAnalysis.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/DataFrameAnalysis.java index f21533d9176..47d0f96194a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/DataFrameAnalysis.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/DataFrameAnalysis.java @@ -9,8 +9,22 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.xcontent.ToXContentObject; import java.util.Map; +import java.util.Set; public interface DataFrameAnalysis extends ToXContentObject, NamedWriteable { + /** + * @return The analysis parameters as a map + */ Map getParams(); + + /** + * @return {@code true} if this analysis supports fields with categorical values (i.e. 
text, keyword, ip) + */ + boolean supportsCategoricalFields(); + + /** + * @return The set of fields that analyzed documents must have for the analysis to operate + */ + Set getRequiredFields(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/MlDataFrameAnalysisNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/MlDataFrameAnalysisNamedXContentProvider.java index a48a23e4a83..e33a7748592 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/MlDataFrameAnalysisNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/MlDataFrameAnalysisNamedXContentProvider.java @@ -22,6 +22,10 @@ public class MlDataFrameAnalysisNamedXContentProvider implements NamedXContentPr boolean ignoreUnknownFields = (boolean) c; return OutlierDetection.fromXContent(p, ignoreUnknownFields); })); + namedXContent.add(new NamedXContentRegistry.Entry(DataFrameAnalysis.class, Regression.NAME, (p, c) -> { + boolean ignoreUnknownFields = (boolean) c; + return Regression.fromXContent(p, ignoreUnknownFields); + })); return namedXContent; } @@ -31,6 +35,8 @@ public class MlDataFrameAnalysisNamedXContentProvider implements NamedXContentPr namedWriteables.add(new NamedWriteableRegistry.Entry(DataFrameAnalysis.class, OutlierDetection.NAME.getPreferredName(), OutlierDetection::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(DataFrameAnalysis.class, Regression.NAME.getPreferredName(), + Regression::new)); return namedWriteables; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetection.java index e6891116ad6..35b3b5d3e95 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/OutlierDetection.java @@ -16,10 +16,12 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Set; public class OutlierDetection implements DataFrameAnalysis { @@ -152,6 +154,16 @@ public class OutlierDetection implements DataFrameAnalysis { return params; } + @Override + public boolean supportsCategoricalFields() { + return false; + } + + @Override + public Set getRequiredFields() { + return Collections.emptySet(); + } + public enum Method { LOF, LDOF, DISTANCE_KTH_NN, DISTANCE_KNN; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/Regression.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/Regression.java new file mode 100644 index 00000000000..a6b7c983a29 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/Regression.java @@ -0,0 +1,205 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.dataframe.analyses; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +public class Regression implements DataFrameAnalysis { + + public static final ParseField NAME = new ParseField("regression"); + + public static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable"); + public static final ParseField LAMBDA = new ParseField("lambda"); + public static final ParseField GAMMA = new ParseField("gamma"); + public static final ParseField ETA = new ParseField("eta"); + public static final ParseField MAXIMUM_NUMBER_TREES = new ParseField("maximum_number_trees"); + public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); + public static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); + + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); + + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME.getPreferredName(), lenient, + a -> new Regression((String) a[0], (Double) a[1], (Double) a[2], (Double) a[3], (Integer) a[4], (Double) a[5], (String) a[6])); + parser.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE); + 
parser.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); + parser.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); + parser.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); + parser.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAXIMUM_NUMBER_TREES); + parser.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); + return parser; + } + + public static Regression fromXContent(XContentParser parser, boolean ignoreUnknownFields) { + return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + private final String dependentVariable; + private final Double lambda; + private final Double gamma; + private final Double eta; + private final Integer maximumNumberTrees; + private final Double featureBagFraction; + private final String predictionFieldName; + + public Regression(String dependentVariable, @Nullable Double lambda, @Nullable Double gamma, @Nullable Double eta, + @Nullable Integer maximumNumberTrees, @Nullable Double featureBagFraction, @Nullable String predictionFieldName) { + this.dependentVariable = Objects.requireNonNull(dependentVariable); + + if (lambda != null && lambda < 0) { + throw ExceptionsHelper.badRequestException("[{}] must be a non-negative double", LAMBDA.getPreferredName()); + } + this.lambda = lambda; + + if (gamma != null && gamma < 0) { + throw ExceptionsHelper.badRequestException("[{}] must be a non-negative double", GAMMA.getPreferredName()); + } + this.gamma = gamma; + + if (eta != null && (eta < 0.001 || eta > 1)) { + throw ExceptionsHelper.badRequestException("[{}] must be a double in [0.001, 1]", ETA.getPreferredName()); + } + this.eta = eta; + + if (maximumNumberTrees != null && (maximumNumberTrees <= 0 || maximumNumberTrees > 2000)) { + throw 
ExceptionsHelper.badRequestException("[{}] must be an integer in [1, 2000]", MAXIMUM_NUMBER_TREES.getPreferredName()); + } + this.maximumNumberTrees = maximumNumberTrees; + + if (featureBagFraction != null && (featureBagFraction <= 0 || featureBagFraction > 1.0)) { + throw ExceptionsHelper.badRequestException("[{}] must be a double in (0, 1]", FEATURE_BAG_FRACTION.getPreferredName()); + } + this.featureBagFraction = featureBagFraction; + + this.predictionFieldName = predictionFieldName; + } + + public Regression(String dependentVariable) { + this(dependentVariable, null, null, null, null, null, null); + } + + public Regression(StreamInput in) throws IOException { + dependentVariable = in.readString(); + lambda = in.readOptionalDouble(); + gamma = in.readOptionalDouble(); + eta = in.readOptionalDouble(); + maximumNumberTrees = in.readOptionalVInt(); + featureBagFraction = in.readOptionalDouble(); + predictionFieldName = in.readOptionalString(); + } + + @Override + public String getWriteableName() { + return NAME.getPreferredName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(dependentVariable); + out.writeOptionalDouble(lambda); + out.writeOptionalDouble(gamma); + out.writeOptionalDouble(eta); + out.writeOptionalVInt(maximumNumberTrees); + out.writeOptionalDouble(featureBagFraction); + out.writeOptionalString(predictionFieldName); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable); + if (lambda != null) { + builder.field(LAMBDA.getPreferredName(), lambda); + } + if (gamma != null) { + builder.field(GAMMA.getPreferredName(), gamma); + } + if (eta != null) { + builder.field(ETA.getPreferredName(), eta); + } + if (maximumNumberTrees != null) { + builder.field(MAXIMUM_NUMBER_TREES.getPreferredName(), maximumNumberTrees); + } + if (featureBagFraction != null) 
{ + builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); + } + if (predictionFieldName != null) { + builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName); + } + builder.endObject(); + return builder; + } + + @Override + public Map getParams() { + Map params = new HashMap<>(); + params.put(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable); + if (lambda != null) { + params.put(LAMBDA.getPreferredName(), lambda); + } + if (gamma != null) { + params.put(GAMMA.getPreferredName(), gamma); + } + if (eta != null) { + params.put(ETA.getPreferredName(), eta); + } + if (maximumNumberTrees != null) { + params.put(MAXIMUM_NUMBER_TREES.getPreferredName(), maximumNumberTrees); + } + if (featureBagFraction != null) { + params.put(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); + } + if (predictionFieldName != null) { + params.put(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName); + } + return params; + } + + @Override + public boolean supportsCategoricalFields() { + return true; + } + + @Override + public Set getRequiredFields() { + return Collections.singleton(dependentVariable); + } + + @Override + public int hashCode() { + return Objects.hash(dependentVariable, lambda, gamma, eta, maximumNumberTrees, featureBagFraction, predictionFieldName); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Regression that = (Regression) o; + return Objects.equals(dependentVariable, that.dependentVariable) + && Objects.equals(lambda, that.lambda) + && Objects.equals(gamma, that.gamma) + && Objects.equals(eta, that.eta) + && Objects.equals(maximumNumberTrees, that.maximumNumberTrees) + && Objects.equals(featureBagFraction, that.featureBagFraction) + && Objects.equals(predictionFieldName, that.predictionFieldName); + } +} diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index baf655a280d..11674bf26f4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetection; +import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; @@ -443,6 +444,31 @@ public class ElasticsearchMappings { .endObject() .endObject() .endObject() + .startObject(Regression.NAME.getPreferredName()) + .startObject(PROPERTIES) + .startObject(Regression.DEPENDENT_VARIABLE.getPreferredName()) + .field(TYPE, KEYWORD) + .endObject() + .startObject(Regression.LAMBDA.getPreferredName()) + .field(TYPE, DOUBLE) + .endObject() + .startObject(Regression.GAMMA.getPreferredName()) + .field(TYPE, DOUBLE) + .endObject() + .startObject(Regression.ETA.getPreferredName()) + .field(TYPE, DOUBLE) + .endObject() + .startObject(Regression.MAXIMUM_NUMBER_TREES.getPreferredName()) + .field(TYPE, INTEGER) + .endObject() + .startObject(Regression.FEATURE_BAG_FRACTION.getPreferredName()) + .field(TYPE, DOUBLE) + .endObject() + .startObject(Regression.PREDICTION_FIELD_NAME.getPreferredName()) + .field(TYPE, KEYWORD) + .endObject() + .endObject() + .endObject() .endObject() .endObject() // 
re-used: CREATE_TIME diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java index 76860e28481..92583693af2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetection; +import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; @@ -299,6 +300,14 @@ public final class ReservedFieldNames { OutlierDetection.N_NEIGHBORS.getPreferredName(), OutlierDetection.METHOD.getPreferredName(), OutlierDetection.FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), + Regression.NAME.getPreferredName(), + Regression.DEPENDENT_VARIABLE.getPreferredName(), + Regression.LAMBDA.getPreferredName(), + Regression.GAMMA.getPreferredName(), + Regression.ETA.getPreferredName(), + Regression.MAXIMUM_NUMBER_TREES.getPreferredName(), + Regression.FEATURE_BAG_FRACTION.getPreferredName(), + Regression.PREDICTION_FIELD_NAME.getPreferredName(), ElasticsearchMappings.CONFIG_TYPE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/XPackRestHandler.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/XPackRestHandler.java index 5ac0969624b..90513bbc092 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/XPackRestHandler.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/XPackRestHandler.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.core.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.xpack.core.XPackClient; @@ -17,10 +16,6 @@ public abstract class XPackRestHandler extends BaseRestHandler { protected static String URI_BASE = "/_xpack"; - public XPackRestHandler(Settings settings) { - super(settings); - } - @Override public final RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { return doPrepareRequest(request, new XPackClient(client)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestReloadAnalyzersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestReloadAnalyzersAction.java index 3b379e8cebb..e3641f079f3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestReloadAnalyzersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestReloadAnalyzersAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.rest.action; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -19,8 +18,7 @@ import org.elasticsearch.xpack.core.action.ReloadAnalyzersRequest; import java.io.IOException; public class RestReloadAnalyzersAction extends BaseRestHandler { - public RestReloadAnalyzersAction(Settings settings, RestController 
controller) { - super(settings); + public RestReloadAnalyzersAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/{index}/_reload_search_analyzers", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_reload_search_analyzers", this); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java index c057c04cc63..e70d1a54443 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.rest.action; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -20,8 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; public class RestXPackInfoAction extends XPackRestHandler { - public RestXPackInfoAction(Settings settings, RestController controller) { - super(settings); + public RestXPackInfoAction(RestController controller) { controller.registerHandler(HEAD, URI_BASE, this); controller.registerHandler(GET, URI_BASE, this); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackUsageAction.java index 0f09f17dbb0..8bdb5cb5ee1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackUsageAction.java @@ -6,7 +6,6 @@ package
org.elasticsearch.xpack.core.rest.action; import org.elasticsearch.action.support.master.MasterNodeRequest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; @@ -20,14 +19,12 @@ import org.elasticsearch.xpack.core.action.XPackUsageRequestBuilder; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.rest.XPackRestHandler; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestStatus.OK; public class RestXPackUsageAction extends XPackRestHandler { - public RestXPackUsageAction(Settings settings, RestController controller) { - super(settings); + + public RestXPackUsageAction(RestController controller) { controller.registerHandler(GET, URI_BASE + "/usage", this); } @@ -37,7 +34,7 @@ public class RestXPackUsageAction extends XPackRestHandler { } @Override - public RestChannelConsumer doPrepareRequest(RestRequest request, XPackClient client) throws IOException { + public RestChannelConsumer doPrepareRequest(RestRequest request, XPackClient client) { final TimeValue masterTimeout = request.paramAsTime("master_timeout", MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT); return channel -> new XPackUsageRequestBuilder(client.es()) .setMasterNodeTimeout(masterTimeout) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java index 6a48a814eba..3c310deabd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java @@ -16,8 +16,8 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import java.io.IOException; @@ -36,7 +36,7 @@ public class PutRoleRequest extends ActionRequest implements WriteRequest indicesPrivileges = new ArrayList<>(); private List applicationPrivileges = new ArrayList<>(); private String[] runAs = Strings.EMPTY_ARRAY; @@ -54,7 +54,7 @@ public class PutRoleRequest extends ActionRequest implements WriteRequest cluster; - private Set conditionalCluster; + private Set configurableClusterPrivileges; private Set index; private Set application; private Set runAs; @@ -41,18 +41,18 @@ public final class GetUserPrivilegesResponse extends ActionResponse { public GetUserPrivilegesResponse(StreamInput in) throws IOException { super(in); cluster = Collections.unmodifiableSet(in.readSet(StreamInput::readString)); - conditionalCluster = Collections.unmodifiableSet(in.readSet(ConditionalClusterPrivileges.READER)); + configurableClusterPrivileges = Collections.unmodifiableSet(in.readSet(ConfigurableClusterPrivileges.READER)); index = Collections.unmodifiableSet(in.readSet(Indices::new)); application = Collections.unmodifiableSet(in.readSet(RoleDescriptor.ApplicationResourcePrivileges::new)); runAs = Collections.unmodifiableSet(in.readSet(StreamInput::readString)); } - public GetUserPrivilegesResponse(Set cluster, Set conditionalCluster, + public 
GetUserPrivilegesResponse(Set cluster, Set conditionalCluster, Set index, Set application, Set runAs) { this.cluster = Collections.unmodifiableSet(cluster); - this.conditionalCluster = Collections.unmodifiableSet(conditionalCluster); + this.configurableClusterPrivileges = Collections.unmodifiableSet(conditionalCluster); this.index = Collections.unmodifiableSet(index); this.application = Collections.unmodifiableSet(application); this.runAs = Collections.unmodifiableSet(runAs); @@ -62,8 +62,8 @@ public final class GetUserPrivilegesResponse extends ActionResponse { return cluster; } - public Set getConditionalClusterPrivileges() { - return conditionalCluster; + public Set getConditionalClusterPrivileges() { + return configurableClusterPrivileges; } public Set getIndexPrivileges() { @@ -81,7 +81,7 @@ public final class GetUserPrivilegesResponse extends ActionResponse { @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(cluster, StreamOutput::writeString); - out.writeCollection(conditionalCluster, ConditionalClusterPrivileges.WRITER); + out.writeCollection(configurableClusterPrivileges, ConfigurableClusterPrivileges.WRITER); out.writeCollection(index); out.writeCollection(application); out.writeCollection(runAs, StreamOutput::writeString); @@ -97,7 +97,7 @@ public final class GetUserPrivilegesResponse extends ActionResponse { } final GetUserPrivilegesResponse that = (GetUserPrivilegesResponse) other; return Objects.equals(cluster, that.cluster) && - Objects.equals(conditionalCluster, that.conditionalCluster) && + Objects.equals(configurableClusterPrivileges, that.configurableClusterPrivileges) && Objects.equals(index, that.index) && Objects.equals(application, that.application) && Objects.equals(runAs, that.runAs); @@ -105,7 +105,7 @@ public final class GetUserPrivilegesResponse extends ActionResponse { @Override public int hashCode() { - return Objects.hash(cluster, conditionalCluster, index, application, runAs); + return 
Objects.hash(cluster, configurableClusterPrivileges, index, application, runAs); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index 58b9fbf426b..89016ab8b41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -24,8 +24,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import org.elasticsearch.xpack.core.security.support.Validation; import org.elasticsearch.xpack.core.security.xcontent.XContentUtils; @@ -49,7 +49,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { private final String name; private final String[] clusterPrivileges; - private final ConditionalClusterPrivilege[] conditionalClusterPrivileges; + private final ConfigurableClusterPrivilege[] configurableClusterPrivileges; private final IndicesPrivileges[] indicesPrivileges; private final ApplicationResourcePrivileges[] applicationPrivileges; private final String[] runAs; @@ -65,7 +65,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { /** * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], - * ConditionalClusterPrivilege[], 
String[], Map, Map)} + * ConfigurableClusterPrivilege[], String[], Map, Map)} */ @Deprecated public RoleDescriptor(String name, @@ -78,7 +78,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { /** * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], - * ConditionalClusterPrivilege[], String[], Map, Map)} + * ConfigurableClusterPrivilege[], String[], Map, Map)} */ @Deprecated public RoleDescriptor(String name, @@ -94,14 +94,14 @@ public class RoleDescriptor implements ToXContentObject, Writeable { @Nullable String[] clusterPrivileges, @Nullable IndicesPrivileges[] indicesPrivileges, @Nullable ApplicationResourcePrivileges[] applicationPrivileges, - @Nullable ConditionalClusterPrivilege[] conditionalClusterPrivileges, + @Nullable ConfigurableClusterPrivilege[] configurableClusterPrivileges, @Nullable String[] runAs, @Nullable Map metadata, @Nullable Map transientMetadata) { this.name = name; this.clusterPrivileges = clusterPrivileges != null ? clusterPrivileges : Strings.EMPTY_ARRAY; - this.conditionalClusterPrivileges = conditionalClusterPrivileges != null - ? conditionalClusterPrivileges : ConditionalClusterPrivileges.EMPTY_ARRAY; + this.configurableClusterPrivileges = configurableClusterPrivileges != null + ? configurableClusterPrivileges : ConfigurableClusterPrivileges.EMPTY_ARRAY; this.indicesPrivileges = indicesPrivileges != null ? indicesPrivileges : IndicesPrivileges.NONE; this.applicationPrivileges = applicationPrivileges != null ? applicationPrivileges : ApplicationResourcePrivileges.NONE; this.runAs = runAs != null ? 
runAs : Strings.EMPTY_ARRAY; @@ -121,13 +121,12 @@ public class RoleDescriptor implements ToXContentObject, Writeable { this.runAs = in.readStringArray(); this.metadata = in.readMap(); this.transientMetadata = in.readMap(); - if (in.getVersion().onOrAfter(Version.V_6_4_0)) { this.applicationPrivileges = in.readArray(ApplicationResourcePrivileges::new, ApplicationResourcePrivileges[]::new); - this.conditionalClusterPrivileges = ConditionalClusterPrivileges.readArray(in); + this.configurableClusterPrivileges = ConfigurableClusterPrivileges.readArray(in); } else { this.applicationPrivileges = ApplicationResourcePrivileges.NONE; - this.conditionalClusterPrivileges = ConditionalClusterPrivileges.EMPTY_ARRAY; + this.configurableClusterPrivileges = ConfigurableClusterPrivileges.EMPTY_ARRAY; } } @@ -139,8 +138,8 @@ public class RoleDescriptor implements ToXContentObject, Writeable { return this.clusterPrivileges; } - public ConditionalClusterPrivilege[] getConditionalClusterPrivileges() { - return this.conditionalClusterPrivileges; + public ConfigurableClusterPrivilege[] getConditionalClusterPrivileges() { + return this.configurableClusterPrivileges; } public IndicesPrivileges[] getIndicesPrivileges() { @@ -172,7 +171,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { StringBuilder sb = new StringBuilder("Role["); sb.append("name=").append(name); sb.append(", cluster=[").append(Strings.arrayToCommaDelimitedString(clusterPrivileges)); - sb.append("], global=[").append(Strings.arrayToCommaDelimitedString(conditionalClusterPrivileges)); + sb.append("], global=[").append(Strings.arrayToCommaDelimitedString(configurableClusterPrivileges)); sb.append("], indicesPrivileges=["); for (IndicesPrivileges group : indicesPrivileges) { sb.append(group.toString()).append(","); @@ -197,7 +196,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { if (!name.equals(that.name)) return false; if (!Arrays.equals(clusterPrivileges, 
that.clusterPrivileges)) return false; - if (!Arrays.equals(conditionalClusterPrivileges, that.conditionalClusterPrivileges)) return false; + if (!Arrays.equals(configurableClusterPrivileges, that.configurableClusterPrivileges)) return false; if (!Arrays.equals(indicesPrivileges, that.indicesPrivileges)) return false; if (!Arrays.equals(applicationPrivileges, that.applicationPrivileges)) return false; if (!metadata.equals(that.getMetadata())) return false; @@ -208,7 +207,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { public int hashCode() { int result = name.hashCode(); result = 31 * result + Arrays.hashCode(clusterPrivileges); - result = 31 * result + Arrays.hashCode(conditionalClusterPrivileges); + result = 31 * result + Arrays.hashCode(configurableClusterPrivileges); result = 31 * result + Arrays.hashCode(indicesPrivileges); result = 31 * result + Arrays.hashCode(applicationPrivileges); result = 31 * result + Arrays.hashCode(runAs); @@ -235,9 +234,9 @@ public class RoleDescriptor implements ToXContentObject, Writeable { public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation) throws IOException { builder.startObject(); builder.array(Fields.CLUSTER.getPreferredName(), clusterPrivileges); - if (conditionalClusterPrivileges.length != 0) { + if (configurableClusterPrivileges.length != 0) { builder.field(Fields.GLOBAL.getPreferredName()); - ConditionalClusterPrivileges.toXContent(builder, params, Arrays.asList(conditionalClusterPrivileges)); + ConfigurableClusterPrivileges.toXContent(builder, params, Arrays.asList(configurableClusterPrivileges)); } builder.array(Fields.INDICES.getPreferredName(), (Object[]) indicesPrivileges); builder.array(Fields.APPLICATIONS.getPreferredName(), (Object[]) applicationPrivileges); @@ -266,7 +265,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { out.writeMap(transientMetadata); if (out.getVersion().onOrAfter(Version.V_6_4_0)) { 
out.writeArray(ApplicationResourcePrivileges::write, applicationPrivileges); - ConditionalClusterPrivileges.writeArray(out, getConditionalClusterPrivileges()); + ConfigurableClusterPrivileges.writeArray(out, getConditionalClusterPrivileges()); } } @@ -298,7 +297,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { String currentFieldName = null; IndicesPrivileges[] indicesPrivileges = null; String[] clusterPrivileges = null; - List conditionalClusterPrivileges = Collections.emptyList(); + List configurableClusterPrivileges = Collections.emptyList(); ApplicationResourcePrivileges[] applicationPrivileges = null; String[] runAsUsers = null; Map metadata = null; @@ -316,7 +315,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { || Fields.APPLICATION.match(currentFieldName, parser.getDeprecationHandler())) { applicationPrivileges = parseApplicationPrivileges(name, parser); } else if (Fields.GLOBAL.match(currentFieldName, parser.getDeprecationHandler())) { - conditionalClusterPrivileges = ConditionalClusterPrivileges.parse(parser); + configurableClusterPrivileges = ConfigurableClusterPrivileges.parse(parser); } else if (Fields.METADATA.match(currentFieldName, parser.getDeprecationHandler())) { if (token != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException( @@ -337,7 +336,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { } } return new RoleDescriptor(name, clusterPrivileges, indicesPrivileges, applicationPrivileges, - conditionalClusterPrivileges.toArray(new ConditionalClusterPrivilege[conditionalClusterPrivileges.size()]), runAsUsers, + configurableClusterPrivileges.toArray(new ConfigurableClusterPrivilege[configurableClusterPrivileges.size()]), runAsUsers, metadata, null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 68779897139..c9ed0f39fbe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -5,121 +5,196 @@ */ package org.elasticsearch.xpack.core.security.authz.permission; +import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; -import java.util.Collection; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.function.Predicate; -import java.util.stream.Collectors; /** * A permission that is based on privileges for cluster wide actions, with the optional ability to inspect the request object */ -public abstract class ClusterPermission { - private final ClusterPrivilege privilege; +public class ClusterPermission { + public static final ClusterPermission NONE = new ClusterPermission(Collections.emptySet(), Collections.emptyList()); - ClusterPermission(ClusterPrivilege privilege) { - this.privilege = privilege; + private final Set clusterPrivileges; + private final List checks; + + private ClusterPermission(final Set clusterPrivileges, + final List checks) { + this.clusterPrivileges = Collections.unmodifiableSet(clusterPrivileges); + this.checks = Collections.unmodifiableList(checks); } - public ClusterPrivilege 
privilege() { - return privilege; - } - - public abstract boolean check(String action, TransportRequest request); - - public boolean grants(ClusterPrivilege clusterPrivilege) { - return Operations.subsetOf(clusterPrivilege.getAutomaton(), this.privilege().getAutomaton()); - } - - public abstract List> privileges(); - /** - * A permission that is based solely on cluster privileges and does not consider request state + * Checks permission to a cluster action for a given request. + * + * @param action cluster action + * @param request {@link TransportRequest} + * @return {@code true} if the access is allowed else returns {@code false} */ - public static class SimpleClusterPermission extends ClusterPermission { + public boolean check(final String action, final TransportRequest request) { + return checks.stream().anyMatch(permission -> permission.check(action, request)); + } - public static final SimpleClusterPermission NONE = new SimpleClusterPermission(ClusterPrivilege.NONE); + /** + * Checks if the specified {@link ClusterPermission}'s actions are implied by this {@link ClusterPermission} + * + * @param otherClusterPermission {@link ClusterPermission} + * @return {@code true} if the specified cluster permissions actions are implied by this cluster permission else returns {@code false} + */ + public boolean implies(final ClusterPermission otherClusterPermission) { + if (otherClusterPermission.checks.isEmpty()) { + return true; + } else { + for (PermissionCheck otherPermissionCheck : otherClusterPermission.checks) { + boolean isImplied = this.checks.stream().anyMatch(thisPermissionCheck -> thisPermissionCheck.implies(otherPermissionCheck)); + if (isImplied == false) { + return false; + } + } + return true; + } + } - private final Predicate predicate; + public Set privileges() { + return clusterPrivileges; + } - SimpleClusterPermission(ClusterPrivilege privilege) { - super(privilege); - this.predicate = privilege.predicate(); + public static Builder builder() { + return 
new Builder(); + } + + public static class Builder { + private final Set clusterPrivileges = new HashSet<>(); + private final List actionAutomatons = new ArrayList<>(); + private final List permissionChecks = new ArrayList<>(); + + public Builder add(final ClusterPrivilege clusterPrivilege, final Set allowedActionPatterns, + final Set excludeActionPatterns) { + this.clusterPrivileges.add(clusterPrivilege); + if (allowedActionPatterns.isEmpty() && excludeActionPatterns.isEmpty()) { + this.actionAutomatons.add(Automatons.EMPTY); + } else { + final Automaton allowedAutomaton = Automatons.patterns(allowedActionPatterns); + final Automaton excludedAutomaton = Automatons.patterns(excludeActionPatterns); + this.actionAutomatons.add(Automatons.minusAndMinimize(allowedAutomaton, excludedAutomaton)); + } + return this; } - @Override - public boolean check(String action, TransportRequest request) { - return predicate.test(action); + public Builder add(final ConfigurableClusterPrivilege configurableClusterPrivilege, final Predicate actionPredicate, + final Predicate requestPredicate) { + return add(configurableClusterPrivilege, new ActionRequestPredicatePermissionCheck(configurableClusterPrivilege, + actionPredicate, + requestPredicate)); } - @Override - public List> privileges() { - return Collections.singletonList(new Tuple<>(super.privilege, null)); + public Builder add(final ClusterPrivilege clusterPrivilege, final PermissionCheck permissionCheck) { + this.clusterPrivileges.add(clusterPrivilege); + this.permissionChecks.add(permissionCheck); + return this; + } + + public ClusterPermission build() { + if (clusterPrivileges.isEmpty()) { + return NONE; + } + List checks = this.permissionChecks; + if (false == actionAutomatons.isEmpty()) { + final Automaton mergedAutomaton = Automatons.unionAndMinimize(this.actionAutomatons); + checks = new ArrayList<>(this.permissionChecks.size() + 1); + checks.add(new AutomatonPermissionCheck(mergedAutomaton)); + 
checks.addAll(this.permissionChecks); + } + return new ClusterPermission(this.clusterPrivileges, checks); } } /** - * A permission that makes use of both cluster privileges and request inspection + * Evaluates whether the cluster actions (optionally for a given request) + * is permitted by this permission. */ - public static class ConditionalClusterPermission extends ClusterPermission { - private final ConditionalClusterPrivilege conditionalPrivilege; + public interface PermissionCheck { + /** + * Checks permission to a cluster action for a given request. + * + * @param action action name + * @param request {@link TransportRequest} + * @return {@code true} if the specified action for given request is allowed else returns {@code false} + */ + boolean check(String action, TransportRequest request); - public ConditionalClusterPermission(ConditionalClusterPrivilege conditionalPrivilege) { - super(conditionalPrivilege.getPrivilege()); - this.conditionalPrivilege = conditionalPrivilege; + /** + * Checks whether specified {@link PermissionCheck} is implied by this {@link PermissionCheck}.
+ * This is important method to be considered during implementation as it compares {@link PermissionCheck}s. + * If {@code permissionCheck.implies(otherPermissionCheck)}, that means all the actions allowed by {@code otherPermissionCheck} + * are also allowed by {@code permissionCheck}, irrespective of the request structure. + * + * @param otherPermissionCheck {@link PermissionCheck} + * @return {@code true} if the specified permission is implied by this {@link PermissionCheck} else + * returns {@code false} + */ + boolean implies(PermissionCheck otherPermissionCheck); + } + + // Automaton based permission check + private static class AutomatonPermissionCheck implements PermissionCheck { + private final Automaton automaton; + private final Predicate actionPredicate; + + AutomatonPermissionCheck(final Automaton automaton) { + this.automaton = automaton; + this.actionPredicate = Automatons.predicate(automaton); } @Override - public boolean check(String action, TransportRequest request) { - return super.privilege.predicate().test(action) && conditionalPrivilege.getRequestPredicate().test(request); + public boolean check(final String action, final TransportRequest request) { + return actionPredicate.test(action); } @Override - public List> privileges() { - return Collections.singletonList(new Tuple<>(super.privilege, conditionalPrivilege)); + public boolean implies(final PermissionCheck permissionCheck) { + if (permissionCheck instanceof AutomatonPermissionCheck) { + return Operations.subsetOf(((AutomatonPermissionCheck) permissionCheck).automaton, this.automaton); + } + return false; } } - /** - * A permission that composes a number of other cluster permissions - */ - public static class CompositeClusterPermission extends ClusterPermission { - private final Collection children; + // action and request based permission check + private static class ActionRequestPredicatePermissionCheck implements PermissionCheck { + private final ClusterPrivilege clusterPrivilege; + 
final Predicate actionPredicate; + final Predicate requestPredicate; - public CompositeClusterPermission(Collection children) { - super(buildPrivilege(children)); - this.children = children; - } - - private static ClusterPrivilege buildPrivilege(Collection children) { - final Set names = children.stream() - .map(ClusterPermission::privilege) - .map(ClusterPrivilege::name) - .flatMap(Set::stream) - .collect(Collectors.toSet()); - return ClusterPrivilege.get(names); + ActionRequestPredicatePermissionCheck(final ClusterPrivilege clusterPrivilege, final Predicate actionPredicate, + final Predicate requestPredicate) { + this.clusterPrivilege = clusterPrivilege; + this.actionPredicate = actionPredicate; + this.requestPredicate = requestPredicate; } @Override - public List> privileges() { - return children.stream().map(ClusterPermission::privileges).flatMap(List::stream).collect(Collectors.toList()); + public boolean check(final String action, final TransportRequest request) { + return actionPredicate.test(action) && requestPredicate.test(request); } @Override - public boolean check(String action, TransportRequest request) { - return children.stream().anyMatch(p -> p.check(action, request)); - } - - @Override - public boolean grants(ClusterPrivilege clusterPrivilege) { - return children.stream().anyMatch(p -> p.grants(clusterPrivilege)); + public boolean implies(final PermissionCheck permissionCheck) { + if (permissionCheck instanceof ActionRequestPredicatePermissionCheck) { + final ActionRequestPredicatePermissionCheck otherCheck = (ActionRequestPredicatePermissionCheck) permissionCheck; + return this.clusterPrivilege.equals(otherCheck.clusterPrivilege); + } + return false; } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java index 207a5ab0567..ef898a0876d 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java @@ -17,7 +17,8 @@ import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessCo import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; @@ -137,7 +138,7 @@ public class Role { * @return {@code true} if cluster privilege is allowed else returns {@code false} */ public boolean grants(ClusterPrivilege clusterPrivilege) { - return cluster.grants(clusterPrivilege); + return cluster.implies(clusterPrivilege.buildPermission(ClusterPermission.builder()).build()); } /** @@ -184,7 +185,7 @@ public class Role { public static class Builder { private final String[] names; - private ClusterPermission cluster = ClusterPermission.SimpleClusterPermission.NONE; + private ClusterPermission cluster = ClusterPermission.NONE; private RunAsPermission runAs = RunAsPermission.NONE; private List groups = new ArrayList<>(); private List>> applicationPrivs = new ArrayList<>(); @@ -209,30 +210,18 @@ public class Role { } } - public Builder cluster(Set privilegeNames, Iterable conditionalClusterPrivileges) { + public Builder cluster(Set privilegeNames, Iterable configurableClusterPrivileges) { + ClusterPermission.Builder builder = ClusterPermission.builder(); List 
clusterPermissions = new ArrayList<>(); if (privilegeNames.isEmpty() == false) { - clusterPermissions.add(new ClusterPermission.SimpleClusterPermission(ClusterPrivilege.get(privilegeNames))); + for (String name : privilegeNames) { + builder = ClusterPrivilegeResolver.resolve(name).buildPermission(builder); + } } - for (ConditionalClusterPrivilege ccp : conditionalClusterPrivileges) { - clusterPermissions.add(new ClusterPermission.ConditionalClusterPermission(ccp)); + for (ConfigurableClusterPrivilege ccp : configurableClusterPrivileges) { + builder = ccp.buildPermission(builder); } - if (clusterPermissions.isEmpty()) { - this.cluster = ClusterPermission.SimpleClusterPermission.NONE; - } else if (clusterPermissions.size() == 1) { - this.cluster = clusterPermissions.get(0); - } else { - this.cluster = new ClusterPermission.CompositeClusterPermission(clusterPermissions); - } - return this; - } - - /** - * @deprecated Use {@link #cluster(Set, Iterable)} - */ - @Deprecated - public Builder cluster(ClusterPrivilege privilege) { - cluster = new ClusterPermission.SimpleClusterPermission(privilege); + this.cluster = builder.build(); return this; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java new file mode 100644 index 00000000000..e1c4120e6b2 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ActionClusterPrivilege.java @@ -0,0 +1,66 @@ +/* + * + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ * + */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; + +import java.util.Collections; +import java.util.Set; + +/** + * A {@link NamedClusterPrivilege} that can be used to define an access to cluster level actions. + */ +public class ActionClusterPrivilege implements NamedClusterPrivilege { + private final String name; + private final Set allowedActionPatterns; + private final Set excludedActionPatterns; + + /** + * Constructor for {@link ActionClusterPrivilege} defining what cluster actions are accessible for the user with this privilege. + * + * @param name name for the cluster privilege + * @param allowedActionPatterns a set of cluster action patterns that are allowed for the user with this privilege. + */ + public ActionClusterPrivilege(final String name, final Set allowedActionPatterns) { + this(name, allowedActionPatterns, Collections.emptySet()); + } + + /** + * Constructor for {@link ActionClusterPrivilege} that defines what cluster actions are accessible for the + * user with this privilege after excluding the action patterns {@code excludedActionPatterns} from the allowed action patterns + * {@code allowedActionPatterns} + * + * @param name name for the cluster privilege + * @param allowedActionPatterns a set of cluster action patterns + * @param excludedActionPatterns a set of cluster action patterns + */ + public ActionClusterPrivilege(final String name, final Set allowedActionPatterns, final Set excludedActionPatterns) { + this.name = name; + this.allowedActionPatterns = allowedActionPatterns; + this.excludedActionPatterns = excludedActionPatterns; + } + + @Override + public String name() { + return name; + } + + public Set getAllowedActionPatterns() { + return allowedActionPatterns; + } + + public Set getExcludedActionPatterns() { + return excludedActionPatterns; + } + + @Override + public ClusterPermission.Builder buildPermission(final 
ClusterPermission.Builder builder) { + return builder.add(this, allowedActionPatterns, excludedActionPatterns); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java index c5d92164533..a99c46773d3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilege.java @@ -5,193 +5,16 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction; -import org.elasticsearch.xpack.core.ilm.action.GetStatusAction; -import org.elasticsearch.xpack.core.ilm.action.StartILMAction; -import org.elasticsearch.xpack.core.ilm.action.StopILMAction; -import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; -import org.elasticsearch.xpack.core.security.action.token.RefreshTokenAction; -import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; -import org.elasticsearch.xpack.core.security.support.Automatons; -import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleAction; +import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; -import 
java.util.Collections; -import java.util.HashSet; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Predicate; - -import static org.elasticsearch.xpack.core.security.support.Automatons.minusAndMinimize; -import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; - -public final class ClusterPrivilege extends Privilege { - - // shared automatons - private static final Automaton MANAGE_SECURITY_AUTOMATON = patterns("cluster:admin/xpack/security/*"); - private static final Automaton MANAGE_SAML_AUTOMATON = patterns("cluster:admin/xpack/security/saml/*", - InvalidateTokenAction.NAME, RefreshTokenAction.NAME); - private static final Automaton MANAGE_OIDC_AUTOMATON = patterns("cluster:admin/xpack/security/oidc/*"); - private static final Automaton MANAGE_TOKEN_AUTOMATON = patterns("cluster:admin/xpack/security/token/*"); - private static final Automaton MANAGE_API_KEY_AUTOMATON = patterns("cluster:admin/xpack/security/api_key/*"); - private static final Automaton MONITOR_AUTOMATON = patterns("cluster:monitor/*"); - private static final Automaton MONITOR_ML_AUTOMATON = patterns("cluster:monitor/xpack/ml/*"); - private static final Automaton MONITOR_DATA_FRAME_AUTOMATON = patterns("cluster:monitor/data_frame/*"); - private static final Automaton MONITOR_WATCHER_AUTOMATON = patterns("cluster:monitor/xpack/watcher/*"); - private static final Automaton MONITOR_ROLLUP_AUTOMATON = patterns("cluster:monitor/xpack/rollup/*"); - private static final Automaton ALL_CLUSTER_AUTOMATON = patterns("cluster:*", "indices:admin/template/*"); - private static final Automaton MANAGE_AUTOMATON = minusAndMinimize(ALL_CLUSTER_AUTOMATON, MANAGE_SECURITY_AUTOMATON); - private static final Automaton MANAGE_ML_AUTOMATON = patterns("cluster:admin/xpack/ml/*", "cluster:monitor/xpack/ml/*"); - private static final Automaton MANAGE_DATA_FRAME_AUTOMATON = 
patterns("cluster:admin/data_frame/*", "cluster:monitor/data_frame/*"); - private static final Automaton MANAGE_WATCHER_AUTOMATON = patterns("cluster:admin/xpack/watcher/*", "cluster:monitor/xpack/watcher/*"); - private static final Automaton TRANSPORT_CLIENT_AUTOMATON = patterns("cluster:monitor/nodes/liveness", "cluster:monitor/state"); - private static final Automaton MANAGE_IDX_TEMPLATE_AUTOMATON = patterns("indices:admin/template/*"); - private static final Automaton MANAGE_INGEST_PIPELINE_AUTOMATON = patterns("cluster:admin/ingest/pipeline/*"); - private static final Automaton MANAGE_ROLLUP_AUTOMATON = patterns("cluster:admin/xpack/rollup/*", "cluster:monitor/xpack/rollup/*"); - private static final Automaton MANAGE_CCR_AUTOMATON = - patterns("cluster:admin/xpack/ccr/*", ClusterStateAction.NAME, HasPrivilegesAction.NAME); - private static final Automaton CREATE_SNAPSHOT_AUTOMATON = patterns(CreateSnapshotAction.NAME, SnapshotsStatusAction.NAME + "*", - GetSnapshotsAction.NAME, SnapshotsStatusAction.NAME, GetRepositoriesAction.NAME); - private static final Automaton READ_CCR_AUTOMATON = patterns(ClusterStateAction.NAME, HasPrivilegesAction.NAME); - private static final Automaton MANAGE_ILM_AUTOMATON = patterns("cluster:admin/ilm/*"); - private static final Automaton READ_ILM_AUTOMATON = patterns(GetLifecycleAction.NAME, GetStatusAction.NAME); - private static final Automaton MANAGE_SLM_AUTOMATON = - patterns("cluster:admin/slm/*", StartILMAction.NAME, StopILMAction.NAME, GetStatusAction.NAME); - private static final Automaton READ_SLM_AUTOMATON = patterns(GetSnapshotLifecycleAction.NAME, GetStatusAction.NAME); - private static final Automaton MANAGE_ENRICH_AUTOMATON = patterns("cluster:admin/xpack/enrich/*"); - - public static final ClusterPrivilege NONE = new ClusterPrivilege("none", Automatons.EMPTY); - public static final ClusterPrivilege ALL = new ClusterPrivilege("all", ALL_CLUSTER_AUTOMATON); - public static final ClusterPrivilege MONITOR = new 
ClusterPrivilege("monitor", MONITOR_AUTOMATON); - public static final ClusterPrivilege MONITOR_ML = new ClusterPrivilege("monitor_ml", MONITOR_ML_AUTOMATON); - public static final ClusterPrivilege MONITOR_DATA_FRAME = - new ClusterPrivilege("monitor_data_frame_transforms", MONITOR_DATA_FRAME_AUTOMATON); - public static final ClusterPrivilege MONITOR_WATCHER = new ClusterPrivilege("monitor_watcher", MONITOR_WATCHER_AUTOMATON); - public static final ClusterPrivilege MONITOR_ROLLUP = new ClusterPrivilege("monitor_rollup", MONITOR_ROLLUP_AUTOMATON); - public static final ClusterPrivilege MANAGE = new ClusterPrivilege("manage", MANAGE_AUTOMATON); - public static final ClusterPrivilege MANAGE_ML = new ClusterPrivilege("manage_ml", MANAGE_ML_AUTOMATON); - public static final ClusterPrivilege MANAGE_DATA_FRAME = - new ClusterPrivilege("manage_data_frame_transforms", MANAGE_DATA_FRAME_AUTOMATON); - public static final ClusterPrivilege MANAGE_TOKEN = new ClusterPrivilege("manage_token", MANAGE_TOKEN_AUTOMATON); - public static final ClusterPrivilege MANAGE_WATCHER = new ClusterPrivilege("manage_watcher", MANAGE_WATCHER_AUTOMATON); - public static final ClusterPrivilege MANAGE_ROLLUP = new ClusterPrivilege("manage_rollup", MANAGE_ROLLUP_AUTOMATON); - public static final ClusterPrivilege MANAGE_IDX_TEMPLATES = - new ClusterPrivilege("manage_index_templates", MANAGE_IDX_TEMPLATE_AUTOMATON); - public static final ClusterPrivilege MANAGE_INGEST_PIPELINES = - new ClusterPrivilege("manage_ingest_pipelines", MANAGE_INGEST_PIPELINE_AUTOMATON); - public static final ClusterPrivilege TRANSPORT_CLIENT = new ClusterPrivilege("transport_client", TRANSPORT_CLIENT_AUTOMATON); - public static final ClusterPrivilege MANAGE_SECURITY = new ClusterPrivilege("manage_security", MANAGE_SECURITY_AUTOMATON); - public static final ClusterPrivilege MANAGE_SAML = new ClusterPrivilege("manage_saml", MANAGE_SAML_AUTOMATON); - public static final ClusterPrivilege MANAGE_OIDC = new 
ClusterPrivilege("manage_oidc", MANAGE_OIDC_AUTOMATON); - public static final ClusterPrivilege MANAGE_API_KEY = new ClusterPrivilege("manage_api_key", MANAGE_API_KEY_AUTOMATON); - public static final ClusterPrivilege MANAGE_PIPELINE = new ClusterPrivilege("manage_pipeline", "cluster:admin/ingest/pipeline/*"); - public static final ClusterPrivilege MANAGE_CCR = new ClusterPrivilege("manage_ccr", MANAGE_CCR_AUTOMATON); - public static final ClusterPrivilege READ_CCR = new ClusterPrivilege("read_ccr", READ_CCR_AUTOMATON); - public static final ClusterPrivilege CREATE_SNAPSHOT = new ClusterPrivilege("create_snapshot", CREATE_SNAPSHOT_AUTOMATON); - public static final ClusterPrivilege MANAGE_ILM = new ClusterPrivilege("manage_ilm", MANAGE_ILM_AUTOMATON); - public static final ClusterPrivilege READ_ILM = new ClusterPrivilege("read_ilm", READ_ILM_AUTOMATON); - public static final ClusterPrivilege MANAGE_SLM = new ClusterPrivilege("manage_slm", MANAGE_SLM_AUTOMATON); - public static final ClusterPrivilege READ_SLM = new ClusterPrivilege("read_slm", READ_SLM_AUTOMATON); - public static final ClusterPrivilege MANAGE_ENRICH = new ClusterPrivilege("manage_enrich", MANAGE_ENRICH_AUTOMATON); - - public static final Predicate ACTION_MATCHER = ClusterPrivilege.ALL.predicate(); - - private static final Map VALUES = MapBuilder.newMapBuilder() - .put("none", NONE) - .put("all", ALL) - .put("monitor", MONITOR) - .put("monitor_ml", MONITOR_ML) - .put("monitor_data_frame_transforms", MONITOR_DATA_FRAME) - .put("monitor_watcher", MONITOR_WATCHER) - .put("monitor_rollup", MONITOR_ROLLUP) - .put("manage", MANAGE) - .put("manage_ml", MANAGE_ML) - .put("manage_data_frame_transforms", MANAGE_DATA_FRAME) - .put("manage_token", MANAGE_TOKEN) - .put("manage_watcher", MANAGE_WATCHER) - .put("manage_index_templates", MANAGE_IDX_TEMPLATES) - .put("manage_ingest_pipelines", MANAGE_INGEST_PIPELINES) - .put("transport_client", TRANSPORT_CLIENT) - .put("manage_security", MANAGE_SECURITY) - 
.put("manage_saml", MANAGE_SAML) - .put("manage_oidc", MANAGE_OIDC) - .put("manage_api_key", MANAGE_API_KEY) - .put("manage_pipeline", MANAGE_PIPELINE) - .put("manage_rollup", MANAGE_ROLLUP) - .put("manage_ccr", MANAGE_CCR) - .put("read_ccr", READ_CCR) - .put("create_snapshot", CREATE_SNAPSHOT) - .put("manage_ilm", MANAGE_ILM) - .put("read_ilm", READ_ILM) - .put("manage_slm", MANAGE_SLM) - .put("read_slm", READ_SLM) - .put("manage_enrich", MANAGE_ENRICH) - .immutableMap(); - - private static final ConcurrentHashMap, ClusterPrivilege> CACHE = new ConcurrentHashMap<>(); - - private ClusterPrivilege(String name, String... patterns) { - super(name, patterns); - } - - private ClusterPrivilege(String name, Automaton automaton) { - super(Collections.singleton(name), automaton); - } - - private ClusterPrivilege(Set name, Automaton automaton) { - super(name, automaton); - } - - public static ClusterPrivilege get(Set name) { - if (name == null || name.isEmpty()) { - return NONE; - } - return CACHE.computeIfAbsent(name, ClusterPrivilege::resolve); - } - - private static ClusterPrivilege resolve(Set name) { - final int size = name.size(); - if (size == 0) { - throw new IllegalArgumentException("empty set should not be used"); - } - - Set actions = new HashSet<>(); - Set automata = new HashSet<>(); - for (String part : name) { - part = part.toLowerCase(Locale.ROOT); - if (ACTION_MATCHER.test(part)) { - actions.add(actionToPattern(part)); - } else { - ClusterPrivilege privilege = VALUES.get(part); - if (privilege != null && size == 1) { - return privilege; - } else if (privilege != null) { - automata.add(privilege.automaton); - } else { - throw new IllegalArgumentException("unknown cluster privilege [" + name + "]. 
a privilege must be either " + - "one of the predefined fixed cluster privileges [" + - Strings.collectionToCommaDelimitedString(VALUES.entrySet()) + "] or a pattern over one of the available " + - "cluster actions"); - } - } - } - - if (actions.isEmpty() == false) { - automata.add(patterns(actions)); - } - return new ClusterPrivilege(name, Automatons.unionAndMinimize(automata)); - } - - public static Set names() { - return Collections.unmodifiableSet(VALUES.keySet()); - } +/** + * This interface represents a privilege that is used to control access to cluster level actions. + */ +public interface ClusterPrivilege { + /** + * Uses {@link ClusterPermission.Builder} to add predicate that later can be used to build a {@link ClusterPermission}. + * @param builder {@link ClusterPermission.Builder} + * @return an instance of {@link ClusterPermission.Builder} + */ + ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java new file mode 100644 index 00000000000..ff84b3fd5a8 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -0,0 +1,183 @@ +/* + * + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ * + */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction; +import org.elasticsearch.xpack.core.ilm.action.GetStatusAction; +import org.elasticsearch.xpack.core.ilm.action.StartILMAction; +import org.elasticsearch.xpack.core.ilm.action.StopILMAction; +import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; +import org.elasticsearch.xpack.core.security.action.token.RefreshTokenAction; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; +import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleAction; + +import java.util.Collections; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Translates cluster privilege names into concrete implementations + */ +public class ClusterPrivilegeResolver { + // shared automatons + private static final Set MANAGE_SECURITY_PATTERN = Collections.singleton("cluster:admin/xpack/security/*"); + private static final Set MANAGE_SAML_PATTERN = Collections.unmodifiableSet( + Sets.newHashSet("cluster:admin/xpack/security/saml/*", + InvalidateTokenAction.NAME, RefreshTokenAction.NAME)); + private static final Set MANAGE_OIDC_PATTERN = Collections.singleton("cluster:admin/xpack/security/oidc/*"); + private static final Set MANAGE_TOKEN_PATTERN = 
Collections.singleton("cluster:admin/xpack/security/token/*"); + private static final Set MANAGE_API_KEY_PATTERN = Collections.singleton("cluster:admin/xpack/security/api_key/*"); + private static final Set MONITOR_PATTERN = Collections.singleton("cluster:monitor/*"); + private static final Set MONITOR_ML_PATTERN = Collections.singleton("cluster:monitor/xpack/ml/*"); + private static final Set MONITOR_DATA_FRAME_PATTERN = Collections.singleton("cluster:monitor/data_frame/*"); + private static final Set MONITOR_WATCHER_PATTERN = Collections.singleton("cluster:monitor/xpack/watcher/*"); + private static final Set MONITOR_ROLLUP_PATTERN = Collections.singleton("cluster:monitor/xpack/rollup/*"); + private static final Set ALL_CLUSTER_PATTERN = Collections.unmodifiableSet( + Sets.newHashSet("cluster:*", "indices:admin/template/*")); + private static final Set MANAGE_ML_PATTERN = Collections.unmodifiableSet( + Sets.newHashSet("cluster:admin/xpack/ml/*", "cluster:monitor/xpack/ml/*")); + private static final Set MANAGE_DATA_FRAME_PATTERN = Collections.unmodifiableSet( + Sets.newHashSet("cluster:admin/data_frame/*", "cluster:monitor/data_frame/*")); + private static final Set MANAGE_WATCHER_PATTERN = Collections.unmodifiableSet( + Sets.newHashSet("cluster:admin/xpack/watcher/*", "cluster:monitor/xpack/watcher/*")); + private static final Set TRANSPORT_CLIENT_PATTERN = Collections.unmodifiableSet( + Sets.newHashSet("cluster:monitor/nodes/liveness", "cluster:monitor/state")); + private static final Set MANAGE_IDX_TEMPLATE_PATTERN = Collections.singleton("indices:admin/template/*"); + private static final Set MANAGE_INGEST_PIPELINE_PATTERN = Collections.singleton("cluster:admin/ingest/pipeline/*"); + private static final Set MANAGE_ROLLUP_PATTERN = Collections.unmodifiableSet( + Sets.newHashSet("cluster:admin/xpack/rollup/*", "cluster:monitor/xpack/rollup/*")); + private static final Set MANAGE_CCR_PATTERN = + 
Collections.unmodifiableSet(Sets.newHashSet("cluster:admin/xpack/ccr/*", ClusterStateAction.NAME, HasPrivilegesAction.NAME)); + private static final Set CREATE_SNAPSHOT_PATTERN = Collections.unmodifiableSet( + Sets.newHashSet(CreateSnapshotAction.NAME, SnapshotsStatusAction.NAME + "*", + GetSnapshotsAction.NAME, SnapshotsStatusAction.NAME, GetRepositoriesAction.NAME)); + private static final Set READ_CCR_PATTERN = Collections.unmodifiableSet(Sets.newHashSet(ClusterStateAction.NAME, + HasPrivilegesAction.NAME)); + private static final Set MANAGE_ILM_PATTERN = Collections.singleton("cluster:admin/ilm/*"); + private static final Set READ_ILM_PATTERN = Collections.unmodifiableSet(Sets.newHashSet(GetLifecycleAction.NAME, + GetStatusAction.NAME)); + private static final Set MANAGE_SLM_PATTERN = + Collections.unmodifiableSet(Sets.newHashSet("cluster:admin/slm/*", StartILMAction.NAME, StopILMAction.NAME, GetStatusAction.NAME)); + private static final Set READ_SLM_PATTERN = Collections.unmodifiableSet(Sets.newHashSet(GetSnapshotLifecycleAction.NAME, + GetStatusAction.NAME)); + + public static final NamedClusterPrivilege NONE = new ActionClusterPrivilege("none", Collections.emptySet(), Collections.emptySet()); + public static final NamedClusterPrivilege ALL = new ActionClusterPrivilege("all", ALL_CLUSTER_PATTERN); + public static final NamedClusterPrivilege MONITOR = new ActionClusterPrivilege("monitor", MONITOR_PATTERN); + public static final NamedClusterPrivilege MONITOR_ML = new ActionClusterPrivilege("monitor_ml", MONITOR_ML_PATTERN); + public static final NamedClusterPrivilege MONITOR_DATA_FRAME = + new ActionClusterPrivilege("monitor_data_frame_transforms", MONITOR_DATA_FRAME_PATTERN); + public static final NamedClusterPrivilege MONITOR_WATCHER = new ActionClusterPrivilege("monitor_watcher", MONITOR_WATCHER_PATTERN); + public static final NamedClusterPrivilege MONITOR_ROLLUP = new ActionClusterPrivilege("monitor_rollup", MONITOR_ROLLUP_PATTERN); + public static final 
NamedClusterPrivilege MANAGE = new ActionClusterPrivilege("manage", + ALL_CLUSTER_PATTERN, MANAGE_SECURITY_PATTERN); + public static final NamedClusterPrivilege MANAGE_ML = new ActionClusterPrivilege("manage_ml", MANAGE_ML_PATTERN); + public static final NamedClusterPrivilege MANAGE_DATA_FRAME = + new ActionClusterPrivilege("manage_data_frame_transforms", MANAGE_DATA_FRAME_PATTERN); + public static final NamedClusterPrivilege MANAGE_TOKEN = new ActionClusterPrivilege("manage_token", MANAGE_TOKEN_PATTERN); + public static final NamedClusterPrivilege MANAGE_WATCHER = new ActionClusterPrivilege("manage_watcher", MANAGE_WATCHER_PATTERN); + public static final NamedClusterPrivilege MANAGE_ROLLUP = new ActionClusterPrivilege("manage_rollup", MANAGE_ROLLUP_PATTERN); + public static final NamedClusterPrivilege MANAGE_IDX_TEMPLATES = + new ActionClusterPrivilege("manage_index_templates", MANAGE_IDX_TEMPLATE_PATTERN); + public static final NamedClusterPrivilege MANAGE_INGEST_PIPELINES = + new ActionClusterPrivilege("manage_ingest_pipelines", MANAGE_INGEST_PIPELINE_PATTERN); + public static final NamedClusterPrivilege TRANSPORT_CLIENT = new ActionClusterPrivilege("transport_client", + TRANSPORT_CLIENT_PATTERN); + public static final NamedClusterPrivilege MANAGE_SECURITY = new ActionClusterPrivilege("manage_security", MANAGE_SECURITY_PATTERN); + public static final NamedClusterPrivilege MANAGE_SAML = new ActionClusterPrivilege("manage_saml", MANAGE_SAML_PATTERN); + public static final NamedClusterPrivilege MANAGE_OIDC = new ActionClusterPrivilege("manage_oidc", MANAGE_OIDC_PATTERN); + public static final NamedClusterPrivilege MANAGE_API_KEY = new ActionClusterPrivilege("manage_api_key", MANAGE_API_KEY_PATTERN); + public static final NamedClusterPrivilege MANAGE_PIPELINE = new ActionClusterPrivilege("manage_pipeline", + Collections.singleton("cluster:admin/ingest/pipeline/*")); + public static final NamedClusterPrivilege MANAGE_CCR = new ActionClusterPrivilege("manage_ccr", 
MANAGE_CCR_PATTERN); + public static final NamedClusterPrivilege READ_CCR = new ActionClusterPrivilege("read_ccr", READ_CCR_PATTERN); + public static final NamedClusterPrivilege CREATE_SNAPSHOT = new ActionClusterPrivilege("create_snapshot", CREATE_SNAPSHOT_PATTERN); + public static final NamedClusterPrivilege MANAGE_ILM = new ActionClusterPrivilege("manage_ilm", MANAGE_ILM_PATTERN); + public static final NamedClusterPrivilege READ_ILM = new ActionClusterPrivilege("read_ilm", READ_ILM_PATTERN); + public static final NamedClusterPrivilege MANAGE_SLM = new ActionClusterPrivilege("manage_slm", MANAGE_SLM_PATTERN); + public static final NamedClusterPrivilege READ_SLM = new ActionClusterPrivilege("read_slm", READ_SLM_PATTERN); + + private static final Map VALUES = Collections.unmodifiableMap( + Stream.of( + NONE, + ALL, + MONITOR, + MONITOR_ML, + MONITOR_DATA_FRAME, + MONITOR_WATCHER, + MONITOR_ROLLUP, + MANAGE, + MANAGE_ML, + MANAGE_DATA_FRAME, + MANAGE_TOKEN, + MANAGE_WATCHER, + MANAGE_IDX_TEMPLATES, + MANAGE_INGEST_PIPELINES, + TRANSPORT_CLIENT, + MANAGE_SECURITY, + MANAGE_SAML, + MANAGE_OIDC, + MANAGE_API_KEY, + MANAGE_PIPELINE, + MANAGE_ROLLUP, + MANAGE_CCR, + READ_CCR, + CREATE_SNAPSHOT, + MANAGE_ILM, + READ_ILM, + MANAGE_SLM, + READ_SLM).collect(Collectors.toMap(cp -> cp.name(), cp -> cp))); + + /** + * Resolves a {@link NamedClusterPrivilege} from a given name if it exists. 
+ * If the name is a cluster action, then it converts the name to pattern and creates a {@link ActionClusterPrivilege} + * + * @param name either {@link ClusterPrivilegeResolver#names()} or cluster action {@link #isClusterAction(String)} + * @return instance of {@link NamedClusterPrivilege} + */ + public static NamedClusterPrivilege resolve(String name) { + name = Objects.requireNonNull(name).toLowerCase(Locale.ROOT); + if (isClusterAction(name)) { + return new ActionClusterPrivilege(name, Collections.singleton(actionToPattern(name))); + } + final NamedClusterPrivilege fixedPrivilege = VALUES.get(name); + if (fixedPrivilege != null) { + return fixedPrivilege; + } + throw new IllegalArgumentException("unknown cluster privilege [" + name + "]. a privilege must be either " + + "one of the predefined cluster privilege names [" + + Strings.collectionToCommaDelimitedString(VALUES.keySet()) + "] or a pattern over one of the available " + + "cluster actions"); + + } + + public static Set names() { + return Collections.unmodifiableSet(VALUES.keySet()); + } + + public static boolean isClusterAction(String actionName) { + return actionName.startsWith("cluster:") || actionName.startsWith("indices:admin/template/"); + } + + private static String actionToPattern(String text) { + return text + "*"; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java similarity index 52% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilege.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java index dd89c2bda70..405556671a1 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java @@ -10,18 +10,15 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.transport.TransportRequest; import java.io.IOException; import java.util.Collection; -import java.util.function.Predicate; /** - * A ConditionalClusterPrivilege is a composition of a {@link ClusterPrivilege} (that determines which actions may be executed) - * with a {@link Predicate} for a {@link TransportRequest} (that determines which requests may be executed). - * The a given execution of an action is considered to be permitted if both the action and the request are permitted. + * A ConfigurableClusterPrivilege is a form of {@link ClusterPrivilege} that can be configured by an Elasticsearch security administrator + * within a {@link org.elasticsearch.xpack.core.security.authz.RoleDescriptor}. */ -public interface ConditionalClusterPrivilege extends NamedWriteable, ToXContentFragment { +public interface ConfigurableClusterPrivilege extends NamedWriteable, ToXContentFragment, ClusterPrivilege { /** * The category under which this privilege should be rendered when output as XContent. @@ -29,17 +26,7 @@ public interface ConditionalClusterPrivilege extends NamedWriteable, ToXContentF Category getCategory(); /** - * The action-level privilege that is required by this conditional privilege. - */ - ClusterPrivilege getPrivilege(); - - /** - * The request-level privilege (as a {@link Predicate}) that is required by this conditional privilege. 
- */ - Predicate getRequestPredicate(); - - /** - * A {@link ConditionalClusterPrivilege} should generate a fragment of {@code XContent}, which consists of + * A {@link ConfigurableClusterPrivilege} should generate a fragment of {@code XContent}, which consists of * a single field name, followed by its value (which may be an object, an array, or a simple value). */ @Override @@ -47,8 +34,8 @@ public interface ConditionalClusterPrivilege extends NamedWriteable, ToXContentF /** * Categories exist for to segment privileges for the purposes of rendering to XContent. - * {@link ConditionalClusterPrivileges#toXContent(XContentBuilder, Params, Collection)} builds one XContent - * object for a collection of {@link ConditionalClusterPrivilege} instances, with the top level fields built + * {@link ConfigurableClusterPrivileges#toXContent(XContentBuilder, Params, Collection)} builds one XContent + * object for a collection of {@link ConfigurableClusterPrivilege} instances, with the top level fields built * from the categories. 
*/ enum Category { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java similarity index 80% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java index e5cfd2448aa..22ba4c1f2e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java @@ -17,7 +17,8 @@ import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.action.privilege.ApplicationPrivilegesRequest; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege.Category; +import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege.Category; import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.xcontent.XContentUtils; @@ -32,44 +33,44 @@ import java.util.Set; import java.util.function.Predicate; /** - * Static utility class for working with {@link ConditionalClusterPrivilege} instances + * Static utility class for working with {@link ConfigurableClusterPrivilege} instances */ -public final class ConditionalClusterPrivileges { +public final class ConfigurableClusterPrivileges { - public static final ConditionalClusterPrivilege[] EMPTY_ARRAY = new 
ConditionalClusterPrivilege[0]; + public static final ConfigurableClusterPrivilege[] EMPTY_ARRAY = new ConfigurableClusterPrivilege[0]; - public static final Writeable.Reader READER = - in1 -> in1.readNamedWriteable(ConditionalClusterPrivilege.class); - public static final Writeable.Writer WRITER = + public static final Writeable.Reader READER = + in1 -> in1.readNamedWriteable(ConfigurableClusterPrivilege.class); + public static final Writeable.Writer WRITER = (out1, value) -> out1.writeNamedWriteable(value); - private ConditionalClusterPrivileges() { + private ConfigurableClusterPrivileges() { } /** - * Utility method to read an array of {@link ConditionalClusterPrivilege} objects from a {@link StreamInput} + * Utility method to read an array of {@link ConfigurableClusterPrivilege} objects from a {@link StreamInput} */ - public static ConditionalClusterPrivilege[] readArray(StreamInput in) throws IOException { - return in.readArray(READER, ConditionalClusterPrivilege[]::new); + public static ConfigurableClusterPrivilege[] readArray(StreamInput in) throws IOException { + return in.readArray(READER, ConfigurableClusterPrivilege[]::new); } /** - * Utility method to write an array of {@link ConditionalClusterPrivilege} objects to a {@link StreamOutput} + * Utility method to write an array of {@link ConfigurableClusterPrivilege} objects to a {@link StreamOutput} */ - public static void writeArray(StreamOutput out, ConditionalClusterPrivilege[] privileges) throws IOException { + public static void writeArray(StreamOutput out, ConfigurableClusterPrivilege[] privileges) throws IOException { out.writeArray(WRITER, privileges); } /** * Writes a single object value to the {@code builder} that contains each of the provided privileges. 
- * The privileges are grouped according to their {@link ConditionalClusterPrivilege#getCategory() categories} + * The privileges are grouped according to their {@link ConfigurableClusterPrivilege#getCategory() categories} */ public static XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params, - Collection privileges) throws IOException { + Collection privileges) throws IOException { builder.startObject(); for (Category category : Category.values()) { builder.startObject(category.field.getPreferredName()); - for (ConditionalClusterPrivilege privilege : privileges) { + for (ConfigurableClusterPrivilege privilege : privileges) { if (category == privilege.getCategory()) { privilege.toXContent(builder, params); } @@ -83,8 +84,8 @@ public final class ConditionalClusterPrivileges { * Read a list of privileges from the parser. The parser should be positioned at the * {@link XContentParser.Token#START_OBJECT} token for the privileges value */ - public static List parse(XContentParser parser) throws IOException { - List privileges = new ArrayList<>(); + public static List parse(XContentParser parser) throws IOException { + List privileges = new ArrayList<>(); expectedToken(parser.currentToken(), parser, XContentParser.Token.START_OBJECT); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { @@ -119,15 +120,13 @@ public final class ConditionalClusterPrivileges { } /** - * The {@code ManageApplicationPrivileges} privilege is a {@link ConditionalClusterPrivilege} that grants the + * The {@code ManageApplicationPrivileges} privilege is a {@link ConfigurableClusterPrivilege} that grants the * ability to execute actions related to the management of application privileges (Get, Put, Delete) for a subset * of applications (identified by a wildcard-aware application-name). 
*/ - public static class ManageApplicationPrivileges implements ConditionalClusterPrivilege { + public static class ManageApplicationPrivileges implements ConfigurableClusterPrivilege { - private static final ClusterPrivilege PRIVILEGE = ClusterPrivilege.get( - Collections.singleton("cluster:admin/xpack/security/privilege/*") - ); + private static final Predicate ACTION_PREDICATE = Automatons.predicate("cluster:admin/xpack/security/privilege/*"); public static final String WRITEABLE_NAME = "manage-application-privileges"; private final Set applicationNames; @@ -153,16 +152,6 @@ public final class ConditionalClusterPrivileges { return Category.APPLICATION; } - @Override - public ClusterPrivilege getPrivilege() { - return PRIVILEGE; - } - - @Override - public Predicate getRequestPredicate() { - return this.requestPredicate; - } - public Collection getApplicationNames() { return Collections.unmodifiableCollection(this.applicationNames); } @@ -224,6 +213,11 @@ public final class ConditionalClusterPrivileges { return applicationNames.hashCode(); } + @Override + public ClusterPermission.Builder buildPermission(final ClusterPermission.Builder builder) { + return builder.add(this, ACTION_PREDICATE, requestPredicate); + } + private interface Fields { ParseField MANAGE = new ParseField("manage"); ParseField APPLICATIONS = new ParseField("applications"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/NamedClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/NamedClusterPrivilege.java new file mode 100644 index 00000000000..fe2e16af620 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/NamedClusterPrivilege.java @@ -0,0 +1,19 @@ +/* + * + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + * + */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; + +/** + * A {@link ClusterPrivilege} that has a name. The named cluster privileges can be referred simply by name within a + * {@link RoleDescriptor#getClusterPrivileges()}. + */ +public interface NamedClusterPrivilege extends ClusterPrivilege { + String name(); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 9e7614965d8..009a4c6bd0f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkAction; import org.elasticsearch.xpack.core.security.action.privilege.GetBuiltinPrivilegesAction; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges.ManageApplicationPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges.ManageApplicationPrivileges; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.UsernamesField; @@ -125,7 +125,7 @@ public class 
ReservedRolesStore implements BiConsumer, ActionListene .indices(".code-*", ".code_internal-*").privileges("all").build(), }, null, - new ConditionalClusterPrivilege[] { new ManageApplicationPrivileges(Collections.singleton("kibana-*")) }, + new ConfigurableClusterPrivilege[] { new ManageApplicationPrivileges(Collections.singleton("kibana-*")) }, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) .put("logstash_system", new RoleDescriptor("logstash_system", new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java index 2d038011e6c..569a392c034 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.ssl.rest; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -20,8 +19,6 @@ import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xpack.core.ssl.action.GetCertificateInfoAction; import org.elasticsearch.xpack.core.ssl.action.GetCertificateInfoAction.Response; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.GET; /** @@ -33,8 +30,7 @@ public class RestGetCertificateInfoAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new 
DeprecationLogger(LogManager.getLogger(RestGetCertificateInfoAction.class)); - public RestGetCertificateInfoAction(Settings settings, RestController controller) { - super(settings); + public RestGetCertificateInfoAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/_ssl/certificates", this, @@ -47,7 +43,7 @@ public class RestGetCertificateInfoAction extends BaseRestHandler { } @Override - protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { return channel -> new GetCertificateInfoAction.RequestBuilder(client, GetCertificateInfoAction.INSTANCE) .execute(new RestBuilderListener(channel) { @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformCheckpointStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformCheckpointStatsTests.java index 4ea93c4c519..4145d773eee 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformCheckpointStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformCheckpointStatsTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.dataframe.transforms; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.indexing.IndexerState; import java.io.IOException; @@ -16,7 +15,6 @@ public class DataFrameTransformCheckpointStatsTests extends AbstractSerializingD { public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() { return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000), - randomBoolean() ? 
null : randomFrom(IndexerState.values()), DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(), randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(), randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfigUpdateTests.java index 85e031e1f89..1cc7574be19 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformConfigUpdateTests.java @@ -87,7 +87,7 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData PivotConfigTests.randomPivotConfig(), randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), randomBoolean() ? null : Instant.now(), - randomBoolean() ? null : Version.CURRENT.toString()); + randomBoolean() ? 
null : Version.V_7_2_0.toString()); DataFrameTransformConfigUpdate update = new DataFrameTransformConfigUpdate(null, null, null, null, null); assertThat(config, equalTo(update.apply(config))); @@ -108,6 +108,7 @@ public class DataFrameTransformConfigUpdateTests extends AbstractSerializingData assertThat(updatedConfig.getSyncConfig(), equalTo(syncConfig)); assertThat(updatedConfig.getDescription(), equalTo(newDescription)); assertThat(updatedConfig.getHeaders(), equalTo(headers)); + assertThat(updatedConfig.getVersion(), equalTo(Version.CURRENT)); } public void testApplyWithSyncChange() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStatsTests.java index d9409bcec45..f438d6cfcf6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStatsTests.java @@ -17,7 +17,7 @@ public class DataFrameTransformStatsTests extends AbstractSerializingTestCase { + + @Override + protected Regression doParseInstance(XContentParser parser) throws IOException { + return Regression.fromXContent(parser, false); + } + + @Override + protected Regression createTestInstance() { + return createRandom(); + } + + public static Regression createRandom() { + Double lambda = randomBoolean() ? null : randomDoubleBetween(0.0, Double.MAX_VALUE, true); + Double gamma = randomBoolean() ? null : randomDoubleBetween(0.0, Double.MAX_VALUE, true); + Double eta = randomBoolean() ? null : randomDoubleBetween(0.001, 1.0, true); + Integer maximumNumberTrees = randomBoolean() ? null : randomIntBetween(1, 2000); + Double featureBagFraction = randomBoolean() ? null : randomDoubleBetween(0.0, 1.0, false); + String predictionFieldName = randomBoolean() ? 
null : randomAlphaOfLength(10); + return new Regression(randomAlphaOfLength(10), lambda, gamma, eta, maximumNumberTrees, featureBagFraction, + predictionFieldName); + } + + @Override + protected Writeable.Reader instanceReader() { + return Regression::new; + } + + public void testRegression_GivenNegativeLambda() { + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> new Regression("foo", -0.00001, 0.0, 0.5, 500, 0.3, "result")); + + assertThat(e.getMessage(), equalTo("[lambda] must be a non-negative double")); + } + + public void testRegression_GivenNegativeGamma() { + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> new Regression("foo", 0.0, -0.00001, 0.5, 500, 0.3, "result")); + + assertThat(e.getMessage(), equalTo("[gamma] must be a non-negative double")); + } + + public void testRegression_GivenEtaIsZero() { + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> new Regression("foo", 0.0, 0.0, 0.0, 500, 0.3, "result")); + + assertThat(e.getMessage(), equalTo("[eta] must be a double in [0.001, 1]")); + } + + public void testRegression_GivenEtaIsGreaterThanOne() { + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> new Regression("foo", 0.0, 0.0, 1.00001, 500, 0.3, "result")); + + assertThat(e.getMessage(), equalTo("[eta] must be a double in [0.001, 1]")); + } + + public void testRegression_GivenMaximumNumberTreesIsZero() { + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> new Regression("foo", 0.0, 0.0, 0.5, 0, 0.3, "result")); + + assertThat(e.getMessage(), equalTo("[maximum_number_trees] must be an integer in [1, 2000]")); + } + + public void testRegression_GivenMaximumNumberTreesIsGreaterThan2k() { + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> new Regression("foo", 0.0, 0.0, 0.5, 2001, 0.3, "result")); + + 
assertThat(e.getMessage(), equalTo("[maximum_number_trees] must be an integer in [1, 2000]")); + } + + public void testRegression_GivenFeatureBagFractionIsLessThanZero() { + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> new Regression("foo", 0.0, 0.0, 0.5, 500, -0.00001, "result")); + + assertThat(e.getMessage(), equalTo("[feature_bag_fraction] must be a double in (0, 1]")); + } + + public void testRegression_GivenFeatureBagFractionIsGreaterThanOne() { + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> new Regression("foo", 0.0, 0.0, 0.5, 500, 1.00001, "result")); + + assertThat(e.getMessage(), equalTo("[feature_bag_fraction] must be a double in (0, 1]")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java index 11daf8e29a3..731109c523b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import java.io.IOException; import java.util.Arrays; @@ -189,7 +189,7 @@ public class PutRoleRequestTests extends ESTestCase { if (randomBoolean()) { final String[] appNames = randomArray(1, 4, String[]::new, stringWithInitialLowercase); - request.conditionalCluster(new 
ConditionalClusterPrivileges.ManageApplicationPrivileges(Sets.newHashSet(appNames))); + request.conditionalCluster(new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Sets.newHashSet(appNames))); } request.runAs(generateRandomStringArray(4, 3, false, true)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java index d9d7f559c4b..36c1167a06c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java @@ -21,8 +21,8 @@ import org.elasticsearch.test.EqualsHashCodeTestUtils; import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition.FieldGrantExcludeGroup; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges.ManageApplicationPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges.ManageApplicationPrivileges; import java.io.IOException; import java.util.ArrayList; @@ -75,7 +75,7 @@ public class GetUserPrivilegesResponseTests extends ESTestCase { public GetUserPrivilegesResponse mutate(GetUserPrivilegesResponse original) { final int random = randomIntBetween(1, 0b11111); final Set cluster = maybeMutate(random, 0, original.getClusterPrivileges(), () -> randomAlphaOfLength(5)); - final Set conditionalCluster = maybeMutate(random, 
1, + final Set conditionalCluster = maybeMutate(random, 1, original.getConditionalClusterPrivileges(), () -> new ManageApplicationPrivileges(randomStringSet(3))); final Set index = maybeMutate(random, 2, original.getIndexPrivileges(), () -> new GetUserPrivilegesResponse.Indices(randomStringSet(1), randomStringSet(1), emptySet(), emptySet(), @@ -103,7 +103,7 @@ public class GetUserPrivilegesResponseTests extends ESTestCase { private GetUserPrivilegesResponse randomResponse() { final Set cluster = randomStringSet(5); - final Set conditionalCluster = Sets.newHashSet(randomArray(3, ConditionalClusterPrivilege[]::new, + final Set conditionalCluster = Sets.newHashSet(randomArray(3, ConfigurableClusterPrivilege[]::new, () -> new ManageApplicationPrivileges( randomStringSet(3) ))); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestTests.java index 32ff134002d..6148e6fed71 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import java.io.IOException; import java.util.Arrays; @@ -98,9 +98,11 @@ public class HasPrivilegesRequestTests extends ESTestCase { final HasPrivilegesRequest request = new HasPrivilegesRequest(); request.username(randomAlphaOfLength(8)); 
- final List clusterPrivileges = randomSubsetOf(Arrays.asList(ClusterPrivilege.MONITOR, ClusterPrivilege.MANAGE, - ClusterPrivilege.MANAGE_ML, ClusterPrivilege.MANAGE_SECURITY, ClusterPrivilege.MANAGE_PIPELINE, ClusterPrivilege.ALL)) - .stream().flatMap(p -> p.name().stream()).collect(Collectors.toList()); + final List clusterPrivileges = randomSubsetOf(Arrays.asList(ClusterPrivilegeResolver.MONITOR, + ClusterPrivilegeResolver.MANAGE, + ClusterPrivilegeResolver.MANAGE_ML, ClusterPrivilegeResolver.MANAGE_SECURITY, ClusterPrivilegeResolver.MANAGE_PIPELINE, + ClusterPrivilegeResolver.ALL)) + .stream().map(p -> p.name()).collect(Collectors.toList()); request.clusterPrivileges(clusterPrivileges.toArray(Strings.EMPTY_ARRAY)); IndicesPrivileges[] indicesPrivileges = new IndicesPrivileges[randomInt(5)]; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermissionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermissionTests.java new file mode 100644 index 00000000000..2e7a8878d86 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermissionTests.java @@ -0,0 +1,282 @@ +/* + * + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ * + */ + +package org.elasticsearch.xpack.core.security.authz.permission; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; +import org.junit.Before; +import org.mockito.Mockito; + +import java.io.IOException; +import java.util.Collections; +import java.util.Objects; +import java.util.Set; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.is; + +public class ClusterPermissionTests extends ESTestCase { + private TransportRequest mockTransportRequest = Mockito.mock(TransportRequest.class); + private ClusterPrivilege cpThatDoesNothing = new ClusterPrivilege() { + @Override + public ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder) { + return builder; + } + }; + + @Before + public void setup() { + mockTransportRequest = Mockito.mock(TransportRequest.class); + } + + public void testClusterPermissionBuilder() { + ClusterPermission.Builder builder = ClusterPermission.builder(); + assertNotNull(builder); + assertThat(builder.build(), is(ClusterPermission.NONE)); + + builder = ClusterPrivilegeResolver.MANAGE_SECURITY.buildPermission(builder); + builder = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege1 = + new MockConfigurableClusterPrivilege(r -> r == mockTransportRequest); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege2 = + new MockConfigurableClusterPrivilege(r -> 
false); + builder = mockConfigurableClusterPrivilege1.buildPermission(builder); + builder = mockConfigurableClusterPrivilege2.buildPermission(builder); + final ClusterPermission clusterPermission = builder.build(); + + assertNotNull(clusterPermission); + assertNotNull(clusterPermission.privileges()); + final Set privileges = clusterPermission.privileges(); + assertNotNull(privileges); + assertThat(privileges.size(), is(4)); + assertThat(privileges, containsInAnyOrder(ClusterPrivilegeResolver.MANAGE_SECURITY, ClusterPrivilegeResolver.MANAGE_ILM, + mockConfigurableClusterPrivilege1, mockConfigurableClusterPrivilege2)); + } + + public void testClusterPermissionCheck() { + ClusterPermission.Builder builder = ClusterPermission.builder(); + builder = ClusterPrivilegeResolver.MANAGE_SECURITY.buildPermission(builder); + builder = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder); + + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege1 = + new MockConfigurableClusterPrivilege(r -> r == mockTransportRequest); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege2 = + new MockConfigurableClusterPrivilege(r -> false); + builder = mockConfigurableClusterPrivilege1.buildPermission(builder); + builder = mockConfigurableClusterPrivilege2.buildPermission(builder); + final ClusterPermission clusterPermission = builder.build(); + + assertThat(clusterPermission.check("cluster:admin/xpack/security/token/invalidate", mockTransportRequest), is(true)); + assertThat(clusterPermission.check("cluster:admin/ilm/stop", mockTransportRequest), is(true)); + assertThat(clusterPermission.check("cluster:admin/xpack/security/privilege/get", mockTransportRequest), is(true)); + assertThat(clusterPermission.check("cluster:admin/snapshot/status", mockTransportRequest), is(false)); + } + + public void testClusterPermissionCheckWithEmptyActionPatterns() { + final ClusterPermission.Builder builder = ClusterPermission.builder(); + 
builder.add(cpThatDoesNothing, Collections.emptySet(), Collections.emptySet()); + final ClusterPermission clusterPermission = builder.build(); + + assertThat(clusterPermission.check("cluster:admin/ilm/start", mockTransportRequest), is(false)); + assertThat(clusterPermission.check("cluster:admin/xpack/security/token/invalidate", mockTransportRequest), is(false)); + } + + public void testClusterPermissionCheckWithExcludeOnlyActionPatterns() { + final ClusterPermission.Builder builder = ClusterPermission.builder(); + builder.add(cpThatDoesNothing, Collections.emptySet(), Collections.singleton("cluster:some/thing/to/exclude")); + final ClusterPermission clusterPermission = builder.build(); + + assertThat(clusterPermission.check("cluster:admin/ilm/start", mockTransportRequest), is(false)); + assertThat(clusterPermission.check("cluster:admin/xpack/security/token/invalidate", mockTransportRequest), is(false)); + } + + public void testClusterPermissionCheckWithActionPatterns() { + final ClusterPermission.Builder builder = ClusterPermission.builder(); + builder.add(cpThatDoesNothing, Collections.singleton("cluster:admin/*"), Collections.singleton("cluster:admin/ilm/*")); + final ClusterPermission clusterPermission = builder.build(); + + assertThat(clusterPermission.check("cluster:admin/ilm/start", mockTransportRequest), is(false)); + assertThat(clusterPermission.check("cluster:admin/xpack/security/token/invalidate", mockTransportRequest), is(true)); + } + + public void testClusterPermissionCheckWithActionPatternsAndNoExludePatterns() { + final ClusterPermission.Builder builder = ClusterPermission.builder(); + builder.add(cpThatDoesNothing, Collections.singleton("cluster:admin/*"), Collections.emptySet()); + final ClusterPermission clusterPermission = builder.build(); + + assertThat(clusterPermission.check("cluster:admin/ilm/start", mockTransportRequest), is(true)); + assertThat(clusterPermission.check("cluster:admin/xpack/security/token/invalidate", mockTransportRequest), 
is(true)); + } + + public void testNoneClusterPermissionIsImpliedByNone() { + assertThat(ClusterPermission.NONE.implies(ClusterPermission.NONE), is(true)); + } + + public void testNoneClusterPermissionIsImpliedByAny() { + ClusterPermission.Builder builder = ClusterPermission.builder(); + builder = ClusterPrivilegeResolver.MANAGE_SECURITY.buildPermission(builder); + builder = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege1 = + new MockConfigurableClusterPrivilege(r -> r == mockTransportRequest); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege2 = + new MockConfigurableClusterPrivilege(r -> false); + builder = mockConfigurableClusterPrivilege1.buildPermission(builder); + builder = mockConfigurableClusterPrivilege2.buildPermission(builder); + final ClusterPermission clusterPermission = builder.build(); + + assertThat(clusterPermission.implies(ClusterPermission.NONE), is(true)); + } + + public void testClusterPermissionSubsetWithConfigurableClusterPrivilegeIsImpliedByClusterPermission() { + ClusterPermission.Builder builder = ClusterPermission.builder(); + builder = ClusterPrivilegeResolver.MANAGE_ML.buildPermission(builder); + builder = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege1 = + new MockConfigurableClusterPrivilege(r -> r == mockTransportRequest); + builder = mockConfigurableClusterPrivilege1.buildPermission(builder); + final ClusterPermission clusterPermission = builder.build(); + + ClusterPermission.Builder builder1 = ClusterPermission.builder(); + builder1 = ClusterPrivilegeResolver.MANAGE_ML.buildPermission(builder1); + builder1 = mockConfigurableClusterPrivilege1.buildPermission(builder1); + final ClusterPermission otherClusterPermission = builder1.build(); + + assertThat(clusterPermission.implies(otherClusterPermission), is(true)); + } + + public void 
testClusterPermissionNonSubsetWithConfigurableClusterPrivilegeIsImpliedByClusterPermission() { + ClusterPermission.Builder builder = ClusterPermission.builder(); + builder = ClusterPrivilegeResolver.MANAGE_ML.buildPermission(builder); + builder = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege1 = + new MockConfigurableClusterPrivilege(r -> r == mockTransportRequest); + builder = mockConfigurableClusterPrivilege1.buildPermission(builder); + final ClusterPermission clusterPermission = builder.build(); + + ClusterPermission.Builder builder1 = ClusterPermission.builder(); + builder1 = ClusterPrivilegeResolver.MANAGE_ML.buildPermission(builder1); + builder1 = mockConfigurableClusterPrivilege1.buildPermission(builder1); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege2 = + new MockConfigurableClusterPrivilege(r -> false); + builder1 = mockConfigurableClusterPrivilege2.buildPermission(builder1); + final ClusterPermission otherClusterPermission = builder1.build(); + + assertThat(clusterPermission.implies(otherClusterPermission), is(false)); + } + + public void testClusterPermissionNonSubsetIsNotImpliedByClusterPermission() { + ClusterPermission.Builder builder = ClusterPermission.builder(); + builder = ClusterPrivilegeResolver.MANAGE_ML.buildPermission(builder); + builder = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder); + final ClusterPermission clusterPermission = builder.build(); + + ClusterPermission.Builder builder1 = ClusterPermission.builder(); + builder1 = ClusterPrivilegeResolver.MANAGE_API_KEY.buildPermission(builder1); + final ClusterPermission otherClusterPermission = builder1.build(); + + assertThat(clusterPermission.implies(otherClusterPermission), is(false)); + } + + public void testClusterPermissionSubsetIsImpliedByClusterPermission() { + ClusterPermission.Builder builder = ClusterPermission.builder(); + builder = 
ClusterPrivilegeResolver.MANAGE_ML.buildPermission(builder); + builder = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder); + final ClusterPermission clusterPermission = builder.build(); + + ClusterPermission.Builder builder1 = ClusterPermission.builder(); + builder1 = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder1); + final ClusterPermission otherClusterPermission = builder1.build(); + + assertThat(clusterPermission.implies(otherClusterPermission), is(true)); + } + + public void testClusterPermissionIsImpliedBySameClusterPermission() { + ClusterPermission.Builder builder = ClusterPermission.builder(); + builder = ClusterPrivilegeResolver.MANAGE_ML.buildPermission(builder); + builder = ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(builder); + final MockConfigurableClusterPrivilege mockConfigurableClusterPrivilege1 = + new MockConfigurableClusterPrivilege(r -> r == mockTransportRequest); + builder = mockConfigurableClusterPrivilege1.buildPermission(builder); + final ClusterPermission clusterPermission = builder.build(); + + assertThat(clusterPermission.implies(clusterPermission), is(true)); + } + + public void testClusterPermissionSubsetIsImpliedByAllClusterPermission() { + final ClusterPermission allClusterPermission = ClusterPrivilegeResolver.ALL.buildPermission(ClusterPermission.builder()).build(); + ClusterPermission otherClusterPermission = + ClusterPrivilegeResolver.MANAGE_ILM.buildPermission(ClusterPermission.builder()).build(); + + assertThat(allClusterPermission.implies(otherClusterPermission), is(true)); + } + + private static class MockConfigurableClusterPrivilege implements ConfigurableClusterPrivilege { + static final Predicate ACTION_PREDICATE = Automatons.predicate("cluster:admin/xpack/security/privilege/*"); + private Predicate requestPredicate; + + MockConfigurableClusterPrivilege(Predicate requestPredicate) { + this.requestPredicate = requestPredicate; + } + + @Override + public Category getCategory() { + return 
Category.APPLICATION; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder; + } + + @Override + public String getWriteableName() { + return "mock-ccp"; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final MockConfigurableClusterPrivilege that = (MockConfigurableClusterPrivilege) o; + return requestPredicate.equals(that.requestPredicate); + } + + @Override + public int hashCode() { + return Objects.hash(requestPredicate); + } + + @Override + public String toString() { + return "MockConfigurableClusterPrivilege{" + + "requestPredicate=" + requestPredicate + + '}'; + } + + @Override + public ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder) { + return builder.add(this, ACTION_PREDICATE, requestPredicate); + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java index 121f40c44d9..4bcc581d072 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import 
org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.junit.Before; @@ -200,27 +200,27 @@ public class LimitedRoleTests extends ESTestCase { public void testCheckClusterPrivilege() { Role fromRole = Role.builder("a-role").cluster(Collections.singleton("manage_security"), Collections.emptyList()) .build(); - assertThat(fromRole.grants(ClusterPrivilege.ALL), is(false)); - assertThat(fromRole.grants(ClusterPrivilege.MANAGE_SECURITY), is(true)); + assertThat(fromRole.grants(ClusterPrivilegeResolver.ALL), is(false)); + assertThat(fromRole.grants(ClusterPrivilegeResolver.MANAGE_SECURITY), is(true)); { Role limitedByRole = Role.builder("scoped-role") .cluster(Collections.singleton("all"), Collections.emptyList()).build(); - assertThat(limitedByRole.grants(ClusterPrivilege.ALL), is(true)); - assertThat(limitedByRole.grants(ClusterPrivilege.MANAGE_SECURITY), is(true)); + assertThat(limitedByRole.grants(ClusterPrivilegeResolver.ALL), is(true)); + assertThat(limitedByRole.grants(ClusterPrivilegeResolver.MANAGE_SECURITY), is(true)); Role role = LimitedRole.createLimitedRole(fromRole, limitedByRole); - assertThat(role.grants(ClusterPrivilege.ALL), is(false)); - assertThat(role.grants(ClusterPrivilege.MANAGE_SECURITY), is(true)); + assertThat(role.grants(ClusterPrivilegeResolver.ALL), is(false)); + assertThat(role.grants(ClusterPrivilegeResolver.MANAGE_SECURITY), is(true)); } { Role limitedByRole = Role.builder("scoped-role") .cluster(Collections.singleton("monitor"), Collections.emptyList()).build(); - assertThat(limitedByRole.grants(ClusterPrivilege.ALL), is(false)); - assertThat(limitedByRole.grants(ClusterPrivilege.MONITOR), is(true)); + assertThat(limitedByRole.grants(ClusterPrivilegeResolver.ALL), is(false)); + assertThat(limitedByRole.grants(ClusterPrivilegeResolver.MONITOR), is(true)); Role role = LimitedRole.createLimitedRole(fromRole, limitedByRole); - 
assertThat(role.grants(ClusterPrivilege.ALL), is(false)); - assertThat(role.grants(ClusterPrivilege.MANAGE_SECURITY), is(false)); - assertThat(role.grants(ClusterPrivilege.MONITOR), is(false)); + assertThat(role.grants(ClusterPrivilegeResolver.ALL), is(false)); + assertThat(role.grants(ClusterPrivilegeResolver.MANAGE_SECURITY), is(false)); + assertThat(role.grants(ClusterPrivilegeResolver.MONITOR), is(false)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java similarity index 75% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilegesTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java index ebcd70869cb..2ba495bce98 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilegesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java @@ -27,15 +27,15 @@ import java.util.List; import static org.elasticsearch.common.xcontent.DeprecationHandler.THROW_UNSUPPORTED_OPERATION; import static org.hamcrest.Matchers.equalTo; -public class ConditionalClusterPrivilegesTests extends ESTestCase { +public class ConfigurableClusterPrivilegesTests extends ESTestCase { public void testSerialization() throws Exception { - final ConditionalClusterPrivilege[] original = buildSecurityPrivileges(); + final ConfigurableClusterPrivilege[] original = buildSecurityPrivileges(); try (BytesStreamOutput out = new BytesStreamOutput()) { - ConditionalClusterPrivileges.writeArray(out, original); + ConfigurableClusterPrivileges.writeArray(out, original); final 
NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin(Settings.EMPTY).getNamedWriteables()); try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) { - final ConditionalClusterPrivilege[] copy = ConditionalClusterPrivileges.readArray(in); + final ConfigurableClusterPrivilege[] copy = ConfigurableClusterPrivileges.readArray(in); assertThat(copy, equalTo(original)); assertThat(original, equalTo(copy)); } @@ -47,26 +47,26 @@ public class ConditionalClusterPrivilegesTests extends ESTestCase { try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { final XContentBuilder builder = new XContentBuilder(xContent, out); - final List original = Arrays.asList(buildSecurityPrivileges()); - ConditionalClusterPrivileges.toXContent(builder, ToXContent.EMPTY_PARAMS, original); + final List original = Arrays.asList(buildSecurityPrivileges()); + ConfigurableClusterPrivileges.toXContent(builder, ToXContent.EMPTY_PARAMS, original); builder.flush(); final byte[] bytes = out.toByteArray(); try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, THROW_UNSUPPORTED_OPERATION, bytes)) { assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - final List clone = ConditionalClusterPrivileges.parse(parser); + final List clone = ConfigurableClusterPrivileges.parse(parser); assertThat(clone, equalTo(original)); assertThat(original, equalTo(clone)); } } } - private ConditionalClusterPrivilege[] buildSecurityPrivileges() { + private ConfigurableClusterPrivilege[] buildSecurityPrivileges() { return buildSecurityPrivileges(randomIntBetween(4, 7)); } - private ConditionalClusterPrivilege[] buildSecurityPrivileges(int applicationNameLength) { - return new ConditionalClusterPrivilege[] { + private ConfigurableClusterPrivilege[] buildSecurityPrivileges(int applicationNameLength) { + return new ConfigurableClusterPrivilege[] { 
ManageApplicationPrivilegesTests.buildPrivileges(applicationNameLength) }; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java index 9c113d4ff0f..dfe1147fb2c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java @@ -20,19 +20,12 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; -import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.XPackClientPlugin; -import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest; -import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesRequest; -import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequest; -import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; -import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; -import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges.ManageApplicationPrivileges; +import 
org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges.ManageApplicationPrivileges; import java.io.ByteArrayOutputStream; import java.util.ArrayList; @@ -42,13 +35,9 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Set; -import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.DeprecationHandler.THROW_UNSUPPORTED_OPERATION; -import static org.elasticsearch.test.TestMatchers.predicateMatches; -import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -100,34 +89,22 @@ public class ManageApplicationPrivilegesTests extends ESTestCase { EqualsHashCodeTestUtils.checkEqualsAndHashCode(privileges, this::clone, mutate); } - public void testPrivilege() { - final ManageApplicationPrivileges privileges = buildPrivileges(); - assertThat(privileges.getPrivilege(), instanceOf(ClusterPrivilege.class)); - for (String actionName : Arrays.asList(GetPrivilegesAction.NAME, PutPrivilegesAction.NAME, DeletePrivilegesAction.NAME)) { - assertThat(privileges.getPrivilege().predicate(), predicateMatches(actionName)); - } - for (String actionName : Arrays.asList(GetUsersAction.NAME, PutRoleAction.NAME, DeleteRoleMappingAction.NAME, - HasPrivilegesAction.NAME)) { - assertThat(privileges.getPrivilege().predicate(), not(predicateMatches(actionName))); - } - } - - public void testRequestPredicate() { + public void testActionAndRequestPredicate() { final ManageApplicationPrivileges kibanaAndLogstash = new ManageApplicationPrivileges(Sets.newHashSet("kibana-*", "logstash")); final ManageApplicationPrivileges cloudAndSwiftype = new ManageApplicationPrivileges(Sets.newHashSet("cloud-*", "swiftype")); - final Predicate 
kibanaAndLogstashPredicate = kibanaAndLogstash.getRequestPredicate(); - final Predicate cloudAndSwiftypePredicate = cloudAndSwiftype.getRequestPredicate(); - assertThat(kibanaAndLogstashPredicate, notNullValue()); - assertThat(cloudAndSwiftypePredicate, notNullValue()); + final ClusterPermission kibanaAndLogstashPermission = kibanaAndLogstash.buildPermission(ClusterPermission.builder()).build(); + final ClusterPermission cloudAndSwiftypePermission = cloudAndSwiftype.buildPermission(ClusterPermission.builder()).build(); + assertThat(kibanaAndLogstashPermission, notNullValue()); + assertThat(cloudAndSwiftypePermission, notNullValue()); final GetPrivilegesRequest getKibana1 = new GetPrivilegesRequest(); getKibana1.application("kibana-1"); - assertThat(kibanaAndLogstashPredicate, predicateMatches(getKibana1)); - assertThat(cloudAndSwiftypePredicate, not(predicateMatches(getKibana1))); + assertTrue(kibanaAndLogstashPermission.check("cluster:admin/xpack/security/privilege/get", getKibana1)); + assertFalse(cloudAndSwiftypePermission.check("cluster:admin/xpack/security/privilege/get", getKibana1)); final DeletePrivilegesRequest deleteLogstash = new DeletePrivilegesRequest("logstash", new String[]{"all"}); - assertThat(kibanaAndLogstashPredicate, predicateMatches(deleteLogstash)); - assertThat(cloudAndSwiftypePredicate, not(predicateMatches(deleteLogstash))); + assertTrue(kibanaAndLogstashPermission.check("cluster:admin/xpack/security/privilege/get", deleteLogstash)); + assertFalse(cloudAndSwiftypePermission.check("cluster:admin/xpack/security/privilege/get", deleteLogstash)); final PutPrivilegesRequest putKibana = new PutPrivilegesRequest(); @@ -137,8 +114,8 @@ public class ManageApplicationPrivilegesTests extends ESTestCase { randomAlphaOfLengthBetween(3, 6).toLowerCase(Locale.ROOT), Collections.emptySet(), Collections.emptyMap())); } putKibana.setPrivileges(kibanaPrivileges); - assertThat(kibanaAndLogstashPredicate, predicateMatches(putKibana)); - 
assertThat(cloudAndSwiftypePredicate, not(predicateMatches(putKibana))); + assertTrue(kibanaAndLogstashPermission.check("cluster:admin/xpack/security/privilege/get", putKibana)); + assertFalse(cloudAndSwiftypePermission.check("cluster:admin/xpack/security/privilege/get", putKibana)); } public void testSecurityForGetAllApplicationPrivileges() { @@ -151,8 +128,10 @@ public class ManageApplicationPrivilegesTests extends ESTestCase { final ManageApplicationPrivileges kibanaOnly = new ManageApplicationPrivileges(Sets.newHashSet("kibana-*")); final ManageApplicationPrivileges allApps = new ManageApplicationPrivileges(Sets.newHashSet("*")); - assertThat(kibanaOnly.getRequestPredicate(), not(predicateMatches(getAll))); - assertThat(allApps.getRequestPredicate(), predicateMatches(getAll)); + final ClusterPermission kibanaOnlyPermission = kibanaOnly.buildPermission(ClusterPermission.builder()).build(); + final ClusterPermission allAppsPermission = allApps.buildPermission(ClusterPermission.builder()).build(); + assertFalse(kibanaOnlyPermission.check("cluster:admin/xpack/security/privilege/get", getAll)); + assertTrue(allAppsPermission.check("cluster:admin/xpack/security/privilege/get", getAll)); } private ManageApplicationPrivileges clone(ManageApplicationPrivileges original) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 920cc26a6ff..489e31ed1f7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -13,9 +13,12 @@ import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; import 
org.elasticsearch.xpack.core.enrich.action.ListEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; import org.elasticsearch.xpack.core.security.support.Automatons; import org.junit.Rule; import org.junit.rules.ExpectedException; +import org.mockito.Mockito; import java.util.Set; import java.util.function.Predicate; @@ -34,55 +37,78 @@ public class PrivilegeTests extends ESTestCase { assertThat(predicate.test("bar[n][nodes]"), is(false)); assertThat(predicate.test("[n][nodes]"), is(false)); } - + private void verifyClusterActionAllowed(ClusterPrivilege clusterPrivilege, String... actions) { + ClusterPermission clusterPermission = clusterPrivilege.buildPermission(ClusterPermission.builder()).build(); + for (String action: actions) { + assertTrue(clusterPermission.check(action, Mockito.mock(TransportRequest.class))); + } + } + private void verifyClusterActionDenied(ClusterPrivilege clusterPrivilege, String... 
actions) { + ClusterPermission clusterPermission = clusterPrivilege.buildPermission(ClusterPermission.builder()).build(); + for (String action: actions) { + assertFalse(clusterPermission.check(action, Mockito.mock(TransportRequest.class))); + } + } public void testCluster() throws Exception { - Set name = Sets.newHashSet("monitor"); - ClusterPrivilege cluster = ClusterPrivilege.get(name); - assertThat(cluster, is(ClusterPrivilege.MONITOR)); + ClusterPrivilege allClusterPrivilege = ClusterPrivilegeResolver.resolve("all"); + assertThat(allClusterPrivilege, is(ClusterPrivilegeResolver.ALL)); + verifyClusterActionAllowed(allClusterPrivilege, "cluster:admin/xpack/security/*"); - // since "all" implies "monitor", this should be the same language as All - name = Sets.newHashSet("monitor", "all"); - cluster = ClusterPrivilege.get(name); - assertTrue(Operations.sameLanguage(ClusterPrivilege.ALL.automaton, cluster.automaton)); + ClusterPrivilege monitorClusterPrivilege = ClusterPrivilegeResolver.resolve("monitor"); + assertThat(monitorClusterPrivilege, is(ClusterPrivilegeResolver.MONITOR)); + verifyClusterActionAllowed(monitorClusterPrivilege, "cluster:monitor/*"); + verifyClusterActionDenied(monitorClusterPrivilege, "cluster:admin/xpack/security/*"); - name = Sets.newHashSet("monitor", "none"); - cluster = ClusterPrivilege.get(name); - assertTrue(Operations.sameLanguage(ClusterPrivilege.MONITOR.automaton, cluster.automaton)); + ClusterPrivilege noneClusterPrivilege = ClusterPrivilegeResolver.resolve("none"); + assertThat(noneClusterPrivilege, is(ClusterPrivilegeResolver.NONE)); + verifyClusterActionDenied(noneClusterPrivilege, "cluster:admin/xpack/security/*"); + verifyClusterActionDenied(noneClusterPrivilege, "cluster:monitor/*"); + verifyClusterActionDenied(noneClusterPrivilege, "*"); - Set name2 = Sets.newHashSet("none", "monitor"); - ClusterPrivilege cluster2 = ClusterPrivilege.get(name2); - assertThat(cluster, is(cluster2)); + ClusterPermission monitorClusterPermission 
= monitorClusterPrivilege.buildPermission(ClusterPermission.builder()).build(); + ClusterPermission allClusterPermission = allClusterPrivilege.buildPermission(ClusterPermission.builder()).build(); + + // all implies monitor + assertTrue(allClusterPermission.implies(monitorClusterPermission)); + + ClusterPermission.Builder builder = ClusterPermission.builder(); + builder = allClusterPrivilege.buildPermission(builder); + builder = noneClusterPrivilege.buildPermission(builder); + ClusterPermission combinedPermission = builder.build(); + assertTrue(combinedPermission.implies(monitorClusterPermission)); } public void testClusterTemplateActions() throws Exception { - Set name = Sets.newHashSet("indices:admin/template/delete"); - ClusterPrivilege cluster = ClusterPrivilege.get(name); - assertThat(cluster, notNullValue()); - assertThat(cluster.predicate().test("indices:admin/template/delete"), is(true)); + ClusterPrivilege clusterPrivilegeTemplateDelete = ClusterPrivilegeResolver.resolve("indices:admin/template/delete"); + assertThat(clusterPrivilegeTemplateDelete, notNullValue()); + verifyClusterActionAllowed(clusterPrivilegeTemplateDelete, "indices:admin/template/delete"); + verifyClusterActionDenied(clusterPrivilegeTemplateDelete, "indices:admin/template/get", "indices:admin/template/put"); - name = Sets.newHashSet("indices:admin/template/get"); - cluster = ClusterPrivilege.get(name); - assertThat(cluster, notNullValue()); - assertThat(cluster.predicate().test("indices:admin/template/get"), is(true)); + ClusterPrivilege clusterPrivilegeTemplateGet = ClusterPrivilegeResolver.resolve("indices:admin/template/get"); + assertThat(clusterPrivilegeTemplateGet, notNullValue()); + verifyClusterActionAllowed(clusterPrivilegeTemplateGet, "indices:admin/template/get"); + verifyClusterActionDenied(clusterPrivilegeTemplateGet, "indices:admin/template/delete", "indices:admin/template/put"); - name = Sets.newHashSet("indices:admin/template/put"); - cluster = ClusterPrivilege.get(name); 
- assertThat(cluster, notNullValue()); - assertThat(cluster.predicate().test("indices:admin/template/put"), is(true)); + ClusterPrivilege clusterPrivilegeTemplatePut = ClusterPrivilegeResolver.resolve("indices:admin/template/put"); + assertThat(clusterPrivilegeTemplatePut, notNullValue()); + verifyClusterActionAllowed(clusterPrivilegeTemplatePut, "indices:admin/template/put"); + verifyClusterActionDenied(clusterPrivilegeTemplatePut, "indices:admin/template/get", "indices:admin/template/delete"); } public void testClusterInvalidName() throws Exception { thrown.expect(IllegalArgumentException.class); - Set actionName = Sets.newHashSet("foobar"); - ClusterPrivilege.get(actionName); + ClusterPrivilegeResolver.resolve("foobar"); + } public void testClusterAction() throws Exception { - Set actionName = Sets.newHashSet("cluster:admin/snapshot/delete"); - ClusterPrivilege cluster = ClusterPrivilege.get(actionName); - assertThat(cluster, notNullValue()); - assertThat(cluster.predicate().test("cluster:admin/snapshot/delete"), is(true)); - assertThat(cluster.predicate().test("cluster:admin/snapshot/dele"), is(false)); + // ClusterPrivilegeResolver.resolve() for a cluster action converts action name into a pattern by adding "*" + ClusterPrivilege clusterPrivilegeSnapshotDelete = ClusterPrivilegeResolver.resolve("cluster:admin/snapshot/delete"); + assertThat(clusterPrivilegeSnapshotDelete, notNullValue()); + verifyClusterActionAllowed(clusterPrivilegeSnapshotDelete, "cluster:admin/snapshot/delete", "cluster:admin/snapshot/delete[n]", + "cluster:admin/snapshot/delete/non-existing"); + verifyClusterActionDenied(clusterPrivilegeSnapshotDelete, "cluster:admin/snapshot/dele", "cluster:admin/snapshot/dele[n]", + "cluster:admin/snapshot/dele/non-existing"); } public void testIndexAction() throws Exception { @@ -149,11 +175,10 @@ public class PrivilegeTests extends ESTestCase { } public void testManageCcrPrivilege() { - Predicate predicate = ClusterPrivilege.MANAGE_CCR.predicate(); - 
assertThat(predicate.test("cluster:admin/xpack/ccr/follow_index"), is(true)); - assertThat(predicate.test("cluster:admin/xpack/ccr/unfollow_index"), is(true)); - assertThat(predicate.test("cluster:admin/xpack/ccr/brand_new_api"), is(true)); - assertThat(predicate.test("cluster:admin/xpack/whatever"), is(false)); + verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_CCR, "cluster:admin/xpack/ccr/follow_index", + "cluster:admin/xpack/ccr/unfollow_index", "cluster:admin/xpack/ccr/brand_new_api"); + verifyClusterActionDenied(ClusterPrivilegeResolver.MANAGE_CCR, "cluster:admin/xpack/whatever"); + } public void testManageEnrichPrivilege() { @@ -169,33 +194,20 @@ public class PrivilegeTests extends ESTestCase { public void testIlmPrivileges() { { - Predicate predicate = ClusterPrivilege.MANAGE_ILM.predicate(); - // check cluster actions - assertThat(predicate.test("cluster:admin/ilm/delete"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/_move/post"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/put"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/start"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/stop"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/brand_new_api"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/get"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/operation_mode/get"), is(true)); - // check non-ilm action - assertThat(predicate.test("cluster:admin/whatever"), is(false)); + verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ILM, "cluster:admin/ilm/delete", + "cluster:admin/ilm/_move/post", "cluster:admin/ilm/put", "cluster:admin/ilm/start", + "cluster:admin/ilm/stop", "cluster:admin/ilm/brand_new_api", "cluster:admin/ilm/get", + "cluster:admin/ilm/operation_mode/get" + ); + verifyClusterActionDenied(ClusterPrivilegeResolver.MANAGE_ILM, "cluster:admin/whatever"); + } { - Predicate predicate = ClusterPrivilege.READ_ILM.predicate(); - // check cluster 
actions - assertThat(predicate.test("cluster:admin/ilm/delete"), is(false)); - assertThat(predicate.test("cluster:admin/ilm/_move/post"), is(false)); - assertThat(predicate.test("cluster:admin/ilm/put"), is(false)); - assertThat(predicate.test("cluster:admin/ilm/start"), is(false)); - assertThat(predicate.test("cluster:admin/ilm/stop"), is(false)); - assertThat(predicate.test("cluster:admin/ilm/brand_new_api"), is(false)); - assertThat(predicate.test("cluster:admin/ilm/get"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/operation_mode/get"), is(true)); - // check non-ilm action - assertThat(predicate.test("cluster:admin/whatever"), is(false)); + verifyClusterActionAllowed(ClusterPrivilegeResolver.READ_ILM, "cluster:admin/ilm/get", "cluster:admin/ilm/operation_mode/get"); + verifyClusterActionDenied(ClusterPrivilegeResolver.READ_ILM, "cluster:admin/ilm/delete", "cluster:admin/ilm/_move/post", + "cluster:admin/ilm/put", "cluster:admin/ilm/start", "cluster:admin/ilm/stop", + "cluster:admin/ilm/brand_new_api", "cluster:admin/whatever"); } { @@ -221,33 +233,29 @@ public class PrivilegeTests extends ESTestCase { } } - public void testSlmPriviledges() { + public void testSlmPrivileges() { { - Predicate predicate = ClusterPrivilege.MANAGE_SLM.predicate(); - // check cluster actions - assertThat(predicate.test("cluster:admin/slm/delete"), is(true)); - assertThat(predicate.test("cluster:admin/slm/put"), is(true)); - assertThat(predicate.test("cluster:admin/slm/get"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/start"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/stop"), is(true)); - assertThat(predicate.test("cluster:admin/slm/execute"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/operation_mode/get"), is(true)); - // check non-slm action - assertThat(predicate.test("cluster:admin/whatever"), is(false)); + verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_SLM, "cluster:admin/slm/delete", + 
"cluster:admin/slm/put", + "cluster:admin/slm/get", + "cluster:admin/ilm/start", + "cluster:admin/ilm/stop", + "cluster:admin/slm/execute", + "cluster:admin/ilm/operation_mode/get"); + verifyClusterActionDenied(ClusterPrivilegeResolver.MANAGE_SLM, "cluster:admin/whatever"); } { - Predicate predicate = ClusterPrivilege.READ_SLM.predicate(); - // check cluster actions - assertThat(predicate.test("cluster:admin/slm/delete"), is(false)); - assertThat(predicate.test("cluster:admin/slm/put"), is(false)); - assertThat(predicate.test("cluster:admin/slm/get"), is(true)); - assertThat(predicate.test("cluster:admin/ilm/start"), is(false)); - assertThat(predicate.test("cluster:admin/ilm/stop"), is(false)); - assertThat(predicate.test("cluster:admin/slm/execute"), is(false)); - assertThat(predicate.test("cluster:admin/ilm/operation_mode/get"), is(true)); - // check non-slm action - assertThat(predicate.test("cluster:admin/whatever"), is(false)); + verifyClusterActionAllowed(ClusterPrivilegeResolver.READ_SLM, + "cluster:admin/slm/get", + "cluster:admin/ilm/operation_mode/get"); + verifyClusterActionDenied(ClusterPrivilegeResolver.READ_SLM,"cluster:admin/slm/delete", + "cluster:admin/slm/put", + "cluster:admin/ilm/start", + "cluster:admin/ilm/stop", + "cluster:admin/slm/execute", + "cluster:admin/whatever"); + } } } diff --git a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java index a48777d79b0..b239d461b1c 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java +++ b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformIT.java @@ -18,7 +18,7 @@ import org.elasticsearch.client.RequestOptions; import 
org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfigUpdate; -import org.elasticsearch.client.dataframe.transforms.DataFrameTransformTaskState; +import org.elasticsearch.client.dataframe.transforms.DataFrameTransformStats; import org.elasticsearch.client.dataframe.transforms.DestConfig; import org.elasticsearch.client.dataframe.transforms.TimeSyncConfig; import org.elasticsearch.client.dataframe.transforms.pivot.SingleGroupSource; @@ -43,6 +43,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.oneOf; public class DataFrameTransformIT extends DataFrameIntegTestCase { @@ -110,8 +111,8 @@ public class DataFrameTransformIT extends DataFrameIntegTestCase { assertTrue(startDataFrameTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged()); waitUntilCheckpoint(config.getId(), 1L); - assertThat(getDataFrameTransformStats(config.getId()).getTransformsStats().get(0).getTaskState(), - equalTo(DataFrameTransformTaskState.STARTED)); + assertThat(getDataFrameTransformStats(config.getId()).getTransformsStats().get(0).getState(), + equalTo(DataFrameTransformStats.State.STARTED)); long docsIndexed = getDataFrameTransformStats(config.getId()) .getTransformsStats() @@ -167,8 +168,8 @@ public class DataFrameTransformIT extends DataFrameIntegTestCase { assertTrue(startDataFrameTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged()); waitUntilCheckpoint(config.getId(), 1L); - assertThat(getDataFrameTransformStats(config.getId()).getTransformsStats().get(0).getTaskState(), - equalTo(DataFrameTransformTaskState.STARTED)); + assertThat(getDataFrameTransformStats(config.getId()).getTransformsStats().get(0).getState(), + 
oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); long docsIndexed = getDataFrameTransformStats(config.getId()) .getTransformsStats() diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java index b680a50b662..2476e634f85 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameGetAndGetStatsIT.java @@ -22,6 +22,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.oneOf; public class DataFrameGetAndGetStatsIT extends DataFrameRestTestCase { @@ -114,7 +115,7 @@ public class DataFrameGetAndGetStatsIT extends DataFrameRestTestCase { transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); assertEquals(1, transformsStats.size()); - assertEquals("stopped", XContentMapValues.extractValue("task_state", transformsStats.get(0))); + assertEquals("stopped", XContentMapValues.extractValue("state", transformsStats.get(0))); assertNull(XContentMapValues.extractValue("checkpointing.next.position", transformsStats.get(0))); assertEquals(1, XContentMapValues.extractValue("checkpointing.last.checkpoint", transformsStats.get(0))); @@ -125,7 +126,7 @@ public class DataFrameGetAndGetStatsIT extends DataFrameRestTestCase { transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); assertEquals(1, transformsStats.size()); - assertEquals("started", 
XContentMapValues.extractValue("task_state", transformsStats.get(0))); + assertThat(XContentMapValues.extractValue("state", transformsStats.get(0)), oneOf("started", "indexing")); assertEquals(1, XContentMapValues.extractValue("checkpointing.last.checkpoint", transformsStats.get(0))); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 6ca60bd6654..09a6f1ee56a 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -300,8 +300,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase { void waitForDataFrameStopped(String transformId) throws Exception { assertBusy(() -> { - assertEquals("stopped", getDataFrameTaskState(transformId)); - assertEquals("stopped", getDataFrameIndexerState(transformId)); + assertEquals("stopped", getDataFrameTransformState(transformId)); }, 15, TimeUnit.SECONDS); } @@ -326,19 +325,9 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase { return transformConfigs == null ? 
Collections.emptyList() : transformConfigs; } - protected static String getDataFrameIndexerState(String transformId) throws IOException { + protected static String getDataFrameTransformState(String transformId) throws IOException { Map transformStatsAsMap = getDataFrameState(transformId); - if (transformStatsAsMap == null) { - return null; - } - String indexerState = (String) XContentMapValues.extractValue("checkpointing.next.indexer_state", transformStatsAsMap); - // If the transform is stopped then it might not have an indexer state, but logically that's the same as the indexer being stopped - return indexerState == null ? "stopped" : indexerState; - } - - protected static String getDataFrameTaskState(String transformId) throws IOException { - Map transformStatsAsMap = getDataFrameState(transformId); - return transformStatsAsMap == null ? null : (String) XContentMapValues.extractValue("task_state", transformStatsAsMap); + return transformStatsAsMap == null ? null : (String) XContentMapValues.extractValue("state", transformStatsAsMap); } protected static Map getDataFrameState(String transformId) throws IOException { @@ -378,10 +367,12 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase { request.addParameter("timeout", "10s"); request.addParameter("ignore", "404"); adminClient().performRequest(request); - String state = getDataFrameIndexerState(transformId); - if (state != null) { - assertEquals("stopped", getDataFrameIndexerState(transformId)); - } + } + + for (Map transformConfig : transformConfigs) { + String transformId = (String) transformConfig.get("id"); + String state = getDataFrameTransformState(transformId); + assertEquals("Transform [" + transformId + "] is not in the stopped state", "stopped", state); } for (Map transformConfig : transformConfigs) { diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTaskFailedStateIT.java 
b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTaskFailedStateIT.java index e0300c96713..9551e4bf854 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTaskFailedStateIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTaskFailedStateIT.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStats; import org.junit.After; import org.junit.Before; @@ -29,6 +29,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.oneOf; public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase { @@ -60,7 +61,7 @@ public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase { createContinuousPivotReviewsTransform(transformId, dataFrameIndex, null); failureTransforms.add(transformId); startDataframeTransform(transformId, false); - awaitState(transformId, DataFrameTransformTaskState.FAILED); + awaitState(transformId, DataFrameTransformStats.State.FAILED); Map fullState = getDataFrameState(transformId); final String failureReason = "task encountered more than 0 failures; latest failure: " + "Bulk index experienced failures. 
See the logs of the node running the transform for details."; @@ -78,7 +79,7 @@ public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase { // Verify that we can force stop a failed transform stopDataFrameTransform(transformId, true); - awaitState(transformId, DataFrameTransformTaskState.STOPPED); + awaitState(transformId, DataFrameTransformStats.State.STOPPED); fullState = getDataFrameState(transformId); assertThat(XContentMapValues.extractValue("reason", fullState), is(nullValue())); } @@ -91,7 +92,7 @@ public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase { createContinuousPivotReviewsTransform(transformId, dataFrameIndex, null); failureTransforms.add(transformId); startDataframeTransform(transformId, false); - awaitState(transformId, DataFrameTransformTaskState.FAILED); + awaitState(transformId, DataFrameTransformStats.State.FAILED); Map fullState = getDataFrameState(transformId); final String failureReason = "task encountered more than 0 failures; latest failure: " + "Bulk index experienced failures. 
See the logs of the node running the transform for details."; @@ -114,15 +115,15 @@ public class DataFrameTaskFailedStateIT extends DataFrameRestTestCase { // Verify that we have started and that our reason is cleared fullState = getDataFrameState(transformId); assertThat(XContentMapValues.extractValue("reason", fullState), is(nullValue())); - assertThat(XContentMapValues.extractValue("task_state", fullState), equalTo("started")); + assertThat(XContentMapValues.extractValue("state", fullState), oneOf("started", "indexing")); assertThat((Integer)XContentMapValues.extractValue("stats.index_failures", fullState), greaterThanOrEqualTo(1)); stopDataFrameTransform(transformId, true); } - private void awaitState(String transformId, DataFrameTransformTaskState state) throws Exception { + private void awaitState(String transformId, DataFrameTransformStats.State state) throws Exception { assertBusy(() -> { - String currentState = getDataFrameTaskState(transformId); + String currentState = getDataFrameTransformState(transformId); assertThat(currentState, equalTo(state.value())); }, 180, TimeUnit.SECONDS); // It should not take this long, but if the scheduler gets deferred, it could } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java index bede560a93d..df78c54b535 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java @@ -136,14 +136,14 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu } return Arrays.asList( - new RestPutDataFrameTransformAction(settings, restController), - new RestStartDataFrameTransformAction(settings, restController), - new RestStopDataFrameTransformAction(settings, restController), - new RestDeleteDataFrameTransformAction(settings, 
restController), - new RestGetDataFrameTransformsAction(settings, restController), - new RestGetDataFrameTransformsStatsAction(settings, restController), - new RestPreviewDataFrameTransformAction(settings, restController), - new RestUpdateDataFrameTransformAction(settings, restController) + new RestPutDataFrameTransformAction(restController), + new RestStartDataFrameTransformAction(restController), + new RestStopDataFrameTransformAction(restController), + new RestDeleteDataFrameTransformAction(restController), + new RestGetDataFrameTransformsAction(restController), + new RestGetDataFrameTransformsStatsAction(restController), + new RestPreviewDataFrameTransformAction(restController), + new RestUpdateDataFrameTransformAction(restController) ); } @@ -187,7 +187,9 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu } dataFrameAuditor.set(new DataFrameAuditor(client, clusterService.getNodeName())); dataFrameTransformsConfigManager.set(new DataFrameTransformsConfigManager(client, xContentRegistry)); - dataFrameTransformsCheckpointService.set(new DataFrameTransformsCheckpointService(client, dataFrameTransformsConfigManager.get())); + dataFrameTransformsCheckpointService.set(new DataFrameTransformsCheckpointService(client, + dataFrameTransformsConfigManager.get(), + dataFrameAuditor.get())); return Arrays.asList(dataFrameTransformsConfigManager.get(), dataFrameAuditor.get(), dataFrameTransformsCheckpointService.get()); } @@ -235,6 +237,7 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu settingsModule.getSettings())); } + @Override public List> getSettings() { return Collections.singletonList(DataFrameTransformTask.NUM_FAILURE_RETRIES_SETTING); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java index 1b51e0e4018..c3b6bd39564 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheck import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStoredDoc; -import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState; import org.elasticsearch.xpack.core.dataframe.transforms.NodeAttributes; import org.elasticsearch.xpack.dataframe.checkpoint.DataFrameTransformsCheckpointService; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; @@ -90,7 +89,7 @@ public class TransportGetDataFrameTransformsStatsAction extends task.getCheckpointingInfo(transformsCheckpointService, ActionListener.wrap( checkpointingInfo -> listener.onResponse(new Response( Collections.singletonList(new DataFrameTransformStats(task.getTransformId(), - transformState.getTaskState(), + DataFrameTransformStats.State.fromComponents(transformState.getTaskState(), transformState.getIndexerState()), transformState.getReason(), null, task.getStats(), @@ -100,7 +99,7 @@ public class TransportGetDataFrameTransformsStatsAction extends logger.warn("Failed to retrieve checkpointing info for transform [" + task.getTransformId() + "]", e); listener.onResponse(new Response( Collections.singletonList(new DataFrameTransformStats(task.getTransformId(), - transformState.getTaskState(), + 
DataFrameTransformStats.State.fromComponents(transformState.getTaskState(), transformState.getIndexerState()), transformState.getReason(), null, task.getStats(), @@ -223,7 +222,6 @@ public class TransportGetDataFrameTransformsStatsAction extends transformsCheckpointService.getCheckpointingInfo( transform.getId(), transform.getTransformState().getCheckpoint(), - transform.getTransformState().getIndexerState(), transform.getTransformState().getPosition(), transform.getTransformState().getProgress(), ActionListener.wrap( @@ -254,7 +252,7 @@ public class TransportGetDataFrameTransformsStatsAction extends synchronized (allStateAndStats) { allStateAndStats.add(new DataFrameTransformStats( stat.getId(), - DataFrameTransformTaskState.STOPPED, + DataFrameTransformStats.State.STOPPED, null, null, stat.getTransformStats(), diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/CheckpointProvider.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/CheckpointProvider.java index a42f5f4bbae..8dcab5879fb 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/CheckpointProvider.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/CheckpointProvider.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerPositio import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpoint; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointingInfo; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformProgress; -import org.elasticsearch.xpack.core.indexing.IndexerState; /** * Interface for checkpoint creation, checking for changes and getting statistics about checkpoints @@ -41,14 +40,12 @@ public interface CheckpointProvider { * * @param lastCheckpoint the last checkpoint * @param nextCheckpoint the next checkpoint - 
* @param nextCheckpointIndexerState indexer state for the next checkpoint * @param nextCheckpointPosition position for the next checkpoint * @param nextCheckpointProgress progress for the next checkpoint * @param listener listener to retrieve the result */ void getCheckpointingInfo(DataFrameTransformCheckpoint lastCheckpoint, DataFrameTransformCheckpoint nextCheckpoint, - IndexerState nextCheckpointIndexerState, DataFrameIndexerPosition nextCheckpointPosition, DataFrameTransformProgress nextCheckpointProgress, ActionListener listener); @@ -59,13 +56,11 @@ public interface CheckpointProvider { * For stopped data frames we need to do lookups in the internal index. * * @param lastCheckpointNumber the last checkpoint number - * @param nextCheckpointIndexerState indexer state for the next checkpoint * @param nextCheckpointPosition position for the next checkpoint * @param nextCheckpointProgress progress for the next checkpoint * @param listener listener to retrieve the result */ void getCheckpointingInfo(long lastCheckpointNumber, - IndexerState nextCheckpointIndexerState, DataFrameIndexerPosition nextCheckpointPosition, DataFrameTransformProgress nextCheckpointProgress, ActionListener listener); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformsCheckpointService.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformsCheckpointService.java index 24e03114948..85ebcde85c0 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformsCheckpointService.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformsCheckpointService.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheck import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import 
org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformProgress; import org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig; -import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.dataframe.notifications.DataFrameAuditor; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; /** @@ -32,19 +32,21 @@ public class DataFrameTransformsCheckpointService { private final Client client; private final DataFrameTransformsConfigManager dataFrameTransformsConfigManager; + private final DataFrameAuditor dataFrameAuditor; public DataFrameTransformsCheckpointService(final Client client, - final DataFrameTransformsConfigManager dataFrameTransformsConfigManager) { + final DataFrameTransformsConfigManager dataFrameTransformsConfigManager, DataFrameAuditor dataFrameAuditor) { this.client = client; this.dataFrameTransformsConfigManager = dataFrameTransformsConfigManager; + this.dataFrameAuditor = dataFrameAuditor; } public CheckpointProvider getCheckpointProvider(final DataFrameTransformConfig transformConfig) { if (transformConfig.getSyncConfig() instanceof TimeSyncConfig) { - return new TimeBasedCheckpointProvider(client, dataFrameTransformsConfigManager, transformConfig); + return new TimeBasedCheckpointProvider(client, dataFrameTransformsConfigManager, dataFrameAuditor, transformConfig); } - return new DefaultCheckpointProvider(client, dataFrameTransformsConfigManager, transformConfig); + return new DefaultCheckpointProvider(client, dataFrameTransformsConfigManager, dataFrameAuditor, transformConfig); } /** @@ -52,14 +54,12 @@ public class DataFrameTransformsCheckpointService { * * @param transformId The data frame task * @param lastCheckpointNumber the last checkpoint - * @param nextCheckpointIndexerState indexer state for the next checkpoint * @param nextCheckpointPosition position for the next checkpoint * @param nextCheckpointProgress progress for the next checkpoint * @param 
listener listener to retrieve the result */ public void getCheckpointingInfo(final String transformId, final long lastCheckpointNumber, - final IndexerState nextCheckpointIndexerState, final DataFrameIndexerPosition nextCheckpointPosition, final DataFrameTransformProgress nextCheckpointProgress, final ActionListener listener) { @@ -67,7 +67,7 @@ public class DataFrameTransformsCheckpointService { // we need to retrieve the config first before we can defer the rest to the corresponding provider dataFrameTransformsConfigManager.getTransformConfiguration(transformId, ActionListener.wrap( transformConfig -> { - getCheckpointProvider(transformConfig).getCheckpointingInfo(lastCheckpointNumber, nextCheckpointIndexerState, + getCheckpointProvider(transformConfig).getCheckpointingInfo(lastCheckpointNumber, nextCheckpointPosition, nextCheckpointProgress, listener); }, transformError -> { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/DefaultCheckpointProvider.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/DefaultCheckpointProvider.java index a6e1e2dae74..23b1bdde12b 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/DefaultCheckpointProvider.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/DefaultCheckpointProvider.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerPosition; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpoint; @@ -25,10 +26,11 @@ import 
org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheck import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointingInfo; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformProgress; -import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.dataframe.notifications.DataFrameAuditor; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -36,11 +38,13 @@ import java.util.TreeMap; public class DefaultCheckpointProvider implements CheckpointProvider { + // threshold when to audit concrete index names, above this threshold we only report the number of changes + private static final int AUDIT_CONCRETED_SOURCE_INDEX_CHANGES = 10; + /** * Builder for collecting checkpointing information for the purpose of _stats */ private static class DataFrameTransformCheckpointingInfoBuilder { - private IndexerState nextCheckpointIndexerState; private DataFrameIndexerPosition nextCheckpointPosition; private DataFrameTransformProgress nextCheckpointProgress; private DataFrameTransformCheckpoint lastCheckpoint; @@ -66,9 +70,9 @@ public class DefaultCheckpointProvider implements CheckpointProvider { long nextCheckpointNumber = nextCheckpoint.getCheckpoint() > 0 ? 
nextCheckpoint.getCheckpoint() : 0; return new DataFrameTransformCheckpointingInfo( - new DataFrameTransformCheckpointStats(lastCheckpointNumber, null, null, null, + new DataFrameTransformCheckpointStats(lastCheckpointNumber, null, null, lastCheckpoint.getTimestamp(), lastCheckpoint.getTimeUpperBound()), - new DataFrameTransformCheckpointStats(nextCheckpointNumber, nextCheckpointIndexerState, nextCheckpointPosition, + new DataFrameTransformCheckpointStats(nextCheckpointNumber, nextCheckpointPosition, nextCheckpointProgress, nextCheckpoint.getTimestamp(), nextCheckpoint.getTimeUpperBound()), DataFrameTransformCheckpoint.getBehind(lastCheckpoint, sourceCheckpoint)); } @@ -97,25 +101,22 @@ public class DefaultCheckpointProvider implements CheckpointProvider { this.nextCheckpointPosition = nextCheckpointPosition; return this; } - - public DataFrameTransformCheckpointingInfoBuilder setNextCheckpointIndexerState(IndexerState nextCheckpointIndexerState) { - this.nextCheckpointIndexerState = nextCheckpointIndexerState; - return this; - } - } private static final Logger logger = LogManager.getLogger(DefaultCheckpointProvider.class); protected final Client client; protected final DataFrameTransformsConfigManager dataFrameTransformsConfigManager; + protected final DataFrameAuditor dataFrameAuditor; protected final DataFrameTransformConfig transformConfig; public DefaultCheckpointProvider(final Client client, final DataFrameTransformsConfigManager dataFrameTransformsConfigManager, + final DataFrameAuditor dataFrameAuditor, final DataFrameTransformConfig transformConfig) { this.client = client; this.dataFrameTransformsConfigManager = dataFrameTransformsConfigManager; + this.dataFrameAuditor = dataFrameAuditor; this.transformConfig = transformConfig; } @@ -131,6 +132,9 @@ public class DefaultCheckpointProvider implements CheckpointProvider { final long checkpoint = lastCheckpoint != null ? 
lastCheckpoint.getCheckpoint() + 1 : 1; getIndexCheckpoints(ActionListener.wrap(checkpointsByIndex -> { + reportSourceIndexChanges(lastCheckpoint != null ? lastCheckpoint.getIndicesCheckpoints().keySet() : Collections.emptySet(), + checkpointsByIndex.keySet()); + listener.onResponse(new DataFrameTransformCheckpoint(transformConfig.getId(), timestamp, checkpoint, checkpointsByIndex, 0L)); }, listener::onFailure)); } @@ -144,7 +148,9 @@ public class DefaultCheckpointProvider implements CheckpointProvider { ClientHelper.executeWithHeadersAsync(transformConfig.getHeaders(), ClientHelper.DATA_FRAME_ORIGIN, client, GetIndexAction.INSTANCE, getIndexRequest, ActionListener.wrap(getIndexResponse -> { - Set userIndices = new HashSet<>(Arrays.asList(getIndexResponse.getIndices())); + Set userIndices = getIndexResponse.getIndices() != null + ? new HashSet<>(Arrays.asList(getIndexResponse.getIndices())) + : Collections.emptySet(); // 2nd get stats request ClientHelper.executeAsyncWithOrigin(client, ClientHelper.DATA_FRAME_ORIGIN, @@ -226,17 +232,15 @@ public class DefaultCheckpointProvider implements CheckpointProvider { @Override public void getCheckpointingInfo(DataFrameTransformCheckpoint lastCheckpoint, - DataFrameTransformCheckpoint nextCheckpoint, - IndexerState nextCheckpointIndexerState, - DataFrameIndexerPosition nextCheckpointPosition, - DataFrameTransformProgress nextCheckpointProgress, - ActionListener listener) { + DataFrameTransformCheckpoint nextCheckpoint, + DataFrameIndexerPosition nextCheckpointPosition, + DataFrameTransformProgress nextCheckpointProgress, + ActionListener listener) { DataFrameTransformCheckpointingInfoBuilder checkpointingInfoBuilder = new DataFrameTransformCheckpointingInfoBuilder(); checkpointingInfoBuilder.setLastCheckpoint(lastCheckpoint) .setNextCheckpoint(nextCheckpoint) - .setNextCheckpointIndexerState(nextCheckpointIndexerState) .setNextCheckpointPosition(nextCheckpointPosition) .setNextCheckpointProgress(nextCheckpointProgress); @@ 
-250,15 +254,13 @@ public class DefaultCheckpointProvider implements CheckpointProvider { } @Override - public void getCheckpointingInfo(long lastCheckpointNumber, IndexerState nextCheckpointIndexerState, - DataFrameIndexerPosition nextCheckpointPosition, DataFrameTransformProgress nextCheckpointProgress, - ActionListener listener) { + public void getCheckpointingInfo(long lastCheckpointNumber, DataFrameIndexerPosition nextCheckpointPosition, + DataFrameTransformProgress nextCheckpointProgress, + ActionListener listener) { DataFrameTransformCheckpointingInfoBuilder checkpointingInfoBuilder = new DataFrameTransformCheckpointingInfoBuilder(); - checkpointingInfoBuilder.setNextCheckpointIndexerState(nextCheckpointIndexerState) - .setNextCheckpointPosition(nextCheckpointPosition) - .setNextCheckpointProgress(nextCheckpointProgress); + checkpointingInfoBuilder.setNextCheckpointPosition(nextCheckpointPosition).setNextCheckpointProgress(nextCheckpointProgress); long timestamp = System.currentTimeMillis(); @@ -311,4 +313,34 @@ public class DefaultCheckpointProvider implements CheckpointProvider { getIndexCheckpoints(checkpointsByIndexListener); } } + + /** + * Inspect source changes and report differences + * + * @param lastSourceIndexes the set of indexes seen in the previous checkpoint + * @param newSourceIndexes the set of indexes seen in the new checkpoint + */ + void reportSourceIndexChanges(final Set lastSourceIndexes, final Set newSourceIndexes) { + // spam protection: only warn the first time + if (newSourceIndexes.isEmpty() && lastSourceIndexes.isEmpty() == false) { + String message = "Source did not resolve to any open indexes"; + logger.warn("{} for transform [{}]", message, transformConfig.getId()); + dataFrameAuditor.warning(transformConfig.getId(), message); + } else { + Set removedIndexes = Sets.difference(lastSourceIndexes, newSourceIndexes); + Set addedIndexes = Sets.difference(newSourceIndexes, lastSourceIndexes); + + if (removedIndexes.size() + 
addedIndexes.size() > AUDIT_CONCRETED_SOURCE_INDEX_CHANGES) { + String message = "Source index resolve found more than " + AUDIT_CONCRETED_SOURCE_INDEX_CHANGES + " changes, [" + + removedIndexes.size() + "] removed indexes, [" + addedIndexes.size() + "] new indexes"; + logger.debug("{} for transform [{}]", message, transformConfig.getId()); + dataFrameAuditor.info(transformConfig.getId(), message); + } else if (removedIndexes.size() + addedIndexes.size() > 0) { + String message = "Source index resolve found changes, removedIndexes: " + removedIndexes + ", new indexes: " + addedIndexes; + logger.debug("{} for transform [{}]", message, transformConfig.getId()); + dataFrameAuditor.info(transformConfig.getId(), message); + } + } + } + } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/TimeBasedCheckpointProvider.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/TimeBasedCheckpointProvider.java index d9c4d8ce158..276d3fe3bcc 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/TimeBasedCheckpointProvider.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/checkpoint/TimeBasedCheckpointProvider.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpoint; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.xpack.core.dataframe.transforms.TimeSyncConfig; +import org.elasticsearch.xpack.dataframe.notifications.DataFrameAuditor; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; public class TimeBasedCheckpointProvider extends DefaultCheckpointProvider { @@ -31,8 +32,9 @@ public class TimeBasedCheckpointProvider extends DefaultCheckpointProvider { TimeBasedCheckpointProvider(final Client client, final 
DataFrameTransformsConfigManager dataFrameTransformsConfigManager, + final DataFrameAuditor dataFrameAuditor, final DataFrameTransformConfig transformConfig) { - super(client, dataFrameTransformsConfigManager, transformConfig); + super(client, dataFrameTransformsConfigManager, dataFrameAuditor, transformConfig); timeSyncConfig = (TimeSyncConfig) transformConfig.getSyncConfig(); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java index 6b5b91a6d1c..4d891427e4d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -17,8 +16,7 @@ import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAct public class RestDeleteDataFrameTransformAction extends BaseRestHandler { - public RestDeleteDataFrameTransformAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteDataFrameTransformAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID, this); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java index 
95f6ec79fb1..7dce1c7789a 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -20,8 +19,7 @@ import static org.elasticsearch.xpack.core.dataframe.DataFrameField.ALLOW_NO_MAT public class RestGetDataFrameTransformsAction extends BaseRestHandler { - public RestGetDataFrameTransformsAction(Settings settings, RestController controller) { - super(settings); + public RestGetDataFrameTransformsAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_TRANSFORMS, this); controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID, this); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java index 32167202902..7e96f34fa3f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; 
import org.elasticsearch.rest.RestRequest; @@ -20,8 +19,7 @@ import static org.elasticsearch.xpack.core.dataframe.DataFrameField.ALLOW_NO_MAT public class RestGetDataFrameTransformsStatsAction extends BaseRestHandler { - public RestGetDataFrameTransformsStatsAction(Settings settings, RestController controller) { - super(settings); + public RestGetDataFrameTransformsStatsAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_TRANSFORMS + "_stats", this); controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID + "_stats", this); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPreviewDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPreviewDataFrameTransformAction.java index 93251ecbc50..6f8df5cad47 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPreviewDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPreviewDataFrameTransformAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -20,8 +19,7 @@ import java.io.IOException; public class RestPreviewDataFrameTransformAction extends BaseRestHandler { - public RestPreviewDataFrameTransformAction(Settings settings, RestController controller) { - super(settings); + public RestPreviewDataFrameTransformAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH + "transforms/_preview", this); } diff --git 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameTransformAction.java index 10320024456..5bd2c9fe479 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameTransformAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -20,8 +19,7 @@ import java.io.IOException; public class RestPutDataFrameTransformAction extends BaseRestHandler { - public RestPutDataFrameTransformAction(Settings settings, RestController controller) { - super(settings); + public RestPutDataFrameTransformAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID, this); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java index cd1289cb639..44c2c66fbb7 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.action.support.master.AcknowledgedRequest; import 
org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -16,17 +15,14 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.StartDataFrameTransformAction; -import java.io.IOException; - public class RestStartDataFrameTransformAction extends BaseRestHandler { - public RestStartDataFrameTransformAction(Settings settings, RestController controller) { - super(settings); + public RestStartDataFrameTransformAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID + "_start", this); } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(DataFrameField.ID.getPreferredName()); boolean force = restRequest.paramAsBoolean(DataFrameField.FORCE.getPreferredName(), false); StartDataFrameTransformAction.Request request = new StartDataFrameTransformAction.Request(id, force); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java index ab7b1b464d6..112fe708d59 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import 
org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -15,17 +14,14 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.StopDataFrameTransformAction; -import java.io.IOException; - public class RestStopDataFrameTransformAction extends BaseRestHandler { - public RestStopDataFrameTransformAction(Settings settings, RestController controller) { - super(settings); + public RestStopDataFrameTransformAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID + "_stop", this); } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(DataFrameField.ID.getPreferredName()); TimeValue timeout = restRequest.paramAsTime(DataFrameField.TIMEOUT.getPreferredName(), StopDataFrameTransformAction.DEFAULT_TIMEOUT); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestUpdateDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestUpdateDataFrameTransformAction.java index 7dea1aa60c3..5449f3e8d9e 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestUpdateDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestUpdateDataFrameTransformAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -20,8 +19,7 @@ import java.io.IOException; public class RestUpdateDataFrameTransformAction extends BaseRestHandler { - public RestUpdateDataFrameTransformAction(Settings settings, RestController controller) { - super(settings); + public RestUpdateDataFrameTransformAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID + "_update", this); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java index 6e9fe01a65e..a75c2d4b022 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java @@ -146,11 +146,20 @@ public class DataFrameTransformPersistentTasksExecutor extends PersistentTasksEx // <5> load next checkpoint ActionListener getTransformNextCheckpointListener = ActionListener.wrap( nextCheckpoint -> { - indexerBuilder.setNextCheckpoint(nextCheckpoint); + + if (nextCheckpoint.isEmpty()) { + // extra safety: reset position and progress if next checkpoint is empty + // prevents a failure if for some reason the next checkpoint has been deleted + indexerBuilder.setInitialPosition(null); + indexerBuilder.setProgress(null); + } else { + logger.trace("[{}] Loaded next checkpoint [{}] found, starting the task", transformId, + nextCheckpoint.getCheckpoint()); + indexerBuilder.setNextCheckpoint(nextCheckpoint); + } final long lastCheckpoint = 
stateHolder.get().getCheckpoint(); - logger.trace("[{}] No next checkpoint found, starting the task", transformId); startTask(buildTask, indexerBuilder, lastCheckpoint, startTaskListener); }, error -> { @@ -166,14 +175,10 @@ public class DataFrameTransformPersistentTasksExecutor extends PersistentTasksEx lastCheckpoint -> { indexerBuilder.setLastCheckpoint(lastCheckpoint); - final long nextCheckpoint = stateHolder.get().getInProgressCheckpoint(); - - if (nextCheckpoint > 0) { - transformsConfigManager.getTransformCheckpoint(transformId, nextCheckpoint, getTransformNextCheckpointListener); - } else { - logger.trace("[{}] No next checkpoint found, starting the task", transformId); - startTask(buildTask, indexerBuilder, lastCheckpoint.getCheckpoint(), startTaskListener); - } + logger.trace("[{}] Loaded last checkpoint [{}], looking for next checkpoint", transformId, + lastCheckpoint.getCheckpoint()); + transformsConfigManager.getTransformCheckpoint(transformId, lastCheckpoint.getCheckpoint() + 1, + getTransformNextCheckpointListener); }, error -> { String msg = DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_LOAD_TRANSFORM_CHECKPOINT, transformId); @@ -201,8 +206,8 @@ public class DataFrameTransformPersistentTasksExecutor extends PersistentTasksEx final long lastCheckpoint = stateHolder.get().getCheckpoint(); if (lastCheckpoint == 0) { - logger.trace("[{}] No checkpoint found, starting the task", transformId); - startTask(buildTask, indexerBuilder, lastCheckpoint, startTaskListener); + logger.trace("[{}] No last checkpoint found, looking for next checkpoint", transformId); + transformsConfigManager.getTransformCheckpoint(transformId, lastCheckpoint + 1, getTransformNextCheckpointListener); } else { logger.trace ("[{}] Restore last checkpoint: [{}]", transformId, lastCheckpoint); transformsConfigManager.getTransformCheckpoint(transformId, lastCheckpoint, getTransformLastCheckpointListener); diff --git 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java index 14edf8774e0..e70687592e9 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java @@ -187,7 +187,6 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S transformsCheckpointService.getCheckpointingInfo( transform.getId(), currentCheckpoint.get(), - initialIndexerState, initialPosition, null, listener); @@ -196,7 +195,6 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S indexer.getCheckpointProvider().getCheckpointingInfo( indexer.getLastCheckpoint(), indexer.getNextCheckpoint(), - indexer.getState(), indexer.getPosition(), indexer.getProgress(), listener); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameSingleNodeTestCase.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameSingleNodeTestCase.java index cdd54d86c7e..b3c087983e8 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameSingleNodeTestCase.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameSingleNodeTestCase.java @@ -70,7 +70,7 @@ public abstract class DataFrameSingleNodeTestCase extends ESSingleNodeTestCase { }), latch); function.accept(listener); - latch.await(10, TimeUnit.SECONDS); + assertTrue("timed out after 20s", latch.await(20, TimeUnit.SECONDS)); } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformCheckpointServiceNodeTests.java 
b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformCheckpointServiceNodeTests.java index 9ed64de3e02..fe6cc7c7525 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformCheckpointServiceNodeTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformCheckpointServiceNodeTests.java @@ -47,8 +47,8 @@ import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheck import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfigTests; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformProgress; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformProgressTests; -import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.dataframe.DataFrameSingleNodeTestCase; +import org.elasticsearch.xpack.dataframe.notifications.DataFrameAuditor; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import org.junit.After; import org.junit.Before; @@ -73,8 +73,8 @@ public class DataFrameTransformCheckpointServiceNodeTests extends DataFrameSingl private class MockClientForCheckpointing extends NoOpClient { - private ShardStats[] shardStats; - private String[] indices; + private volatile ShardStats[] shardStats; + private volatile String[] indices; MockClientForCheckpointing(String testName) { super(testName); @@ -98,6 +98,7 @@ public class DataFrameTransformCheckpointServiceNodeTests extends DataFrameSingl if (request instanceof GetIndexRequest) { // for this test we only need the indices + assert(indices != null); final GetIndexResponse indexResponse = new GetIndexResponse(indices, null, null, null, null); listener.onResponse((Response) indexResponse); @@ -124,7 +125,10 @@ public class DataFrameTransformCheckpointServiceNodeTests extends DataFrameSingl // use a mock for the 
checkpoint service mockClientForCheckpointing = new MockClientForCheckpointing(getTestName()); - transformsCheckpointService = new DataFrameTransformsCheckpointService(mockClientForCheckpointing, transformsConfigManager); + DataFrameAuditor mockAuditor = mock(DataFrameAuditor.class); + transformsCheckpointService = new DataFrameTransformsCheckpointService(mockClientForCheckpointing, + transformsConfigManager, + mockAuditor); } @After @@ -198,31 +202,31 @@ public class DataFrameTransformCheckpointServiceNodeTests extends DataFrameSingl mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 20, 20, 20))); DataFrameTransformCheckpointingInfo checkpointInfo = new DataFrameTransformCheckpointingInfo( - new DataFrameTransformCheckpointStats(1, null, null, null, timestamp, 0L), - new DataFrameTransformCheckpointStats(2, IndexerState.STARTED, position, progress, timestamp + 100L, 0L), + new DataFrameTransformCheckpointStats(1, null, null, timestamp, 0L), + new DataFrameTransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), 30L); assertAsync(listener -> - transformsCheckpointService.getCheckpointingInfo(transformId, 1, IndexerState.STARTED, position, progress, listener), + transformsCheckpointService.getCheckpointingInfo(transformId, 1, position, progress, listener), checkpointInfo, null, null); mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 10, 50, 33))); checkpointInfo = new DataFrameTransformCheckpointingInfo( - new DataFrameTransformCheckpointStats(1, null, null, null, timestamp, 0L), - new DataFrameTransformCheckpointStats(2, IndexerState.INDEXING, position, progress, timestamp + 100L, 0L), + new DataFrameTransformCheckpointStats(1, null, null, timestamp, 0L), + new DataFrameTransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), 63L); assertAsync(listener -> - transformsCheckpointService.getCheckpointingInfo(transformId, 1, IndexerState.INDEXING, position, 
progress, listener), + transformsCheckpointService.getCheckpointingInfo(transformId, 1, position, progress, listener), checkpointInfo, null, null); // same as current mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 10, 10, 10))); checkpointInfo = new DataFrameTransformCheckpointingInfo( - new DataFrameTransformCheckpointStats(1, null, null, null, timestamp, 0L), - new DataFrameTransformCheckpointStats(2, IndexerState.STOPPING, position, progress, timestamp + 100L, 0L), + new DataFrameTransformCheckpointStats(1, null, null, timestamp, 0L), + new DataFrameTransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), 0L); assertAsync(listener -> - transformsCheckpointService.getCheckpointingInfo(transformId, 1, IndexerState.STOPPING, position, progress, listener), + transformsCheckpointService.getCheckpointingInfo(transformId, 1, position, progress, listener), checkpointInfo, null, null); } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DefaultCheckpointProviderTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DefaultCheckpointProviderTests.java new file mode 100644 index 00000000000..0912724e702 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DefaultCheckpointProviderTests.java @@ -0,0 +1,191 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.checkpoint; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.MockLogAppender.LoggingExpectation; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.xpack.dataframe.notifications.MockDataFrameAuditor; +import org.elasticsearch.xpack.dataframe.notifications.MockDataFrameAuditor.AuditExpectation; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; +import org.junit.Before; + +import java.util.Collections; +import java.util.HashSet; + +import static org.mockito.Mockito.mock; + +public class DefaultCheckpointProviderTests extends ESTestCase { + + private Client client; + + private MockDataFrameAuditor dataFrameAuditor; + private DataFrameTransformsConfigManager dataFrameTransformsConfigManager; + private Logger checkpointProviderlogger = LogManager.getLogger(DefaultCheckpointProvider.class); + + @Before + public void setUpMocks() throws IllegalAccessException { + client = mock(Client.class); + dataFrameTransformsConfigManager = mock(DataFrameTransformsConfigManager.class); + dataFrameAuditor = new MockDataFrameAuditor(); + } + + public void testReportSourceIndexChangesRunsEmpty() throws Exception { + String transformId = getTestName(); + DataFrameTransformConfig transformConfig = DataFrameTransformConfigTests.randomDataFrameTransformConfig(transformId); + + DefaultCheckpointProvider provider = new DefaultCheckpointProvider( + client, + dataFrameTransformsConfigManager, + dataFrameAuditor, + 
transformConfig); + + assertExpectation( + new MockLogAppender.SeenEventExpectation("warn when source is empty", + checkpointProviderlogger.getName(), + Level.WARN, + "Source did not resolve to any open indexes for transform [" + transformId + "]"), + new MockDataFrameAuditor.SeenAuditExpectation("warn when source is empty", + org.elasticsearch.xpack.core.common.notifications.Level.WARNING, + transformId, + "Source did not resolve to any open indexes"), + () -> { + provider.reportSourceIndexChanges(Collections.singleton("index"), Collections.emptySet()); + }); + + assertExpectation( + new MockLogAppender.UnseenEventExpectation("do not warn if empty again", + checkpointProviderlogger.getName(), + Level.WARN, + "Source did not resolve to any concrete indexes"), + new MockDataFrameAuditor.UnseenAuditExpectation("do not warn if empty again", + org.elasticsearch.xpack.core.common.notifications.Level.WARNING, + transformId, + "Source did not resolve to any concrete indexes"), + () -> { + provider.reportSourceIndexChanges(Collections.emptySet(), Collections.emptySet()); + }); + } + + public void testReportSourceIndexChangesAddDelete() throws Exception { + String transformId = getTestName(); + DataFrameTransformConfig transformConfig = DataFrameTransformConfigTests.randomDataFrameTransformConfig(transformId); + + DefaultCheckpointProvider provider = new DefaultCheckpointProvider( + client, + dataFrameTransformsConfigManager, + dataFrameAuditor, + transformConfig); + + assertExpectation( + new MockLogAppender.SeenEventExpectation("info about adds/removal", + checkpointProviderlogger.getName(), + Level.DEBUG, + "Source index resolve found changes, removedIndexes: [index], new indexes: [other_index] for transform [" + + transformId + "]"), + new MockDataFrameAuditor.SeenAuditExpectation("info about adds/removal", + org.elasticsearch.xpack.core.common.notifications.Level.INFO, + transformId, + "Source index resolve found changes, removedIndexes: [index], new indexes: 
[other_index]"), + () -> { + provider.reportSourceIndexChanges(Collections.singleton("index"), Collections.singleton("other_index")); + }); + + assertExpectation( + new MockLogAppender.SeenEventExpectation("info about adds/removal", + checkpointProviderlogger.getName(), + Level.DEBUG, + "Source index resolve found changes, removedIndexes: [index], new indexes: [] for transform [" + + transformId + "]"), + new MockDataFrameAuditor.SeenAuditExpectation("info about adds/removal", + org.elasticsearch.xpack.core.common.notifications.Level.INFO, + transformId, + "Source index resolve found changes, removedIndexes: [index], new indexes: []"), + () -> { + provider.reportSourceIndexChanges(Sets.newHashSet("index", "other_index"), Collections.singleton("other_index")); + }); + assertExpectation( + new MockLogAppender.SeenEventExpectation("info about adds/removal", + checkpointProviderlogger.getName(), + Level.DEBUG, + "Source index resolve found changes, removedIndexes: [], new indexes: [other_index] for transform [" + + transformId + "]"), + new MockDataFrameAuditor.SeenAuditExpectation("info about adds/removal", + org.elasticsearch.xpack.core.common.notifications.Level.INFO, + transformId, + "Source index resolve found changes, removedIndexes: [], new indexes: [other_index]"), + () -> { + provider.reportSourceIndexChanges(Collections.singleton("index"), Sets.newHashSet("index", "other_index")); + }); + } + + public void testReportSourceIndexChangesAddDeleteMany() throws Exception { + String transformId = getTestName(); + DataFrameTransformConfig transformConfig = DataFrameTransformConfigTests.randomDataFrameTransformConfig(transformId); + + DefaultCheckpointProvider provider = new DefaultCheckpointProvider( + client, + dataFrameTransformsConfigManager, + dataFrameAuditor, + transformConfig); + + HashSet oldSet = new HashSet<>(); + for (int i = 0; i < 100; ++i) { + oldSet.add(String.valueOf(i)); + } + HashSet newSet = new HashSet<>(); + for (int i = 50; i < 150; ++i) { + 
newSet.add(String.valueOf(i)); + } + + assertExpectation( + new MockLogAppender.SeenEventExpectation("info about adds/removal", + checkpointProviderlogger.getName(), + Level.DEBUG, + "Source index resolve found more than 10 changes, [50] removed indexes, [50] new indexes for transform [" + + transformId + "]"), + new MockDataFrameAuditor.SeenAuditExpectation("info about adds/removal", + org.elasticsearch.xpack.core.common.notifications.Level.INFO, + transformId, + "Source index resolve found more than 10 changes, [50] removed indexes, [50] new indexes"), + () -> { + provider.reportSourceIndexChanges(oldSet, newSet); + }); + } + + private void assertExpectation(LoggingExpectation loggingExpectation, + AuditExpectation auditExpectation, + Runnable codeBlock) throws IllegalAccessException { + MockLogAppender mockLogAppender = new MockLogAppender(); + mockLogAppender.start(); + + Loggers.setLevel(checkpointProviderlogger, Level.DEBUG); + mockLogAppender.addExpectation(loggingExpectation); + + // always start fresh + dataFrameAuditor.reset(); + dataFrameAuditor.addExpectation(auditExpectation); + try { + Loggers.addAppender(checkpointProviderlogger, mockLogAppender); + codeBlock.run(); + mockLogAppender.assertAllExpectationsMatched(); + dataFrameAuditor.assertAllExpectationsMatched(); + } finally { + Loggers.removeAppender(checkpointProviderlogger, mockLogAppender); + mockLogAppender.stop(); + } + } + +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/notifications/MockDataFrameAuditor.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/notifications/MockDataFrameAuditor.java new file mode 100644 index 00000000000..41a499aa6e1 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/notifications/MockDataFrameAuditor.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.notifications; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.xpack.core.common.notifications.Level; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.mock; + +/* + * Test mock auditor to verify audit expectations. + * + * Shamelessly cop...inspired by {@link org.elasticsearch.test.MockLogAppender} + * + * TODO: ideally this would be a generalized MockAuditor, but the current inheritance doesn't let us + */ +public class MockDataFrameAuditor extends DataFrameAuditor { + + private List expectations; + + public MockDataFrameAuditor() { + super(mock(Client.class), "mock_node_name"); + expectations = new CopyOnWriteArrayList<>(); + } + + public void addExpectation(AuditExpectation expectation) { + expectations.add(expectation); + } + + // we can dynamically change the auditor, like attaching and removing the log appender + public void reset() { + expectations.clear(); + } + + @Override + public void info(String resourceId, String message) { + audit(Level.INFO, resourceId, message); + } + + @Override + public void warning(String resourceId, String message) { + audit(Level.WARNING, resourceId, message); + } + + @Override + public void error(String resourceId, String message) { + audit(Level.ERROR, resourceId, message); + } + + public void assertAllExpectationsMatched() { + for (AuditExpectation expectation : expectations) { + expectation.assertMatched(); + } + } + + public interface AuditExpectation { + void match(Level level, String resourceId, String message); + + void assertMatched(); + } + + public abstract static class AbstractAuditExpectation implements AuditExpectation { + 
protected final String expectedName; + protected final Level expectedLevel; + protected final String expectedResourceId; + protected final String expectedMessage; + volatile boolean saw; + + public AbstractAuditExpectation(String expectedName, Level expectedLevel, String expectedResourceId, String expectedMessage) { + this.expectedName = expectedName; + this.expectedLevel = expectedLevel; + this.expectedResourceId = expectedResourceId; + this.expectedMessage = expectedMessage; + this.saw = false; + } + + @Override + public void match(final Level level, final String resourceId, final String message) { + if (level.equals(expectedLevel) && resourceId.equals(expectedResourceId) && innerMatch(level, resourceId, message)) { + if (Regex.isSimpleMatchPattern(expectedMessage)) { + if (Regex.simpleMatch(expectedMessage, message)) { + saw = true; + } + } else { + if (message.contains(expectedMessage)) { + saw = true; + } + } + } + } + + public boolean innerMatch(final Level level, final String resourceId, final String message) { + return true; + } + } + + public static class SeenAuditExpectation extends AbstractAuditExpectation { + + public SeenAuditExpectation(String expectedName, Level expectedLevel, String expectedResourceId, String expectedMessage) { + super(expectedName, expectedLevel, expectedResourceId, expectedMessage); + } + + @Override + public void assertMatched() { + assertThat("expected to see " + expectedName + " but did not", saw, equalTo(true)); + } + } + + public static class UnseenAuditExpectation extends AbstractAuditExpectation { + + public UnseenAuditExpectation(String expectedName, Level expectedLevel, String expectedResourceId, String expectedMessage) { + super(expectedName, expectedLevel, expectedResourceId, expectedMessage); + } + + @Override + public void assertMatched() { + assertThat("expected not to see " + expectedName + " but did", saw, equalTo(false)); + } + } + + + private void audit(Level level, String resourceId, String message) { + for 
(AuditExpectation expectation : expectations) { + expectation.match(level, resourceId, message); + } + } + +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformActionTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformActionTests.java index 7f0158548c1..6a41ff8cd2c 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformActionTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformActionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -23,8 +22,8 @@ import static org.mockito.Mockito.mock; public class RestDeleteDataFrameTransformActionTests extends ESTestCase { public void testBodyRejection() throws Exception { - final RestDeleteDataFrameTransformAction handler = new RestDeleteDataFrameTransformAction(Settings.EMPTY, - mock(RestController.class)); + final RestDeleteDataFrameTransformAction handler = new RestDeleteDataFrameTransformAction( + mock(RestController.class)); try (XContentBuilder builder = JsonXContent.contentBuilder()) { builder.startObject(); { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutorTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutorTests.java index 3d8b290ad80..25ef6e43cbe 100644 --- 
a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutorTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutorTests.java @@ -99,9 +99,10 @@ public class DataFrameTransformPersistentTasksExecutorTests extends ESTestCase { ClusterState cs = csBuilder.build(); Client client = mock(Client.class); + DataFrameAuditor mockAuditor = mock(DataFrameAuditor.class); DataFrameTransformsConfigManager transformsConfigManager = new DataFrameTransformsConfigManager(client, xContentRegistry()); DataFrameTransformsCheckpointService dataFrameTransformsCheckpointService = new DataFrameTransformsCheckpointService(client, - transformsConfigManager); + transformsConfigManager, mockAuditor); ClusterSettings cSettings = new ClusterSettings(Settings.EMPTY, Collections.singleton(DataFrameTransformTask.NUM_FAILURE_RETRIES_SETTING)); ClusterService clusterService = mock(ClusterService.class); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java index 9bfbe352f83..a131ffd62f9 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java @@ -45,6 +45,6 @@ public class Deprecation extends Plugin implements ActionPlugin { Supplier nodesInCluster) { - return Collections.singletonList(new RestDeprecationInfoAction(settings, restController)); + return Collections.singletonList(new RestDeprecationInfoAction(restController)); } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java index 
f1670694387..1ffd19c46b4 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/RestDeprecationInfoAction.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -24,8 +23,7 @@ public class RestDeprecationInfoAction extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestDeprecationInfoAction.class); private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); - public RestDeprecationInfoAction(Settings settings, RestController controller) { - super(settings); + public RestDeprecationInfoAction(RestController controller) { controller.registerWithDeprecatedHandler( RestRequest.Method.GET, "/_migration/deprecations", this, RestRequest.Method.GET, "/_xpack/migration/deprecations", deprecationLogger); diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java index 95198c89939..1953889df5d 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java @@ -92,6 +92,6 @@ public class FrozenIndices extends Plugin implements ActionPlugin, EnginePlugin IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { - return Collections.singletonList(new 
RestFreezeIndexAction(settings, restController)); + return Collections.singletonList(new RestFreezeIndexAction(restController)); } } diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java index b8271724177..43429a98aaf 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.frozen.rest.action; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.protocol.xpack.frozen.FreezeRequest; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -17,8 +16,8 @@ import org.elasticsearch.xpack.core.XPackClient; import org.elasticsearch.xpack.core.rest.XPackRestHandler; public final class RestFreezeIndexAction extends XPackRestHandler { - public RestFreezeIndexAction(Settings settings, RestController controller) { - super(settings); + + public RestFreezeIndexAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/{index}/_freeze", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_unfreeze", this); } diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java index eba19eec807..f273d25d630 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/Graph.java @@ -64,6 +64,6 @@ public class Graph 
extends Plugin implements ActionPlugin { if (false == enabled) { return emptyList(); } - return singletonList(new RestGraphAction(settings, restController)); + return singletonList(new RestGraphAction(restController)); } } diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index 130d6deed56..39a293a6cf6 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; @@ -63,8 +62,7 @@ public class RestGraphAction extends XPackRestHandler { public static final ParseField BOOST_FIELD = new ParseField("boost"); public static final ParseField TERM_FIELD = new ParseField("term"); - public RestGraphAction(Settings settings, RestController controller) { - super(settings); + public RestGraphAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/{index}/_graph/explore", this, diff --git a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java index 486ac4e70e3..fb91e6fc5ee 100644 --- a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java +++ 
b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.graph.rest.action; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.rest.FakeRestRequest; @@ -18,7 +17,7 @@ public class RestGraphActionTests extends RestActionTestCase { @Before public void setUpAction() { - new RestGraphAction(Settings.EMPTY, controller()); + new RestGraphAction(controller()); } public void testTypeInPath() { diff --git a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle index 8ba0a758f73..a9eca7c4176 100644 --- a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle @@ -35,8 +35,8 @@ testClusters.'leader-cluster' { task 'follow-cluster'(type: RestIntegTestTask) { dependsOn 'leader-cluster' - useCluster testClusters.'leader-cluster' runner { + useCluster testClusters.'leader-cluster' systemProperty 'tests.target_cluster', 'follow' nonInputProperties.systemProperty 'tests.leader_host', "${-> testClusters."leader-cluster".getAllHttpSocketURI().get(0)}" diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index eb92f2c9535..0e196b47fc2 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -205,21 +205,21 @@ public class IndexLifecycle extends Plugin implements ActionPlugin { return emptyList(); } return Arrays.asList( - new RestPutLifecycleAction(settings, restController), - new RestGetLifecycleAction(settings, restController), - new RestDeleteLifecycleAction(settings, 
restController), - new RestExplainLifecycleAction(settings, restController), - new RestRemoveIndexLifecyclePolicyAction(settings, restController), - new RestMoveToStepAction(settings, restController), - new RestRetryAction(settings, restController), - new RestStopAction(settings, restController), - new RestStartILMAction(settings, restController), - new RestGetStatusAction(settings, restController), + new RestPutLifecycleAction(restController), + new RestGetLifecycleAction(restController), + new RestDeleteLifecycleAction(restController), + new RestExplainLifecycleAction(restController), + new RestRemoveIndexLifecyclePolicyAction(restController), + new RestMoveToStepAction(restController), + new RestRetryAction(restController), + new RestStopAction(restController), + new RestStartILMAction(restController), + new RestGetStatusAction(restController), // Snapshot lifecycle actions - new RestPutSnapshotLifecycleAction(settings, restController), - new RestDeleteSnapshotLifecycleAction(settings, restController), - new RestGetSnapshotLifecycleAction(settings, restController), - new RestExecuteSnapshotLifecycleAction(settings, restController) + new RestPutSnapshotLifecycleAction(restController), + new RestDeleteSnapshotLifecycleAction(restController), + new RestGetSnapshotLifecycleAction(restController), + new RestExecuteSnapshotLifecycleAction(restController) ); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestDeleteLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestDeleteLifecycleAction.java index 622d772669f..999eaafdb03 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestDeleteLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestDeleteLifecycleAction.java @@ -7,19 +7,15 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ilm.action.DeleteLifecycleAction; -import java.io.IOException; - public class RestDeleteLifecycleAction extends BaseRestHandler { - public RestDeleteLifecycleAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, "/_ilm/policy/{name}", this); } @@ -29,7 +25,7 @@ public class RestDeleteLifecycleAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String lifecycleName = restRequest.param("name"); DeleteLifecycleAction.Request deleteLifecycleRequest = new DeleteLifecycleAction.Request(lifecycleName); deleteLifecycleRequest.timeout(restRequest.paramAsTime("timeout", deleteLifecycleRequest.timeout())); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestExplainLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestExplainLifecycleAction.java index bb855acf8cc..d8fb1277552 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestExplainLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestExplainLifecycleAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import 
org.elasticsearch.rest.RestRequest; @@ -17,12 +16,9 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ilm.ExplainLifecycleRequest; import org.elasticsearch.xpack.core.ilm.action.ExplainLifecycleAction; -import java.io.IOException; - public class RestExplainLifecycleAction extends BaseRestHandler { - public RestExplainLifecycleAction(Settings settings, RestController controller) { - super(settings); + public RestExplainLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/{index}/_ilm/explain", this); } @@ -32,7 +28,7 @@ public class RestExplainLifecycleAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String[] indexes = Strings.splitStringByCommaToArray(restRequest.param("index")); ExplainLifecycleRequest explainLifecycleRequest = new ExplainLifecycleRequest(); explainLifecycleRequest.indices(indexes); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetLifecycleAction.java index 607bc4e22f9..794de5d5763 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetLifecycleAction.java @@ -8,19 +8,15 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import 
org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction; -import java.io.IOException; - public class RestGetLifecycleAction extends BaseRestHandler { - public RestGetLifecycleAction(Settings settings, RestController controller) { - super(settings); + public RestGetLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_ilm/policy", this); controller.registerHandler(RestRequest.Method.GET, "/_ilm/policy/{name}", this); } @@ -31,7 +27,7 @@ public class RestGetLifecycleAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String[] lifecycleNames = Strings.splitStringByCommaToArray(restRequest.param("name")); GetLifecycleAction.Request getLifecycleRequest = new GetLifecycleAction.Request(lifecycleNames); getLifecycleRequest.timeout(restRequest.paramAsTime("timeout", getLifecycleRequest.timeout())); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java index 2b6ac1ea61f..5ff3c046321 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -16,8 +15,7 @@ import org.elasticsearch.xpack.core.ilm.action.GetStatusAction; public class RestGetStatusAction extends BaseRestHandler { - public RestGetStatusAction(Settings settings, RestController controller) { - 
super(settings); + public RestGetStatusAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_ilm/status", this); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java index 7ed54bb3e80..44a765b0c6a 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -20,8 +19,7 @@ import java.io.IOException; public class RestMoveToStepAction extends BaseRestHandler { - public RestMoveToStepAction(Settings settings, RestController controller) { - super(settings); + public RestMoveToStepAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST,"/_ilm/move/{name}", this); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestPutLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestPutLifecycleAction.java index c8cf871c9f5..5e357c9535e 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestPutLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestPutLifecycleAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; 
@@ -19,8 +18,7 @@ import java.io.IOException; public class RestPutLifecycleAction extends BaseRestHandler { - public RestPutLifecycleAction(Settings settings, RestController controller) { - super(settings); + public RestPutLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_ilm/policy/{name}", this); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRemoveIndexLifecyclePolicyAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRemoveIndexLifecyclePolicyAction.java index 1c537ba5c9a..7bca784878b 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRemoveIndexLifecyclePolicyAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRemoveIndexLifecyclePolicyAction.java @@ -9,19 +9,15 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.ilm.action.RemoveIndexLifecyclePolicyAction; -import java.io.IOException; - public class RestRemoveIndexLifecyclePolicyAction extends BaseRestHandler { - public RestRemoveIndexLifecyclePolicyAction(Settings settings, RestController controller) { - super(settings); + public RestRemoveIndexLifecyclePolicyAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/{index}/_ilm/remove", this); } @@ -31,7 +27,7 @@ public class RestRemoveIndexLifecyclePolicyAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected 
RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String[] indexes = Strings.splitStringByCommaToArray(restRequest.param("index")); RemoveIndexLifecyclePolicyAction.Request changePolicyRequest = new RemoveIndexLifecyclePolicyAction.Request(indexes); changePolicyRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", changePolicyRequest.masterNodeTimeout())); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRetryAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRetryAction.java index d595540d67f..e14e1dac049 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRetryAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRetryAction.java @@ -10,7 +10,6 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -19,8 +18,7 @@ import org.elasticsearch.xpack.core.ilm.action.RetryAction; public class RestRetryAction extends BaseRestHandler { - public RestRetryAction(Settings settings, RestController controller) { - super(settings); + public RestRetryAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/{index}/_ilm/retry", this); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStartILMAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStartILMAction.java index 6c2262b9564..b240a5e274e 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStartILMAction.java +++ 
b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStartILMAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -17,8 +16,7 @@ import org.elasticsearch.xpack.core.ilm.action.StartILMAction; public class RestStartILMAction extends BaseRestHandler { - public RestStartILMAction(Settings settings, RestController controller) { - super(settings); + public RestStartILMAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/_ilm/start", this); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStopAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStopAction.java index bd0ebb9d5b2..3843d4da773 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStopAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStopAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ilm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -17,8 +16,7 @@ import org.elasticsearch.xpack.core.ilm.action.StopILMAction; public class RestStopAction extends BaseRestHandler { - public RestStopAction(Settings settings, RestController controller) { - super(settings); + public RestStopAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, "/_ilm/stop", this); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestDeleteSnapshotLifecycleAction.java 
b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestDeleteSnapshotLifecycleAction.java index a95ec91b802..658816d2bab 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestDeleteSnapshotLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestDeleteSnapshotLifecycleAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.slm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -16,8 +15,7 @@ import org.elasticsearch.xpack.core.slm.action.DeleteSnapshotLifecycleAction; public class RestDeleteSnapshotLifecycleAction extends BaseRestHandler { - public RestDeleteSnapshotLifecycleAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteSnapshotLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, "/_slm/policy/{name}", this); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java index c7082866bc0..a644df789d0 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java @@ -7,19 +7,15 @@ package org.elasticsearch.xpack.slm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import 
org.elasticsearch.xpack.core.slm.action.ExecuteSnapshotLifecycleAction; -import java.io.IOException; - public class RestExecuteSnapshotLifecycleAction extends BaseRestHandler { - public RestExecuteSnapshotLifecycleAction(Settings settings, RestController controller) { - super(settings); + public RestExecuteSnapshotLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_slm/policy/{name}/_execute", this); } @@ -29,7 +25,7 @@ public class RestExecuteSnapshotLifecycleAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { String snapLifecycleId = request.param("name"); ExecuteSnapshotLifecycleAction.Request req = new ExecuteSnapshotLifecycleAction.Request(snapLifecycleId); req.timeout(request.paramAsTime("timeout", req.timeout())); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java index fc1dd48ea45..cd2e67bb732 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.slm.action; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -17,8 +16,7 @@ import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleAction; public class RestGetSnapshotLifecycleAction extends BaseRestHandler { - public RestGetSnapshotLifecycleAction(Settings 
settings, RestController controller) { - super(settings); + public RestGetSnapshotLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "/_slm/policy", this); controller.registerHandler(RestRequest.Method.GET, "/_slm/policy/{name}", this); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestPutSnapshotLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestPutSnapshotLifecycleAction.java index e0b4f24d985..a5183739f3f 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestPutSnapshotLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestPutSnapshotLifecycleAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.slm.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -19,8 +18,7 @@ import java.io.IOException; public class RestPutSnapshotLifecycleAction extends BaseRestHandler { - public RestPutSnapshotLifecycleAction(Settings settings, RestController controller) { - super(settings); + public RestPutSnapshotLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_slm/policy/{name}", this); } diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 2fa1d8d4098..8342a0f9430 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -61,14 +61,16 @@ integTest.runner { 'ml/data_frame_analytics_crud/Test put config given dest without index', 'ml/data_frame_analytics_crud/Test put config given missing analysis', 'ml/data_frame_analytics_crud/Test put config given empty analysis', - 
'ml/data_frame_analytics_crud/Test get given missing analytics', - 'ml/data_frame_analytics_crud/Test get given missing analytics and allow_no_match is false', - 'ml/data_frame_analytics_crud/Test get given expression without matches and allow_no_match is false', - 'ml/data_frame_analytics_crud/Test get stats given missing analytics', - 'ml/data_frame_analytics_crud/Test get stats given missing analytics and allow_no_match is false', - 'ml/data_frame_analytics_crud/Test get stats given expression without matches and allow_no_match is false', - 'ml/data_frame_analytics_crud/Test delete given missing config', 'ml/data_frame_analytics_crud/Test max model memory limit', + 'ml/data_frame_analytics_crud/Test put regression given dependent_variable is not defined', + 'ml/data_frame_analytics_crud/Test put regression given negative lambda', + 'ml/data_frame_analytics_crud/Test put regression given negative gamma', + 'ml/data_frame_analytics_crud/Test put regression given eta less than 1e-3', + 'ml/data_frame_analytics_crud/Test put regression given eta greater than one', + 'ml/data_frame_analytics_crud/Test put regression given maximum_number_trees is zero', + 'ml/data_frame_analytics_crud/Test put regression given maximum_number_trees is greater than 2k', + 'ml/data_frame_analytics_crud/Test put regression given feature_bag_fraction is negative', + 'ml/data_frame_analytics_crud/Test put regression given feature_bag_fraction is greater than one', 'ml/evaluate_data_frame/Test given missing index', 'ml/evaluate_data_frame/Test given index does not exist', 'ml/evaluate_data_frame/Test given missing evaluation', @@ -135,7 +137,6 @@ integTest.runner { 'ml/post_data/Test POST data with invalid parameters', 'ml/preview_datafeed/Test preview missing datafeed', 'ml/revert_model_snapshot/Test revert model with invalid snapshotId', - 'ml/start_data_frame_analytics/Test start given missing config', 'ml/start_data_frame_analytics/Test start given missing source index', 
'ml/start_data_frame_analytics/Test start given source index has no compatible fields', 'ml/start_data_frame_analytics/Test start with inconsistent body/param ids', @@ -143,10 +144,6 @@ integTest.runner { 'ml/start_stop_datafeed/Test start datafeed job, but not open', 'ml/start_stop_datafeed/Test start non existing datafeed', 'ml/start_stop_datafeed/Test stop non existing datafeed', - 'ml/stop_data_frame_analytics/Test stop given missing config and allow_no_match is true', - 'ml/stop_data_frame_analytics/Test stop given missing config and allow_no_match is false', - 'ml/stop_data_frame_analytics/Test stop with expression that does not match and allow_no_match is false', - 'ml/stop_data_frame_analytics/Test stop with inconsistent body/param ids', 'ml/update_model_snapshot/Test without description', 'ml/validate/Test invalid job config', 'ml/validate/Test job config is invalid because model snapshot id set', diff --git a/x-pack/plugin/ml/qa/ml-with-security/roles.yml b/x-pack/plugin/ml/qa/ml-with-security/roles.yml index 8533b81c073..48c4abb9f42 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/roles.yml +++ b/x-pack/plugin/ml/qa/ml-with-security/roles.yml @@ -13,5 +13,6 @@ minimal: - indices:admin/refresh - read - index + - view_index_metadata - indices:data/write/bulk - indices:data/write/index diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index 69732625111..6eece2b0ec2 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -24,6 +24,10 @@ task copyKeyCerts(type: Copy) { sourceSets.test.resources.srcDir(keystoreDir) processTestResources.dependsOn(copyKeyCerts) +// Disabled and tracked here https://github.com/elastic/elasticsearch/issues/45405 +integTest.enabled = false +testingConventions.enabled = false + integTest { dependsOn copyKeyCerts runner { diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java index 56ea04793c3..520f7a30ece 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState; import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetection; +import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression; import java.io.IOException; import java.util.ArrayList; @@ -118,4 +119,13 @@ abstract class MlNativeDataFrameAnalyticsIntegTestCase extends MlNativeIntegTest assertThat(stats.get(0).getId(), equalTo(id)); assertThat(stats.get(0).getState(), equalTo(state)); } + + protected static DataFrameAnalyticsConfig buildRegressionAnalytics(String id, String[] sourceIndex, String destIndex, + @Nullable String resultsField, String dependentVariable) { + DataFrameAnalyticsConfig.Builder configBuilder = new DataFrameAnalyticsConfig.Builder(id); + configBuilder.setSource(new DataFrameAnalyticsSource(sourceIndex, null)); + configBuilder.setDest(new DataFrameAnalyticsDest(destIndex, resultsField)); + configBuilder.setAnalysis(new Regression(dependentVariable)); + return configBuilder.build(); + } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index 3e4fd4f7003..e78c6015ec1 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -21,9 +21,12 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState; import org.junit.After; +import java.util.Arrays; +import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -362,4 +365,69 @@ public class RunDataFrameAnalyticsIT extends MlNativeDataFrameAnalyticsIntegTest .setQuery(QueryBuilders.existsQuery("ml.outlier_score")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/45425") + public void testRegressionWithNumericFeatureAndFewDocuments() throws Exception { + String sourceIndex = "test-regression-with-numeric-feature-and-few-docs"; + + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + List featureValues = Arrays.asList(1.0, 2.0, 3.0); + List dependentVariableValues = Arrays.asList(10.0, 20.0, 30.0); + + for (int i = 0; i < 350; i++) { + Double field = featureValues.get(i % 3); + Double value = dependentVariableValues.get(i % 3); + + IndexRequest indexRequest = new IndexRequest(sourceIndex); + if (i < 300) { + indexRequest.source("feature", field, "variable", value); + } 
else { + indexRequest.source("feature", field); + } + bulkRequestBuilder.add(indexRequest); + } + BulkResponse bulkResponse = bulkRequestBuilder.get(); + if (bulkResponse.hasFailures()) { + fail("Failed to index data: " + bulkResponse.buildFailureMessage()); + } + + String id = "test_regression_with_numeric_feature_and_few_docs"; + DataFrameAnalyticsConfig config = buildRegressionAnalytics(id, new String[] {sourceIndex}, + sourceIndex + "-results", null, "variable"); + registerAnalytics(config); + putAnalytics(config); + + assertState(id, DataFrameAnalyticsState.STOPPED); + + startAnalytics(id); + waitUntilAnalyticsIsStopped(id); + + int resultsWithPrediction = 0; + SearchResponse sourceData = client().prepareSearch(sourceIndex).get(); + for (SearchHit hit : sourceData.getHits()) { + GetResponse destDocGetResponse = client().prepareGet().setIndex(config.getDest().getIndex()).setId(hit.getId()).get(); + assertThat(destDocGetResponse.isExists(), is(true)); + Map sourceDoc = hit.getSourceAsMap(); + Map destDoc = destDocGetResponse.getSource(); + for (String field : sourceDoc.keySet()) { + assertThat(destDoc.containsKey(field), is(true)); + assertThat(destDoc.get(field), equalTo(sourceDoc.get(field))); + } + assertThat(destDoc.containsKey("ml"), is(true)); + + @SuppressWarnings("unchecked") + Map resultsObject = (Map) destDoc.get("ml"); + + if (resultsObject.containsKey("variable_prediction")) { + resultsWithPrediction++; + double featureValue = (double) destDoc.get("feature"); + double predictionValue = (double) resultsObject.get("variable_prediction"); + // it seems for this case values can be as far off as 2.0 + assertThat(predictionValue, closeTo(10 * featureValue, 2.0)); + } + } + assertThat(resultsWithPrediction, greaterThan(0)); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 1e4a65c4e50..a56317193a7 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -624,59 +624,59 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu return emptyList(); } return Arrays.asList( - new RestGetJobsAction(settings, restController), - new RestGetJobStatsAction(settings, restController), - new RestMlInfoAction(settings, restController), - new RestPutJobAction(settings, restController), - new RestPostJobUpdateAction(settings, restController), - new RestDeleteJobAction(settings, restController), - new RestOpenJobAction(settings, restController), - new RestGetFiltersAction(settings, restController), - new RestPutFilterAction(settings, restController), - new RestUpdateFilterAction(settings, restController), - new RestDeleteFilterAction(settings, restController), - new RestGetInfluencersAction(settings, restController), - new RestGetRecordsAction(settings, restController), - new RestGetBucketsAction(settings, restController), - new RestGetOverallBucketsAction(settings, restController), - new RestPostDataAction(settings, restController), - new RestCloseJobAction(settings, restController), - new RestFlushJobAction(settings, restController), - new RestValidateDetectorAction(settings, restController), - new RestValidateJobConfigAction(settings, restController), - new RestGetCategoriesAction(settings, restController), - new RestGetModelSnapshotsAction(settings, restController), - new RestRevertModelSnapshotAction(settings, restController), - new RestUpdateModelSnapshotAction(settings, restController), - new RestGetDatafeedsAction(settings, restController), - new RestGetDatafeedStatsAction(settings, restController), - new RestPutDatafeedAction(settings, restController), - new RestUpdateDatafeedAction(settings, restController), - new RestDeleteDatafeedAction(settings, restController), - new RestPreviewDatafeedAction(settings, restController), - new 
RestStartDatafeedAction(settings, restController), - new RestStopDatafeedAction(settings, restController), - new RestDeleteModelSnapshotAction(settings, restController), - new RestDeleteExpiredDataAction(settings, restController), - new RestForecastJobAction(settings, restController), - new RestDeleteForecastAction(settings, restController), - new RestGetCalendarsAction(settings, restController), - new RestPutCalendarAction(settings, restController), - new RestDeleteCalendarAction(settings, restController), - new RestDeleteCalendarEventAction(settings, restController), - new RestDeleteCalendarJobAction(settings, restController), - new RestPutCalendarJobAction(settings, restController), - new RestGetCalendarEventsAction(settings, restController), - new RestPostCalendarEventAction(settings, restController), - new RestFindFileStructureAction(settings, restController), - new RestSetUpgradeModeAction(settings, restController), - new RestGetDataFrameAnalyticsAction(settings, restController), - new RestGetDataFrameAnalyticsStatsAction(settings, restController), - new RestPutDataFrameAnalyticsAction(settings, restController), - new RestDeleteDataFrameAnalyticsAction(settings, restController), - new RestStartDataFrameAnalyticsAction(settings, restController), - new RestStopDataFrameAnalyticsAction(settings, restController), - new RestEvaluateDataFrameAction(settings, restController) + new RestGetJobsAction(restController), + new RestGetJobStatsAction(restController), + new RestMlInfoAction(restController), + new RestPutJobAction(restController), + new RestPostJobUpdateAction(restController), + new RestDeleteJobAction(restController), + new RestOpenJobAction(restController), + new RestGetFiltersAction(restController), + new RestPutFilterAction(restController), + new RestUpdateFilterAction(restController), + new RestDeleteFilterAction(restController), + new RestGetInfluencersAction(restController), + new RestGetRecordsAction(restController), + new 
RestGetBucketsAction(restController), + new RestGetOverallBucketsAction(restController), + new RestPostDataAction(restController), + new RestCloseJobAction(restController), + new RestFlushJobAction(restController), + new RestValidateDetectorAction(restController), + new RestValidateJobConfigAction(restController), + new RestGetCategoriesAction(restController), + new RestGetModelSnapshotsAction(restController), + new RestRevertModelSnapshotAction(restController), + new RestUpdateModelSnapshotAction(restController), + new RestGetDatafeedsAction(restController), + new RestGetDatafeedStatsAction(restController), + new RestPutDatafeedAction(restController), + new RestUpdateDatafeedAction(restController), + new RestDeleteDatafeedAction(restController), + new RestPreviewDatafeedAction(restController), + new RestStartDatafeedAction(restController), + new RestStopDatafeedAction(restController), + new RestDeleteModelSnapshotAction(restController), + new RestDeleteExpiredDataAction(restController), + new RestForecastJobAction(restController), + new RestDeleteForecastAction(restController), + new RestGetCalendarsAction(restController), + new RestPutCalendarAction(restController), + new RestDeleteCalendarAction(restController), + new RestDeleteCalendarEventAction(restController), + new RestDeleteCalendarJobAction(restController), + new RestPutCalendarJobAction(restController), + new RestGetCalendarEventsAction(restController), + new RestPostCalendarEventAction(restController), + new RestFindFileStructureAction(restController), + new RestSetUpgradeModeAction(restController), + new RestGetDataFrameAnalyticsAction(restController), + new RestGetDataFrameAnalyticsStatsAction(restController), + new RestPutDataFrameAnalyticsAction(restController), + new RestDeleteDataFrameAnalyticsAction(restController), + new RestStartDataFrameAnalyticsAction(restController), + new RestStopDataFrameAnalyticsAction(restController), + new RestEvaluateDataFrameAction(restController) ); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java index 7dafbb5f4dc..8c741e3c535 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java @@ -16,10 +16,12 @@ import org.elasticsearch.search.SearchHit; import java.io.IOException; import java.text.ParseException; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Set; /** * Represents a field to be extracted by the datafeed. @@ -37,11 +39,14 @@ public abstract class ExtractedField { /** The name of the field we extract */ protected final String name; + private final Set types; + private final ExtractionMethod extractionMethod; - protected ExtractedField(String alias, String name, ExtractionMethod extractionMethod) { + protected ExtractedField(String alias, String name, Set types, ExtractionMethod extractionMethod) { this.alias = Objects.requireNonNull(alias); this.name = Objects.requireNonNull(name); + this.types = Objects.requireNonNull(types); this.extractionMethod = Objects.requireNonNull(extractionMethod); } @@ -53,6 +58,10 @@ public abstract class ExtractedField { return name; } + public Set getTypes() { + return types; + } + public ExtractionMethod getExtractionMethod() { return extractionMethod; } @@ -65,32 +74,32 @@ public abstract class ExtractedField { return null; } - public static ExtractedField newTimeField(String name, ExtractionMethod extractionMethod) { + public static ExtractedField newTimeField(String name, Set types, ExtractionMethod extractionMethod) { if (extractionMethod == ExtractionMethod.SOURCE) { throw new IllegalArgumentException("time field cannot 
be extracted from source"); } - return new TimeField(name, extractionMethod); + return new TimeField(name, types, extractionMethod); } public static ExtractedField newGeoShapeField(String alias, String name) { - return new GeoShapeField(alias, name); + return new GeoShapeField(alias, name, Collections.singleton("geo_shape")); } public static ExtractedField newGeoPointField(String alias, String name) { - return new GeoPointField(alias, name); + return new GeoPointField(alias, name, Collections.singleton("geo_point")); } - public static ExtractedField newField(String name, ExtractionMethod extractionMethod) { - return newField(name, name, extractionMethod); + public static ExtractedField newField(String name, Set types, ExtractionMethod extractionMethod) { + return newField(name, name, types, extractionMethod); } - public static ExtractedField newField(String alias, String name, ExtractionMethod extractionMethod) { + public static ExtractedField newField(String alias, String name, Set types, ExtractionMethod extractionMethod) { switch (extractionMethod) { case DOC_VALUE: case SCRIPT_FIELD: - return new FromFields(alias, name, extractionMethod); + return new FromFields(alias, name, types, extractionMethod); case SOURCE: - return new FromSource(alias, name); + return new FromSource(alias, name, types); default: throw new IllegalArgumentException("Invalid extraction method [" + extractionMethod + "]"); } @@ -98,7 +107,7 @@ public abstract class ExtractedField { public ExtractedField newFromSource() { if (supportsFromSource()) { - return new FromSource(alias, name); + return new FromSource(alias, name, types); } throw new IllegalStateException("Field (alias [" + alias + "], name [" + name + "]) should be extracted via [" + extractionMethod + "] and cannot be extracted from source"); @@ -106,8 +115,8 @@ public abstract class ExtractedField { private static class FromFields extends ExtractedField { - FromFields(String alias, String name, ExtractionMethod extractionMethod) 
{ - super(alias, name, extractionMethod); + FromFields(String alias, String name, Set types, ExtractionMethod extractionMethod) { + super(alias, name, types, extractionMethod); } @Override @@ -129,8 +138,8 @@ public abstract class ExtractedField { private static class GeoShapeField extends FromSource { private static final WellKnownText wkt = new WellKnownText(true, new StandardValidator(true)); - GeoShapeField(String alias, String name) { - super(alias, name); + GeoShapeField(String alias, String name, Set types) { + super(alias, name, types); } @Override @@ -186,8 +195,8 @@ public abstract class ExtractedField { private static class GeoPointField extends FromFields { - GeoPointField(String alias, String name) { - super(alias, name, ExtractionMethod.DOC_VALUE); + GeoPointField(String alias, String name, Set types) { + super(alias, name, types, ExtractionMethod.DOC_VALUE); } @Override @@ -222,8 +231,8 @@ public abstract class ExtractedField { private static final String EPOCH_MILLIS_FORMAT = "epoch_millis"; - TimeField(String name, ExtractionMethod extractionMethod) { - super(name, name, extractionMethod); + TimeField(String name, Set types, ExtractionMethod extractionMethod) { + super(name, name, types, extractionMethod); } @Override @@ -255,8 +264,8 @@ public abstract class ExtractedField { private String[] namePath; - FromSource(String alias, String name) { - super(alias, name, ExtractionMethod.SOURCE); + FromSource(String alias, String name, Set types) { + super(alias, name, types, ExtractionMethod.SOURCE); namePath = name.split("\\."); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFields.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFields.java index 9495c5a2b40..6c90d2c7db2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFields.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFields.java @@ -47,15 +47,6 @@ public class ExtractedFields { return docValueFields; } - /** - * Returns a new instance which only contains fields matching the given extraction method - * @param method the extraction method to filter fields on - * @return a new instance which only contains fields matching the given extraction method - */ - public ExtractedFields filterFields(ExtractedField.ExtractionMethod method) { - return new ExtractedFields(filterFields(method, allFields)); - } - private static List filterFields(ExtractedField.ExtractionMethod method, List fields) { return fields.stream().filter(field -> field.getExtractionMethod() == method).collect(Collectors.toList()); } @@ -79,12 +70,13 @@ public class ExtractedFields { protected ExtractedField detect(String field) { String internalField = field; ExtractedField.ExtractionMethod method = ExtractedField.ExtractionMethod.SOURCE; + Set types = getTypes(field); if (scriptFields.contains(field)) { method = ExtractedField.ExtractionMethod.SCRIPT_FIELD; } else if (isAggregatable(field)) { method = ExtractedField.ExtractionMethod.DOC_VALUE; if (isFieldOfType(field, "date")) { - return ExtractedField.newTimeField(field, method); + return ExtractedField.newTimeField(field, types, method); } } else if (isFieldOfType(field, TEXT)) { String parentField = MlStrings.getParentField(field); @@ -107,7 +99,12 @@ public class ExtractedFields { return ExtractedField.newGeoShapeField(field, internalField); } - return ExtractedField.newField(field, internalField, method); + return ExtractedField.newField(field, internalField, types, method); + } + + private Set getTypes(String field) { + Map fieldCaps = fieldsCapabilities.getField(field); + return fieldCaps == null ? 
Collections.emptySet() : fieldCaps.keySet(); } protected boolean isAggregatable(String field) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFields.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFields.java index cf87671bf33..1067ef63007 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFields.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFields.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Set; @@ -55,12 +56,20 @@ public class TimeBasedExtractedFields extends ExtractedFields { if (scriptFields.contains(timeField) == false && extractionMethodDetector.isAggregatable(timeField) == false) { throw new IllegalArgumentException("cannot retrieve time field [" + timeField + "] because it is not aggregatable"); } - ExtractedField timeExtractedField = ExtractedField.newTimeField(timeField, scriptFields.contains(timeField) ? 
- ExtractedField.ExtractionMethod.SCRIPT_FIELD : ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField timeExtractedField = extractedTimeField(timeField, scriptFields, fieldsCapabilities); List remainingFields = job.allInputFields().stream().filter(f -> !f.equals(timeField)).collect(Collectors.toList()); List allExtractedFields = new ArrayList<>(remainingFields.size() + 1); allExtractedFields.add(timeExtractedField); remainingFields.stream().forEach(field -> allExtractedFields.add(extractionMethodDetector.detect(field))); return new TimeBasedExtractedFields(timeExtractedField, allExtractedFields); } + + private static ExtractedField extractedTimeField(String timeField, Set scriptFields, + FieldCapabilitiesResponse fieldCapabilities) { + if (scriptFields.contains(timeField)) { + return ExtractedField.newTimeField(timeField, Collections.emptySet(), ExtractedField.ExtractionMethod.SCRIPT_FIELD); + } + return ExtractedField.newTimeField(timeField, fieldCapabilities.getField(timeField).keySet(), + ExtractedField.ExtractionMethod.DOC_VALUE); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index fa18f3bb25b..d9f1aa994d5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -29,11 +29,13 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Optional; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -179,7 
+181,7 @@ public class DataFrameDataExtractor { for (int i = 0; i < extractedValues.length; ++i) { ExtractedField field = context.extractedFields.getAllFields().get(i); Object[] values = field.value(hit); - if (values.length == 1 && values[0] instanceof Number) { + if (values.length == 1 && (values[0] instanceof Number || values[0] instanceof String)) { extractedValues[i] = Objects.toString(values[0]); } else { extractedValues = null; @@ -233,6 +235,17 @@ public class DataFrameDataExtractor { return new DataSummary(searchResponse.getHits().getTotalHits().value, context.extractedFields.getAllFields().size()); } + public Set getCategoricalFields() { + Set categoricalFields = new HashSet<>(); + for (ExtractedField extractedField : context.extractedFields.getAllFields()) { + String fieldName = extractedField.getName(); + if (ExtractedFieldsDetector.CATEGORICAL_TYPES.containsAll(extractedField.getTypes())) { + categoricalFields.add(fieldName); + } + } + return categoricalFields; + } + public static class DataSummary { public final long rows; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetector.java index d58eaebe353..3ff8c8a4923 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetector.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.ml.dataframe.extractor; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; @@ -20,6 +22,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import 
org.elasticsearch.xpack.core.ml.utils.NameResolver; import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedField; import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedFields; +import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsIndex; import java.util.ArrayList; import java.util.Arrays; @@ -35,24 +38,24 @@ import java.util.stream.Stream; public class ExtractedFieldsDetector { + private static final Logger LOGGER = LogManager.getLogger(ExtractedFieldsDetector.class); + /** * Fields to ignore. These are mostly internal meta fields. */ private static final List IGNORE_FIELDS = Arrays.asList("_id", "_field_names", "_index", "_parent", "_routing", "_seq_no", - "_source", "_type", "_uid", "_version", "_feature", "_ignored"); + "_source", "_type", "_uid", "_version", "_feature", "_ignored", DataFrameAnalyticsIndex.ID_COPY); - /** - * The types supported by data frames - */ - private static final Set COMPATIBLE_FIELD_TYPES; + public static final Set CATEGORICAL_TYPES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("text", "keyword", "ip"))); + + private static final Set NUMERICAL_TYPES; static { - Set compatibleTypes = Stream.of(NumberFieldMapper.NumberType.values()) + Set numericalTypes = Stream.of(NumberFieldMapper.NumberType.values()) .map(NumberFieldMapper.NumberType::typeName) .collect(Collectors.toSet()); - compatibleTypes.add("scaled_float"); // have to add manually since scaled_float is in a module - - COMPATIBLE_FIELD_TYPES = Collections.unmodifiableSet(compatibleTypes); + numericalTypes.add("scaled_float"); + NUMERICAL_TYPES = Collections.unmodifiableSet(numericalTypes); } private final String[] index; @@ -79,16 +82,18 @@ public class ExtractedFieldsDetector { // Ignore fields under the results object fields.removeIf(field -> field.startsWith(config.getDest().getResultsField() + ".")); + includeAndExcludeFields(fields); removeFieldsWithIncompatibleTypes(fields); - includeAndExcludeFields(fields, index); + 
checkRequiredFieldsArePresent(fields); + + if (fields.isEmpty()) { + throw ExceptionsHelper.badRequestException("No compatible fields could be detected in index {}", Arrays.toString(index)); + } + List sortedFields = new ArrayList<>(fields); // We sort the fields to ensure the checksum for each document is deterministic Collections.sort(sortedFields); - ExtractedFields extractedFields = ExtractedFields.build(sortedFields, Collections.emptySet(), fieldCapabilitiesResponse) - .filterFields(ExtractedField.ExtractionMethod.DOC_VALUE); - if (extractedFields.getAllFields().isEmpty()) { - throw ExceptionsHelper.badRequestException("No compatible fields could be detected in index {}", Arrays.toString(index)); - } + ExtractedFields extractedFields = ExtractedFields.build(sortedFields, Collections.emptySet(), fieldCapabilitiesResponse); if (extractedFields.getDocValueFields().size() > docValueFieldsLimit) { extractedFields = fetchFromSourceIfSupported(extractedFields); if (extractedFields.getDocValueFields().size() > docValueFieldsLimit) { @@ -120,13 +125,25 @@ public class ExtractedFieldsDetector { while (fieldsIterator.hasNext()) { String field = fieldsIterator.next(); Map fieldCaps = fieldCapabilitiesResponse.getField(field); - if (fieldCaps == null || COMPATIBLE_FIELD_TYPES.containsAll(fieldCaps.keySet()) == false) { + if (fieldCaps == null) { + LOGGER.debug("[{}] Removing field [{}] because it is missing from mappings", config.getId(), field); fieldsIterator.remove(); + } else { + Set fieldTypes = fieldCaps.keySet(); + if (NUMERICAL_TYPES.containsAll(fieldTypes)) { + LOGGER.debug("[{}] field [{}] is compatible as it is numerical", config.getId(), field); + } else if (config.getAnalysis().supportsCategoricalFields() && CATEGORICAL_TYPES.containsAll(fieldTypes)) { + LOGGER.debug("[{}] field [{}] is compatible as it is categorical", config.getId(), field); + } else { + LOGGER.debug("[{}] Removing field [{}] because its types are not supported; types {}", + config.getId(), 
field, fieldTypes); + fieldsIterator.remove(); + } } } } - private void includeAndExcludeFields(Set fields, String[] index) { + private void includeAndExcludeFields(Set fields) { FetchSourceContext analyzedFields = config.getAnalyzedFields(); if (analyzedFields == null) { return; @@ -159,6 +176,16 @@ public class ExtractedFieldsDetector { } } + private void checkRequiredFieldsArePresent(Set fields) { + List missingFields = config.getAnalysis().getRequiredFields() + .stream() + .filter(f -> fields.contains(f) == false) + .collect(Collectors.toList()); + if (missingFields.isEmpty() == false) { + throw ExceptionsHelper.badRequestException("required fields {} are missing", missingFields); + } + } + private ExtractedFields fetchFromSourceIfSupported(ExtractedFields extractedFields) { List adjusted = new ArrayList<>(extractedFields.getAllFields().size()); for (ExtractedField field : extractedFields.getDocValueFields()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessConfig.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessConfig.java index 226498376bb..70a2e213fb6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessConfig.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessConfig.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.analyses.DataFrameAnalysis; import java.io.IOException; import java.util.Objects; +import java.util.Set; public class AnalyticsProcessConfig implements ToXContentObject { @@ -21,21 +22,24 @@ public class AnalyticsProcessConfig implements ToXContentObject { private static final String THREADS = "threads"; private static final String ANALYSIS = "analysis"; private static final String RESULTS_FIELD = "results_field"; + private static final String CATEGORICAL_FIELDS = "categorical_fields"; private final long rows; 
private final int cols; private final ByteSizeValue memoryLimit; private final int threads; - private final DataFrameAnalysis analysis; private final String resultsField; + private final Set categoricalFields; + private final DataFrameAnalysis analysis; public AnalyticsProcessConfig(long rows, int cols, ByteSizeValue memoryLimit, int threads, String resultsField, - DataFrameAnalysis analysis) { + Set categoricalFields, DataFrameAnalysis analysis) { this.rows = rows; this.cols = cols; this.memoryLimit = Objects.requireNonNull(memoryLimit); this.threads = threads; this.resultsField = Objects.requireNonNull(resultsField); + this.categoricalFields = Objects.requireNonNull(categoricalFields); this.analysis = Objects.requireNonNull(analysis); } @@ -51,6 +55,7 @@ public class AnalyticsProcessConfig implements ToXContentObject { builder.field(MEMORY_LIMIT, memoryLimit.getBytes()); builder.field(THREADS, threads); builder.field(RESULTS_FIELD, resultsField); + builder.field(CATEGORICAL_FIELDS, categoricalFields); builder.field(ANALYSIS, new DataFrameAnalysisWrapper(analysis)); builder.endObject(); return builder; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java index cb000a15496..f04ba577be4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.util.List; import java.util.Objects; import java.util.Optional; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutorService; @@ -283,8 +284,9 @@ public class AnalyticsProcessManager { private AnalyticsProcessConfig 
createProcessConfig(DataFrameAnalyticsConfig config, DataFrameDataExtractor dataExtractor) { DataFrameDataExtractor.DataSummary dataSummary = dataExtractor.collectDataSummary(); + Set categoricalFields = dataExtractor.getCategoricalFields(); AnalyticsProcessConfig processConfig = new AnalyticsProcessConfig(dataSummary.rows, dataSummary.cols, - config.getModelMemoryLimit(), 1, config.getDest().getResultsField(), config.getAnalysis()); + config.getModelMemoryLimit(), 1, config.getDest().getResultsField(), categoricalFields, config.getAnalysis()); return processConfig; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java index b3b6d43229e..d3f024e27e8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java @@ -8,13 +8,12 @@ package org.elasticsearch.xpack.ml.rest; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.DeleteExpiredDataAction; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -25,8 +24,7 @@ public class RestDeleteExpiredDataAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteExpiredDataAction.class)); - public RestDeleteExpiredDataAction(Settings settings, RestController controller) { - 
super(settings); + public RestDeleteExpiredDataAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "_delete_expired_data", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java index 03c3fb2a39f..b0e564095ec 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -34,8 +33,7 @@ public class RestFindFileStructureAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestFindFileStructureAction.class)); - public RestFindFileStructureAction(Settings settings, RestController controller) { - super(settings); + public RestFindFileStructureAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "find_file_structure", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java index 3ffdf03ed63..1d775d156ed 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlInfoAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -25,8 +24,7 @@ public class RestMlInfoAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestMlInfoAction.class)); - public RestMlInfoAction(Settings settings, RestController controller) { - super(settings); + public RestMlInfoAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "info", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java index af988a5a680..1d8d28a4d44 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -25,8 +24,7 @@ public class RestSetUpgradeModeAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new 
DeprecationLogger(LogManager.getLogger(RestSetUpgradeModeAction.class)); - public RestSetUpgradeModeAction(Settings settings, RestController controller) { - super(settings); + public RestSetUpgradeModeAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "set_upgrade_mode", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java index 2ab68052d8e..95ec620e017 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -26,8 +25,7 @@ public class RestDeleteCalendarAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarAction.class)); - public RestDeleteCalendarAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteCalendarAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java index 446c642ca0a..da30978461d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarEventAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -27,8 +26,7 @@ public class RestDeleteCalendarEventAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarEventAction.class)); - public RestDeleteCalendarEventAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteCalendarEventAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events/{" + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java index ca7993d9cf0..213d3f712f9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarJobAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.apache.logging.log4j.LogManager; import 
org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestDeleteCalendarJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarJobAction.class)); - public RestDeleteCalendarJobAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteCalendarJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java index bf784b2a927..c299d23ac58 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarEventsAction.java @@ -8,17 +8,16 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction; import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -29,8 +28,7 @@ public class RestGetCalendarEventsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetCalendarEventsAction.class)); - public RestGetCalendarEventsAction(Settings settings, RestController controller) { - super(settings); + public RestGetCalendarEventsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java index 35c5bb5720f..46e4d23cd68 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java @@ -9,16 +9,15 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -30,8 +29,7 @@ public class RestGetCalendarsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetCalendarsAction.class)); - public RestGetCalendarsAction(Settings settings, RestController controller) { - super(settings); + public RestGetCalendarsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java index 3c0092b9c22..5ad91ffd92f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPostCalendarEventAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import 
org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestPostCalendarEventAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPostCalendarEventAction.class)); - public RestPostCalendarEventAction(Settings settings, RestController controller) { - super(settings); + public RestPostCalendarEventAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java index aa14f302804..553bb81cbf9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -28,8 +27,7 @@ public class RestPutCalendarAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutCalendarAction.class)); - public RestPutCalendarAction(Settings settings, RestController controller) { - super(settings); + public RestPutCalendarAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( PUT, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java index 164218820e4..ce47bc3a53a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarJobAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.calendar; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestPutCalendarJobAction extends BaseRestHandler { private static final 
DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutCalendarJobAction.class)); - public RestPutCalendarJobAction(Settings settings, RestController controller) { - super(settings); + public RestPutCalendarJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( PUT, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java index 07dcfd00348..000b92426d8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -27,8 +26,7 @@ public class RestDeleteDatafeedAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteDatafeedAction.class)); - public RestDeleteDatafeedAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteDatafeedAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this, diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java index d31594e1642..65866074dfa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedStatsAction.java @@ -9,14 +9,13 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestGetDatafeedStatsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetDatafeedStatsAction.class)); - public RestGetDatafeedStatsAction(Settings settings, RestController controller) { - super(settings); + public RestGetDatafeedStatsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java index 31b2256c0c6..1ed7ce33ef8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestGetDatafeedsAction.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.GetDatafeedsAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -26,8 +25,7 @@ public class RestGetDatafeedsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetDatafeedsAction.class)); - public RestGetDatafeedsAction(Settings settings, RestController controller) { - super(settings); + public RestGetDatafeedsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java index 9c8b562267f..6088f42f2d3 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPreviewDatafeedAction.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.PreviewDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -26,8 +25,7 @@ public class RestPreviewDatafeedAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPreviewDatafeedAction.class)); - public RestPreviewDatafeedAction(Settings settings, RestController controller) { - super(settings); + public RestPreviewDatafeedAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_preview", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java index fb7e7d25b1d..12e8863f681 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java @@ 
-8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestPutDatafeedAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutDatafeedAction.class)); - public RestPutDatafeedAction(Settings settings, RestController controller) { - super(settings); + public RestPutDatafeedAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( PUT, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java index 1802158284a..3dbe4ceb100 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedAction.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.node.NodeClient; import 
org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -35,8 +34,7 @@ public class RestStartDatafeedAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestStartDatafeedAction.class)); - public RestStartDatafeedAction(Settings settings, RestController controller) { - super(settings); + public RestStartDatafeedAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java index 2eb34d7786d..f1cc37e61a8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStopDatafeedAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -19,10 +18,10 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; -import 
org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction.Response; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -33,8 +32,7 @@ public class RestStopDatafeedAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestStopDatafeedAction.class)); - public RestStopDatafeedAction(Settings settings, RestController controller) { - super(settings); + public RestStopDatafeedAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stop", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java index 06c887c7bee..b36eb5e0078 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import 
org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestUpdateDatafeedAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestUpdateDatafeedAction.class)); - public RestUpdateDatafeedAction(Settings settings, RestController controller) { - super(settings); + public RestUpdateDatafeedAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_update", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestDeleteDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestDeleteDataFrameAnalyticsAction.java index 31a9ba690a9..26edf340b2b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestDeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestDeleteDataFrameAnalyticsAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.rest.dataframe; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -19,8 +18,7 @@ import java.io.IOException; public class RestDeleteDataFrameAnalyticsAction extends BaseRestHandler { - public RestDeleteDataFrameAnalyticsAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteDataFrameAnalyticsAction(RestController controller) { controller.registerHandler(RestRequest.Method.DELETE, MachineLearning.BASE_PATH + 
"data_frame/analytics/{" + DataFrameAnalyticsConfig.ID.getPreferredName() + "}", this); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestEvaluateDataFrameAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestEvaluateDataFrameAction.java index 3b514e12838..b991fec70e5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestEvaluateDataFrameAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestEvaluateDataFrameAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.rest.dataframe; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -18,8 +17,7 @@ import java.io.IOException; public class RestEvaluateDataFrameAction extends BaseRestHandler { - public RestEvaluateDataFrameAction(Settings settings, RestController controller) { - super(settings); + public RestEvaluateDataFrameAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "data_frame/_evaluate", this); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsAction.java index b37ff2b7e59..3c959056b2a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.dataframe; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -21,8 +20,7 @@ import java.io.IOException; public class RestGetDataFrameAnalyticsAction extends BaseRestHandler { - public RestGetDataFrameAnalyticsAction(Settings settings, RestController controller) { - super(settings); + public RestGetDataFrameAnalyticsAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "data_frame/analytics", this); controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "data_frame/analytics/{" + DataFrameAnalyticsConfig.ID.getPreferredName() + "}", this); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsStatsAction.java index 3c363762817..44d43153e75 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsStatsAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.dataframe; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -21,8 +20,7 @@ import java.io.IOException; public class RestGetDataFrameAnalyticsStatsAction extends BaseRestHandler { - public RestGetDataFrameAnalyticsStatsAction(Settings settings, RestController controller) { - super(settings); + public RestGetDataFrameAnalyticsStatsAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "data_frame/analytics/_stats", this); 
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "data_frame/analytics/{" + DataFrameAnalyticsConfig.ID.getPreferredName() + "}/_stats", this); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java index e2422c6cdeb..11a25e76a6c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.rest.dataframe; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -20,8 +19,7 @@ import java.io.IOException; public class RestPutDataFrameAnalyticsAction extends BaseRestHandler { - public RestPutDataFrameAnalyticsAction(Settings settings, RestController controller) { - super(settings); + public RestPutDataFrameAnalyticsAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, MachineLearning.BASE_PATH + "data_frame/analytics/{" + DataFrameAnalyticsConfig.ID.getPreferredName() + "}", this); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestStartDataFrameAnalyticsAction.java index 7502f31375f..df98e7bc402 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestStartDataFrameAnalyticsAction.java @@ -6,7 +6,6 @@ package 
org.elasticsearch.xpack.ml.rest.dataframe; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -20,8 +19,7 @@ import java.io.IOException; public class RestStartDataFrameAnalyticsAction extends BaseRestHandler { - public RestStartDataFrameAnalyticsAction(Settings settings, RestController controller) { - super(settings); + public RestStartDataFrameAnalyticsAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "data_frame/analytics/{" + DataFrameAnalyticsConfig.ID.getPreferredName() + "}/_start", this); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestStopDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestStopDataFrameAnalyticsAction.java index 65f1d402735..aadc11a659d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestStopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestStopDataFrameAnalyticsAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.rest.dataframe; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -19,8 +18,7 @@ import java.io.IOException; public class RestStopDataFrameAnalyticsAction extends BaseRestHandler { - public RestStopDataFrameAnalyticsAction(Settings settings, RestController controller) { - super(settings); + public RestStopDataFrameAnalyticsAction(RestController controller) { controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "data_frame/analytics/{" + 
DataFrameAnalyticsConfig.ID.getPreferredName() + "}/_stop", this); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java index 44ede7e75ec..cee57458661 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.filter; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -26,8 +25,7 @@ public class RestDeleteFilterAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteFilterAction.class)); - public RestDeleteFilterAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteFilterAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "filters/{" + Request.FILTER_ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java index 13dffb0b953..558da54b7f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestGetFiltersAction.java @@ -9,15 +9,14 @@ import 
org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -28,8 +27,7 @@ public class RestGetFiltersAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetFiltersAction.class)); - public RestGetFiltersAction(Settings settings, RestController controller) { - super(settings); + public RestGetFiltersAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java index 098a18cb724..b902c099853 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestPutFilterAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.filter; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import 
org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -27,8 +26,7 @@ public class RestPutFilterAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutFilterAction.class)); - public RestPutFilterAction(Settings settings, RestController controller) { - super(settings); + public RestPutFilterAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( PUT, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java index 68f1d40d1f6..b9c544f30c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.filter; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -27,8 +26,7 @@ public class RestUpdateFilterAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestUpdateFilterAction.class)); - public RestUpdateFilterAction(Settings settings, RestController controller) { - super(settings); + public 
RestUpdateFilterAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java index 2d477778b2d..9e8a2eaf7a3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestCloseJobAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.job; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -28,8 +27,7 @@ public class RestCloseJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestCloseJobAction.class)); - public RestCloseJobAction(Settings settings, RestController controller) { - super(settings); + public RestCloseJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_close", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java index 85d6281377f..61b15f7794c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -28,8 +27,7 @@ public class RestDeleteForecastAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteForecastAction.class)); - public RestDeleteForecastAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteForecastAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java index 68231bb385c..9f8aed81b6b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.job; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -32,8 +31,7 @@ public class RestDeleteJobAction extends BaseRestHandler { 
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteJobAction.class)); - public RestDeleteJobAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java index 8503408cd38..a1e6d09285a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestFlushJobAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.job; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.FlushJobAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -33,8 +32,7 @@ public class RestFlushJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestFlushJobAction.class)); - public RestFlushJobAction(Settings settings, RestController controller) { - super(settings); + public 
RestFlushJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_flush", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java index 5fb0bfa5499..12c508dacee 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestForecastJobAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.job; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.ForecastJobAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestForecastJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestForecastJobAction.class)); - public RestForecastJobAction(Settings settings, RestController controller) { - super(settings); + public RestForecastJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + 
"}/_forecast", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java index 7fe908c131a..16b540d04fd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobStatsAction.java @@ -10,14 +10,13 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -28,8 +27,7 @@ public class RestGetJobStatsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetJobStatsAction.class)); - public RestGetJobStatsAction(Settings settings, RestController controller) { - super(settings); + public RestGetJobStatsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_stats", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java index c791dfeeb88..5c9bc484002 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestGetJobsAction.java @@ -10,14 +10,13 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.GetJobsAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -28,9 +27,7 @@ public class RestGetJobsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetJobsAction.class)); - public RestGetJobsAction(Settings settings, RestController controller) { - super(settings); - + public RestGetJobsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java index 53aee5130e5..4e6b02049be 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestOpenJobAction.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.support.master.AcknowledgedResponse; 
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -32,8 +31,7 @@ public class RestOpenJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestOpenJobAction.class)); - public RestOpenJobAction(Settings settings, RestController controller) { - super(settings); + public RestOpenJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_open", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java index 5de9028acd0..b3e57ca5855 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.ml.rest.job; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.PostDataAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import 
java.io.IOException; @@ -29,8 +28,7 @@ public class RestPostDataAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPostDataAction.class)); - public RestPostDataAction(Settings settings, RestController controller) { - super(settings); + public RestPostDataAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_data", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java index f4e1d7ecfe4..a237a8b1874 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.job; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestPostJobUpdateAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new 
DeprecationLogger(LogManager.getLogger(RestPostJobUpdateAction.class)); - public RestPostJobUpdateAction(Settings settings, RestController controller) { - super(settings); + public RestPostJobUpdateAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_update", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java index a7afcf15b2d..fe0e29cde49 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.job; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -27,8 +26,7 @@ public class RestPutJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutJobAction.class)); - public RestPutJobAction(Settings settings, RestController controller) { - super(settings); + public RestPutJobAction(RestController controller) { // TODO: remove deprecated 
endpoint in 8.0.0 controller.registerWithDeprecatedHandler( PUT, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java index b4a2de4570a..7ea082f9fd1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.modelsnapshots; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -27,8 +26,7 @@ public class RestDeleteModelSnapshotAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteModelSnapshotAction.class)); - public RestDeleteModelSnapshotAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteModelSnapshotAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java index 75ebd41d46c..2b2ef21442f 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.modelsnapshots; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction.Request; -import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.ml.MachineLearning; @@ -40,8 +39,7 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetModelSnapshotsAction.class)); - public RestGetModelSnapshotsAction(Settings settings, RestController controller) { - super(settings); + public RestGetModelSnapshotsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java index 48b89abde6c..33b31af9952 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java @@ -8,15 +8,14 @@ package org.elasticsearch.xpack.ml.rest.modelsnapshots; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -29,8 +28,7 @@ public class RestRevertModelSnapshotAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestRevertModelSnapshotAction.class)); - public RestRevertModelSnapshotAction(Settings settings, RestController controller) { - super(settings); + public RestRevertModelSnapshotAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots/{" + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java index 98990edffd3..e1cd0b2d2f5 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java @@ -8,16 +8,15 @@ package org.elasticsearch.xpack.ml.rest.modelsnapshots; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshotField; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -28,8 +27,7 @@ public class RestUpdateModelSnapshotAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestUpdateModelSnapshotAction.class)); - public RestUpdateModelSnapshotAction(Settings settings, RestController controller) { - super(settings); + public RestUpdateModelSnapshotAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java index 754ed08cd65..8bb423199fe 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetBucketsAction.java @@ -9,17 +9,16 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.results.Result; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -31,8 +30,7 @@ public class RestGetBucketsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetBucketsAction.class)); - public RestGetBucketsAction(Settings settings, RestController controller) { - super(settings); + public RestGetBucketsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java index 431a04fef36..e2e069efd76 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java @@ -8,17 +8,16 @@ package org.elasticsearch.xpack.ml.rest.results; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.GetCategoriesAction; import org.elasticsearch.xpack.core.ml.action.GetCategoriesAction.Request; -import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -30,8 +29,7 @@ public class RestGetCategoriesAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetCategoriesAction.class)); - public RestGetCategoriesAction(Settings settings, RestController controller) { - super(settings); + public RestGetCategoriesAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categories/{" diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java index 
2566dcbf705..a1ac9671b3b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetInfluencersAction.java @@ -8,16 +8,15 @@ package org.elasticsearch.xpack.ml.rest.results; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.core.ml.action.GetInfluencersAction; import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.action.GetInfluencersAction; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -29,8 +28,7 @@ public class RestGetInfluencersAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetInfluencersAction.class)); - public RestGetInfluencersAction(Settings settings, RestController controller) { - super(settings); + public RestGetInfluencersAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/influencers", diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java index 49087024012..0b7763a8bf6 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java @@ -8,16 +8,15 @@ package org.elasticsearch.xpack.ml.rest.results; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction; import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction.Request; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -29,8 +28,7 @@ public class RestGetOverallBucketsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetOverallBucketsAction.class)); - public RestGetOverallBucketsAction(Settings settings, RestController controller) { - super(settings); + public RestGetOverallBucketsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/overall_buckets", diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java index b58105f847e..0ce2b5e7099 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetRecordsAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.results; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -29,8 +28,7 @@ public class RestGetRecordsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetRecordsAction.class)); - public RestGetRecordsAction(Settings settings, RestController controller) { - super(settings); + public RestGetRecordsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/records", diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java index c24cb63380a..3cf5f89f919 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateDetectorAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.validate; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -26,8 +25,7 @@ public class RestValidateDetectorAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestValidateDetectorAction.class)); - public RestValidateDetectorAction(Settings settings, RestController controller) { - super(settings); + public RestValidateDetectorAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/_validate/detector", this, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java index 97795ccba48..9f16162d172 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/validate/RestValidateJobConfigAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.rest.validate; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -26,8 +25,7 @@ public class RestValidateJobConfigAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestValidateJobConfigAction.class)); - public RestValidateJobConfigAction(Settings settings, RestController controller) { - super(settings); + public RestValidateJobConfigAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 
controller.registerWithDeprecatedHandler( POST, MachineLearning.BASE_PATH + "anomaly_detectors/_validate", this, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java index 6969c97be0a..87f86a33f99 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; import java.util.Arrays; +import java.util.Collections; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -19,46 +20,51 @@ public class ExtractedFieldTests extends ESTestCase { public void testValueGivenDocValue() { SearchHit hit = new SearchHitBuilder(42).addField("single", "bar").addField("array", Arrays.asList("a", "b")).build(); - ExtractedField single = ExtractedField.newField("single", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField single = ExtractedField.newField("single", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(single.value(hit), equalTo(new String[] { "bar" })); - ExtractedField array = ExtractedField.newField("array", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField array = ExtractedField.newField("array", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(array.value(hit), equalTo(new String[] { "a", "b" })); - ExtractedField missing = ExtractedField.newField("missing", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField missing = ExtractedField.newField("missing",Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); 
assertThat(missing.value(hit), equalTo(new Object[0])); } public void testValueGivenScriptField() { SearchHit hit = new SearchHitBuilder(42).addField("single", "bar").addField("array", Arrays.asList("a", "b")).build(); - ExtractedField single = ExtractedField.newField("single", ExtractedField.ExtractionMethod.SCRIPT_FIELD); + ExtractedField single = ExtractedField.newField("single",Collections.emptySet(), + ExtractedField.ExtractionMethod.SCRIPT_FIELD); assertThat(single.value(hit), equalTo(new String[] { "bar" })); - ExtractedField array = ExtractedField.newField("array", ExtractedField.ExtractionMethod.SCRIPT_FIELD); + ExtractedField array = ExtractedField.newField("array", Collections.emptySet(), ExtractedField.ExtractionMethod.SCRIPT_FIELD); assertThat(array.value(hit), equalTo(new String[] { "a", "b" })); - ExtractedField missing = ExtractedField.newField("missing", ExtractedField.ExtractionMethod.SCRIPT_FIELD); + ExtractedField missing = ExtractedField.newField("missing", Collections.emptySet(), ExtractedField.ExtractionMethod.SCRIPT_FIELD); assertThat(missing.value(hit), equalTo(new Object[0])); } public void testValueGivenSource() { SearchHit hit = new SearchHitBuilder(42).setSource("{\"single\":\"bar\",\"array\":[\"a\",\"b\"]}").build(); - ExtractedField single = ExtractedField.newField("single", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField single = ExtractedField.newField("single", Collections.singleton("text"), ExtractedField.ExtractionMethod.SOURCE); assertThat(single.value(hit), equalTo(new String[] { "bar" })); - ExtractedField array = ExtractedField.newField("array", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField array = ExtractedField.newField("array", Collections.singleton("text"), ExtractedField.ExtractionMethod.SOURCE); assertThat(array.value(hit), equalTo(new String[] { "a", "b" })); - ExtractedField missing = ExtractedField.newField("missing", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField missing = 
ExtractedField.newField("missing", Collections.singleton("text"), ExtractedField.ExtractionMethod.SOURCE); assertThat(missing.value(hit), equalTo(new Object[0])); } public void testValueGivenNestedSource() { SearchHit hit = new SearchHitBuilder(42).setSource("{\"level_1\":{\"level_2\":{\"foo\":\"bar\"}}}").build(); - ExtractedField nested = ExtractedField.newField("alias", "level_1.level_2.foo", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField nested = ExtractedField.newField("alias", "level_1.level_2.foo", Collections.singleton("text"), + ExtractedField.ExtractionMethod.SOURCE); assertThat(nested.value(hit), equalTo(new String[] { "bar" })); } @@ -91,49 +97,54 @@ public class ExtractedFieldTests extends ESTestCase { } public void testValueGivenSourceAndHitWithNoSource() { - ExtractedField missing = ExtractedField.newField("missing", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField missing = ExtractedField.newField("missing", Collections.singleton("text"), ExtractedField.ExtractionMethod.SOURCE); assertThat(missing.value(new SearchHitBuilder(3).build()), equalTo(new Object[0])); } public void testValueGivenMismatchingMethod() { SearchHit hit = new SearchHitBuilder(42).addField("a", 1).setSource("{\"b\":2}").build(); - ExtractedField invalidA = ExtractedField.newField("a", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField invalidA = ExtractedField.newField("a", Collections.singleton("text"), ExtractedField.ExtractionMethod.SOURCE); assertThat(invalidA.value(hit), equalTo(new Object[0])); - ExtractedField validA = ExtractedField.newField("a", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField validA = ExtractedField.newField("a", Collections.singleton("keyword"), ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(validA.value(hit), equalTo(new Integer[] { 1 })); - ExtractedField invalidB = ExtractedField.newField("b", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField invalidB = ExtractedField.newField("b", 
Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(invalidB.value(hit), equalTo(new Object[0])); - ExtractedField validB = ExtractedField.newField("b", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField validB = ExtractedField.newField("b", Collections.singleton("text"), ExtractedField.ExtractionMethod.SOURCE); assertThat(validB.value(hit), equalTo(new Integer[] { 2 })); } public void testValueGivenEmptyHit() { SearchHit hit = new SearchHitBuilder(42).build(); - ExtractedField docValue = ExtractedField.newField("a", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField docValue = ExtractedField.newField("a", Collections.singleton("text"), ExtractedField.ExtractionMethod.SOURCE); assertThat(docValue.value(hit), equalTo(new Object[0])); - ExtractedField sourceField = ExtractedField.newField("b", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField sourceField = ExtractedField.newField("b", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(sourceField.value(hit), equalTo(new Object[0])); } public void testNewTimeFieldGivenSource() { - expectThrows(IllegalArgumentException.class, () -> ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.SOURCE)); + expectThrows(IllegalArgumentException.class, () -> ExtractedField.newTimeField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.SOURCE)); } public void testValueGivenStringTimeField() { final long millis = randomLong(); final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build(); - final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + final ExtractedField timeField = ExtractedField.newTimeField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(timeField.value(hit), equalTo(new Object[] { millis })); } public void 
testValueGivenLongTimeField() { final long millis = randomLong(); final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build(); - final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + final ExtractedField timeField = ExtractedField.newTimeField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(timeField.value(hit), equalTo(new Object[] { millis })); } @@ -141,13 +152,15 @@ public class ExtractedFieldTests extends ESTestCase { // Prior to 6.x, timestamps were simply `long` milliseconds-past-the-epoch values final long millis = randomLong(); final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build(); - final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + final ExtractedField timeField = ExtractedField.newTimeField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(timeField.value(hit), equalTo(new Object[] { millis })); } public void testValueGivenUnknownFormatTimeField() { final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new Object()).build(); - final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + final ExtractedField timeField = ExtractedField.newTimeField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(expectThrows(IllegalStateException.class, () -> timeField.value(hit)).getMessage(), startsWith("Unexpected value for a time field")); } @@ -155,14 +168,15 @@ public class ExtractedFieldTests extends ESTestCase { public void testAliasVersusName() { SearchHit hit = new SearchHitBuilder(42).addField("a", 1).addField("b", 2).build(); - ExtractedField field = ExtractedField.newField("a", "a", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField field = 
ExtractedField.newField("a", "a", Collections.singleton("int"), + ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(field.getAlias(), equalTo("a")); assertThat(field.getName(), equalTo("a")); assertThat(field.value(hit), equalTo(new Integer[] { 1 })); hit = new SearchHitBuilder(42).addField("a", 1).addField("b", 2).build(); - field = ExtractedField.newField("a", "b", ExtractedField.ExtractionMethod.DOC_VALUE); + field = ExtractedField.newField("a", "b", Collections.singleton("int"), ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(field.getAlias(), equalTo("a")); assertThat(field.getName(), equalTo("b")); assertThat(field.value(hit), equalTo(new Integer[] { 2 })); @@ -170,11 +184,11 @@ public class ExtractedFieldTests extends ESTestCase { public void testGetDocValueFormat() { for (ExtractedField.ExtractionMethod method : ExtractedField.ExtractionMethod.values()) { - assertThat(ExtractedField.newField("f", method).getDocValueFormat(), equalTo(null)); + assertThat(ExtractedField.newField("f", Collections.emptySet(), method).getDocValueFormat(), equalTo(null)); } - assertThat(ExtractedField.newTimeField("doc_value_time", ExtractedField.ExtractionMethod.DOC_VALUE).getDocValueFormat(), - equalTo("epoch_millis")); - assertThat(ExtractedField.newTimeField("source_time", ExtractedField.ExtractionMethod.SCRIPT_FIELD).getDocValueFormat(), - equalTo("epoch_millis")); + assertThat(ExtractedField.newTimeField("doc_value_time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE).getDocValueFormat(), equalTo("epoch_millis")); + assertThat(ExtractedField.newTimeField("source_time", Collections.emptySet(), + ExtractedField.ExtractionMethod.SCRIPT_FIELD).getDocValueFormat(), equalTo("epoch_millis")); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldsTests.java index 
db25f820dbb..8dd81b47eb8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldsTests.java @@ -27,12 +27,18 @@ import static org.mockito.Mockito.when; public class ExtractedFieldsTests extends ESTestCase { public void testAllTypesOfFields() { - ExtractedField docValue1 = ExtractedField.newField("doc1", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField docValue2 = ExtractedField.newField("doc2", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField scriptField1 = ExtractedField.newField("scripted1", ExtractedField.ExtractionMethod.SCRIPT_FIELD); - ExtractedField scriptField2 = ExtractedField.newField("scripted2", ExtractedField.ExtractionMethod.SCRIPT_FIELD); - ExtractedField sourceField1 = ExtractedField.newField("src1", ExtractedField.ExtractionMethod.SOURCE); - ExtractedField sourceField2 = ExtractedField.newField("src2", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField docValue1 = ExtractedField.newField("doc1", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField docValue2 = ExtractedField.newField("doc2", Collections.singleton("ip"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField scriptField1 = ExtractedField.newField("scripted1", Collections.emptySet(), + ExtractedField.ExtractionMethod.SCRIPT_FIELD); + ExtractedField scriptField2 = ExtractedField.newField("scripted2", Collections.emptySet(), + ExtractedField.ExtractionMethod.SCRIPT_FIELD); + ExtractedField sourceField1 = ExtractedField.newField("src1", Collections.singleton("text"), + ExtractedField.ExtractionMethod.SOURCE); + ExtractedField sourceField2 = ExtractedField.newField("src2", Collections.singleton("text"), + ExtractedField.ExtractionMethod.SOURCE); ExtractedFields extractedFields = new ExtractedFields(Arrays.asList( docValue1, docValue2, 
scriptField1, scriptField2, sourceField1, sourceField2)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java index 6e7a3740e0a..652eb068783 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java @@ -29,7 +29,8 @@ import static org.mockito.Mockito.when; public class TimeBasedExtractedFieldsTests extends ESTestCase { - private ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + private ExtractedField timeField = ExtractedField.newTimeField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE); public void testInvalidConstruction() { expectThrows(IllegalArgumentException.class, () -> new TimeBasedExtractedFields(timeField, Collections.emptyList())); @@ -46,12 +47,18 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase { } public void testAllTypesOfFields() { - ExtractedField docValue1 = ExtractedField.newField("doc1", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField docValue2 = ExtractedField.newField("doc2", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField scriptField1 = ExtractedField.newField("scripted1", ExtractedField.ExtractionMethod.SCRIPT_FIELD); - ExtractedField scriptField2 = ExtractedField.newField("scripted2", ExtractedField.ExtractionMethod.SCRIPT_FIELD); - ExtractedField sourceField1 = ExtractedField.newField("src1", ExtractedField.ExtractionMethod.SOURCE); - ExtractedField sourceField2 = ExtractedField.newField("src2", ExtractedField.ExtractionMethod.SOURCE); + ExtractedField docValue1 = ExtractedField.newField("doc1", 
Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField docValue2 = ExtractedField.newField("doc2", Collections.singleton("float"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField scriptField1 = ExtractedField.newField("scripted1", Collections.emptySet(), + ExtractedField.ExtractionMethod.SCRIPT_FIELD); + ExtractedField scriptField2 = ExtractedField.newField("scripted2", Collections.emptySet(), + ExtractedField.ExtractionMethod.SCRIPT_FIELD); + ExtractedField sourceField1 = ExtractedField.newField("src1", Collections.singleton("text"), + ExtractedField.ExtractionMethod.SOURCE); + ExtractedField sourceField2 = ExtractedField.newField("src2", Collections.singleton("text"), + ExtractedField.ExtractionMethod.SOURCE); TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField, docValue1, docValue2, scriptField1, scriptField2, sourceField1, sourceField2)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java index c383cf20b18..bdbe81a66a6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java @@ -135,9 +135,11 @@ public class ScrollDataExtractorTests extends ESTestCase { capturedSearchRequests = new ArrayList<>(); capturedContinueScrollIds = new ArrayList<>(); jobId = "test-job"; - ExtractedField timeField = ExtractedField.newField("time", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField timeField = ExtractedField.newField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE); extractedFields = new TimeBasedExtractedFields(timeField, - 
Arrays.asList(timeField, ExtractedField.newField("field_1", ExtractedField.ExtractionMethod.DOC_VALUE))); + Arrays.asList(timeField, ExtractedField.newField("field_1", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE))); indices = Arrays.asList("index-1", "index-2"); query = QueryBuilders.matchAllQuery(); scriptFields = Collections.emptyList(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java index 41a74814461..d2befb407ae 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java @@ -16,16 +16,21 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; +import java.util.Collections; import static org.hamcrest.Matchers.equalTo; public class SearchHitToJsonProcessorTests extends ESTestCase { public void testProcessGivenSingleHit() throws IOException { - ExtractedField timeField = ExtractedField.newField("time", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField missingField = ExtractedField.newField("missing", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField singleField = ExtractedField.newField("single", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField arrayField = ExtractedField.newField("array", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField timeField = ExtractedField.newField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField missingField = ExtractedField.newField("missing", Collections.singleton("float"), + ExtractedField.ExtractionMethod.DOC_VALUE); + 
ExtractedField singleField = ExtractedField.newField("single", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField arrayField = ExtractedField.newField("array", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField, missingField, singleField, arrayField)); @@ -41,10 +46,14 @@ public class SearchHitToJsonProcessorTests extends ESTestCase { } public void testProcessGivenMultipleHits() throws IOException { - ExtractedField timeField = ExtractedField.newField("time", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField missingField = ExtractedField.newField("missing", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField singleField = ExtractedField.newField("single", ExtractedField.ExtractionMethod.DOC_VALUE); - ExtractedField arrayField = ExtractedField.newField("array", ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField timeField = ExtractedField.newField("time", Collections.singleton("date"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField missingField = ExtractedField.newField("missing", Collections.singleton("float"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField singleField = ExtractedField.newField("single", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); + ExtractedField arrayField = ExtractedField.newField("array", Collections.singleton("keyword"), + ExtractedField.ExtractionMethod.DOC_VALUE); TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField, missingField, singleField, arrayField)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java index 
ffd53e5576f..b456de7b637 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java @@ -71,8 +71,8 @@ public class DataFrameDataExtractorTests extends ESTestCase { indices = Arrays.asList("index-1", "index-2"); query = QueryBuilders.matchAllQuery(); extractedFields = new ExtractedFields(Arrays.asList( - ExtractedField.newField("field_1", ExtractedField.ExtractionMethod.DOC_VALUE), - ExtractedField.newField("field_2", ExtractedField.ExtractionMethod.DOC_VALUE))); + ExtractedField.newField("field_1", Collections.singleton("keyword"), ExtractedField.ExtractionMethod.DOC_VALUE), + ExtractedField.newField("field_2", Collections.singleton("keyword"), ExtractedField.ExtractionMethod.DOC_VALUE))); scrollSize = 1000; headers = Collections.emptyMap(); @@ -288,8 +288,8 @@ public class DataFrameDataExtractorTests extends ESTestCase { public void testIncludeSourceIsFalseAndAtLeastOneSourceField() throws IOException { extractedFields = new ExtractedFields(Arrays.asList( - ExtractedField.newField("field_1", ExtractedField.ExtractionMethod.DOC_VALUE), - ExtractedField.newField("field_2", ExtractedField.ExtractionMethod.SOURCE))); + ExtractedField.newField("field_1", Collections.singleton("keyword"), ExtractedField.ExtractionMethod.DOC_VALUE), + ExtractedField.newField("field_2", Collections.singleton("text"), ExtractedField.ExtractionMethod.SOURCE))); TestExtractor dataExtractor = createExtractor(false); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java index 1345a1fe128..5f781538bec 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; import org.elasticsearch.xpack.core.ml.dataframe.analyses.OutlierDetection; +import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression; import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedField; import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedFields; @@ -38,11 +39,11 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { private static final String RESULTS_FIELD = "ml"; public void testDetect_GivenFloatField() { - FieldCapabilitiesResponse fieldCapabilities= new MockFieldCapsResponseBuilder() + FieldCapabilitiesResponse fieldCapabilities = new MockFieldCapsResponseBuilder() .addAggregatableField("some_float", "float").build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), false, 100, fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List allFields = extractedFields.getAllFields(); @@ -52,12 +53,12 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { } public void testDetect_GivenNumericFieldWithMultipleTypes() { - FieldCapabilitiesResponse fieldCapabilities= new MockFieldCapsResponseBuilder() + FieldCapabilitiesResponse fieldCapabilities = new MockFieldCapsResponseBuilder() .addAggregatableField("some_number", "long", "integer", "short", "byte", "double", "float", "half_float", "scaled_float") .build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); + 
SOURCE_INDEX, buildOutlierDetectionConfig(), false, 100, fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List allFields = extractedFields.getAllFields(); @@ -67,36 +68,36 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { } public void testDetect_GivenNonNumericField() { - FieldCapabilitiesResponse fieldCapabilities= new MockFieldCapsResponseBuilder() + FieldCapabilitiesResponse fieldCapabilities = new MockFieldCapsResponseBuilder() .addAggregatableField("some_keyword", "keyword").build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), false, 100, fieldCapabilities); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); assertThat(e.getMessage(), equalTo("No compatible fields could be detected in index [source_index]")); } - public void testDetect_GivenFieldWithNumericAndNonNumericTypes() { - FieldCapabilitiesResponse fieldCapabilities= new MockFieldCapsResponseBuilder() + public void testDetect_GivenOutlierDetectionAndFieldWithNumericAndNonNumericTypes() { + FieldCapabilitiesResponse fieldCapabilities = new MockFieldCapsResponseBuilder() .addAggregatableField("indecisive_field", "float", "keyword").build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), false, 100, fieldCapabilities); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); assertThat(e.getMessage(), equalTo("No compatible fields could be detected in index [source_index]")); } - public void testDetect_GivenMultipleFields() { - FieldCapabilitiesResponse fieldCapabilities= new MockFieldCapsResponseBuilder() + public 
void testDetect_GivenOutlierDetectionAndMultipleFields() { + FieldCapabilitiesResponse fieldCapabilities = new MockFieldCapsResponseBuilder() .addAggregatableField("some_float", "float") .addAggregatableField("some_long", "long") .addAggregatableField("some_keyword", "keyword") .build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), false, 100, fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List allFields = extractedFields.getAllFields(); @@ -107,12 +108,46 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { contains(equalTo(ExtractedField.ExtractionMethod.DOC_VALUE))); } + public void testDetect_GivenRegressionAndMultipleFields() { + FieldCapabilitiesResponse fieldCapabilities = new MockFieldCapsResponseBuilder() + .addAggregatableField("some_float", "float") + .addAggregatableField("some_long", "long") + .addAggregatableField("some_keyword", "keyword") + .addAggregatableField("foo", "keyword") + .build(); + + ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( + SOURCE_INDEX, buildRegressionConfig("foo"), false, 100, fieldCapabilities); + ExtractedFields extractedFields = extractedFieldsDetector.detect(); + + List allFields = extractedFields.getAllFields(); + assertThat(allFields.size(), equalTo(4)); + assertThat(allFields.stream().map(ExtractedField::getName).collect(Collectors.toList()), + contains("foo", "some_float", "some_keyword", "some_long")); + assertThat(allFields.stream().map(ExtractedField::getExtractionMethod).collect(Collectors.toSet()), + contains(equalTo(ExtractedField.ExtractionMethod.DOC_VALUE))); + } + + public void testDetect_GivenRegressionAndRequiredFieldMissing() { + FieldCapabilitiesResponse fieldCapabilities = new MockFieldCapsResponseBuilder() + .addAggregatableField("some_float", "float") + 
.addAggregatableField("some_long", "long") + .addAggregatableField("some_keyword", "keyword") + .build(); + + ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( + SOURCE_INDEX, buildRegressionConfig("foo"), false, 100, fieldCapabilities); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); + + assertThat(e.getMessage(), equalTo("required fields [foo] are missing")); + } + public void testDetect_GivenIgnoredField() { - FieldCapabilitiesResponse fieldCapabilities= new MockFieldCapsResponseBuilder() + FieldCapabilitiesResponse fieldCapabilities = new MockFieldCapsResponseBuilder() .addAggregatableField("_id", "float").build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), false, 100, fieldCapabilities); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); assertThat(e.getMessage(), equalTo("No compatible fields could be detected in index [source_index]")); @@ -134,7 +169,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { FieldCapabilitiesResponse fieldCapabilities = mockFieldCapsResponseBuilder.build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), false, 100, fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List extractedFieldNames = extractedFields.getAllFields().stream().map(ExtractedField::getName) @@ -151,7 +186,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { FetchSourceContext desiredFields = new FetchSourceContext(true, new String[]{"your_field1", "my*"}, new String[0]); ExtractedFieldsDetector extractedFieldsDetector = 
new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(desiredFields), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(desiredFields), false, 100, fieldCapabilities); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); assertThat(e.getMessage(), equalTo("No field [your_field1] could be detected")); @@ -166,7 +201,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { FetchSourceContext desiredFields = new FetchSourceContext(true, new String[0], new String[]{"my_*"}); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(desiredFields), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(desiredFields), false, 100, fieldCapabilities); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); assertThat(e.getMessage(), equalTo("No compatible fields could be detected in index [source_index]")); } @@ -182,7 +217,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { FetchSourceContext desiredFields = new FetchSourceContext(true, new String[]{"your*", "my_*"}, new String[]{"*nope"}); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(desiredFields), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(desiredFields), false, 100, fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List extractedFieldNames = extractedFields.getAllFields().stream().map(ExtractedField::getName) @@ -199,7 +234,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { .build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), false, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), false, 
100, fieldCapabilities); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> extractedFieldsDetector.detect()); assertThat(e.getMessage(), equalTo("A field that matches the dest.results_field [ml] already exists; " + @@ -215,7 +250,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { .build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), true, 100, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), true, 100, fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List extractedFieldNames = extractedFields.getAllFields().stream().map(ExtractedField::getName) @@ -232,7 +267,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { .build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), true, 4, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), true, 4, fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List extractedFieldNames = extractedFields.getAllFields().stream().map(ExtractedField::getName) @@ -251,7 +286,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { .build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), true, 3, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), true, 3, fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List extractedFieldNames = extractedFields.getAllFields().stream().map(ExtractedField::getName) @@ -270,7 +305,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { .build(); ExtractedFieldsDetector extractedFieldsDetector = new ExtractedFieldsDetector( - SOURCE_INDEX, buildAnalyticsConfig(), true, 2, fieldCapabilities); + SOURCE_INDEX, buildOutlierDetectionConfig(), true, 2, 
fieldCapabilities); ExtractedFields extractedFields = extractedFieldsDetector.detect(); List extractedFieldNames = extractedFields.getAllFields().stream().map(ExtractedField::getName) @@ -280,11 +315,11 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { contains(equalTo(ExtractedField.ExtractionMethod.SOURCE))); } - private static DataFrameAnalyticsConfig buildAnalyticsConfig() { - return buildAnalyticsConfig(null); + private static DataFrameAnalyticsConfig buildOutlierDetectionConfig() { + return buildOutlierDetectionConfig(null); } - private static DataFrameAnalyticsConfig buildAnalyticsConfig(FetchSourceContext analyzedFields) { + private static DataFrameAnalyticsConfig buildOutlierDetectionConfig(FetchSourceContext analyzedFields) { return new DataFrameAnalyticsConfig.Builder("foo") .setSource(new DataFrameAnalyticsSource(SOURCE_INDEX, null)) .setDest(new DataFrameAnalyticsDest(DEST_INDEX, null)) @@ -293,6 +328,19 @@ public class ExtractedFieldsDetectorTests extends ESTestCase { .build(); } + private static DataFrameAnalyticsConfig buildRegressionConfig(String dependentVariable) { + return buildRegressionConfig(dependentVariable, null); + } + + private static DataFrameAnalyticsConfig buildRegressionConfig(String dependentVariable, FetchSourceContext analyzedFields) { + return new DataFrameAnalyticsConfig.Builder("foo") + .setSource(new DataFrameAnalyticsSource(SOURCE_INDEX, null)) + .setDest(new DataFrameAnalyticsDest(DEST_INDEX, null)) + .setAnalyzedFields(analyzedFields) + .setAnalysis(new Regression(dependentVariable)) + .build(); + } + private static class MockFieldCapsResponseBuilder { private final Map> fieldCaps = new HashMap<>(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedActionTests.java index c83424dbc46..e36b8a6aaa3 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestStartDatafeedActionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.datafeeds; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -22,8 +21,8 @@ import static org.mockito.Mockito.mock; public class RestStartDatafeedActionTests extends ESTestCase { public void testPrepareRequest() throws Exception { - RestStartDatafeedAction action = new RestStartDatafeedAction(Settings.EMPTY, - mock(RestController.class)); + RestStartDatafeedAction action = new RestStartDatafeedAction( + mock(RestController.class)); Map params = new HashMap<>(); params.put("start", "not-a-date"); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index b054eca1cc3..5d03a066d75 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -165,7 +165,7 @@ public class Monitoring extends Plugin implements ActionPlugin { if (false == enabled) { return emptyList(); } - return singletonList(new RestMonitoringBulkAction(settings, restController)); + return singletonList(new RestMonitoringBulkAction(restController)); } @Override diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java 
b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java index 434b0bd9fa0..5cb732d22f0 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -45,8 +44,7 @@ public class RestMonitoringBulkAction extends XPackRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); private final Map> supportedApiVersions; - public RestMonitoringBulkAction(Settings settings, RestController controller) { - super(settings); + public RestMonitoringBulkAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler(POST, "/_monitoring/bulk", this, POST, "/_xpack/monitoring/_bulk", deprecationLogger); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java index 7a4427c9f0f..604f9dd453b 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.Client; import 
org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -48,7 +47,7 @@ public class RestMonitoringBulkActionTests extends ESTestCase { private final RestController controller = mock(RestController.class); - private final RestMonitoringBulkAction action = new RestMonitoringBulkAction(Settings.EMPTY, controller); + private final RestMonitoringBulkAction action = new RestMonitoringBulkAction(controller); public void testGetName() { // Are you sure that you want to change the name? diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java index d40dd96770c..956d67c1be7 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java @@ -136,14 +136,14 @@ public class Rollup extends Plugin implements ActionPlugin, PersistentTaskPlugin } return Arrays.asList( - new RestRollupSearchAction(settings, restController), - new RestPutRollupJobAction(settings, restController), - new RestStartRollupJobAction(settings, restController), - new RestStopRollupJobAction(settings, restController), - new RestDeleteRollupJobAction(settings, restController), - new RestGetRollupJobsAction(settings, restController), - new RestGetRollupCapsAction(settings, restController), - new RestGetRollupIndexCapsAction(settings, restController) + new RestRollupSearchAction(restController), + new RestPutRollupJobAction(restController), + new RestStartRollupJobAction(restController), + new RestStopRollupJobAction(restController), + new RestDeleteRollupJobAction(restController), + new 
RestGetRollupJobsAction(restController), + new RestGetRollupCapsAction(restController), + new RestGetRollupIndexCapsAction(restController) ); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java index 40c88b7d00f..2ab2af45068 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestDeleteRollupJobAction.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -18,8 +17,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.DELETE; public class RestDeleteRollupJobAction extends BaseRestHandler { @@ -28,8 +25,7 @@ public class RestDeleteRollupJobAction extends BaseRestHandler { public static final ParseField ID = new ParseField("id"); - public RestDeleteRollupJobAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteRollupJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, "/_rollup/job/{id}", this, @@ -37,7 +33,7 @@ public class RestDeleteRollupJobAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws 
IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(ID.getPreferredName()); DeleteRollupJobAction.Request request = new DeleteRollupJobAction.Request(id); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java index 37a67d6ed46..9ddd9459425 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupCapsAction.java @@ -10,15 +10,12 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.rollup.action.GetRollupCapsAction; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestGetRollupCapsAction extends BaseRestHandler { @@ -27,8 +24,7 @@ public class RestGetRollupCapsAction extends BaseRestHandler { public static final ParseField ID = new ParseField("id"); - public RestGetRollupCapsAction(Settings settings, RestController controller) { - super(settings); + public RestGetRollupCapsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/_rollup/data/{id}", this, @@ -36,7 +32,7 @@ public class RestGetRollupCapsAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient 
client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(ID.getPreferredName()); GetRollupCapsAction.Request request = new GetRollupCapsAction.Request(id); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java index 26ec57fad57..a5e2e6ea806 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -28,8 +27,7 @@ public class RestGetRollupIndexCapsAction extends BaseRestHandler { static final ParseField INDEX = new ParseField("index"); - public RestGetRollupIndexCapsAction(Settings settings, RestController controller) { - super(settings); + public RestGetRollupIndexCapsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/{index}/_rollup/data", this, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java index 99500e9214a..8b1327be55b 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java +++ 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupJobsAction.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -25,8 +24,7 @@ public class RestGetRollupJobsAction extends BaseRestHandler { public static final ParseField ID = new ParseField("id"); - public RestGetRollupJobsAction(Settings settings, RestController controller) { - super(settings); + public RestGetRollupJobsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/_rollup/job/{id}", this, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java index 87c7820e688..25612350dde 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -24,8 +23,7 @@ public class RestPutRollupJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutRollupJobAction.class)); 
- public RestPutRollupJobAction(Settings settings, RestController controller) { - super(settings); + public RestPutRollupJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( PUT, "/_rollup/job/{id}", this, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java index 1d9960e711f..c354e1ed1ed 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -27,8 +26,7 @@ public class RestRollupSearchAction extends BaseRestHandler { RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM))); - public RestRollupSearchAction(Settings settings, RestController controller) { - super(settings); + public RestRollupSearchAction(RestController controller) { controller.registerHandler(RestRequest.Method.GET, "_rollup_search", this); controller.registerHandler(RestRequest.Method.POST, "_rollup_search", this); controller.registerHandler(RestRequest.Method.GET, "{index}/_rollup_search", this); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java index 106b44de8d5..3e9e3ad0fc6 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStartRollupJobAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -17,16 +16,13 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestStartRollupJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestStartRollupJobAction.class)); - public RestStartRollupJobAction(Settings settings, RestController controller) { - super(settings); + public RestStartRollupJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_rollup/job/{id}/_start", this, @@ -34,7 +30,7 @@ public class RestStartRollupJobAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(RollupField.ID.getPreferredName()); StartRollupJobAction.Request request = new StartRollupJobAction.Request(id); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java index 44f88b54147..107adf941b2 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestStopRollupJobAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.rollup.rest; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -24,8 +23,7 @@ public class RestStopRollupJobAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestStopRollupJobAction.class)); - public RestStopRollupJobAction(Settings settings, RestController controller) { - super(settings); + public RestStopRollupJobAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_rollup/job/{id}/_stop", this, diff --git a/x-pack/plugin/security/qa/basic-enable-security/build.gradle b/x-pack/plugin/security/qa/basic-enable-security/build.gradle index 461bc11a9b4..18b5ffec414 100644 --- a/x-pack/plugin/security/qa/basic-enable-security/build.gradle +++ b/x-pack/plugin/security/qa/basic-enable-security/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.testclusters.RestTestRunnerTask + apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' @@ -24,13 +26,11 @@ testClusters.integTest { setting 'xpack.security.enabled', 'false' } -task integTestSecurity(type: Test) { +task integTestSecurity(type: RestTestRunnerTask) { description = "Run tests against 
a cluster that has security" useCluster testClusters.integTest dependsOn integTest systemProperty 'tests.has_security', 'true' - maxParallelForks = 1 - outputs.cacheIf "Caching of REST tests not implemented yet", { false } doFirst { testClusters.integTest { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 8e5f2daa39c..595c73a1c87 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -820,7 +820,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw new RestDeleteRoleMappingAction(settings, restController, getLicenseState()), new RestGetTokenAction(settings, restController, getLicenseState()), new RestInvalidateTokenAction(settings, restController, getLicenseState()), - new RestGetCertificateInfoAction(settings, restController), + new RestGetCertificateInfoAction(restController), new RestSamlPrepareAuthenticationAction(settings, restController, getLicenseState()), new RestSamlAuthenticateAction(settings, restController, getLicenseState()), new RestSamlLogoutAction(settings, restController, getLicenseState()), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportGetBuiltinPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportGetBuiltinPrivilegesAction.java index e179c4bf250..7f24ad5c4df 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportGetBuiltinPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportGetBuiltinPrivilegesAction.java @@ -14,7 +14,7 @@ import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.core.security.action.privilege.GetBuiltinPrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.GetBuiltinPrivilegesRequest; import org.elasticsearch.xpack.core.security.action.privilege.GetBuiltinPrivilegesResponse; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import java.util.TreeSet; @@ -31,7 +31,7 @@ public class TransportGetBuiltinPrivilegesAction extends HandledTransportAction< @Override protected void doExecute(Task task, GetBuiltinPrivilegesRequest request, ActionListener listener) { - final TreeSet cluster = new TreeSet<>(ClusterPrivilege.names()); + final TreeSet cluster = new TreeSet<>(ClusterPrivilegeResolver.names()); final TreeSet index = new TreeSet<>(IndexPrivilege.names()); listener.onResponse(new GetBuiltinPrivilegesResponse(cluster, index)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index afbe7a1231b..69153379f3b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -54,7 +54,7 @@ import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import 
org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; @@ -231,7 +231,7 @@ public class AuthorizationService { final TransportRequest request = requestInfo.getRequest(); final String action = requestInfo.getAction(); final AuthorizationEngine authzEngine = getAuthorizationEngine(authentication); - if (ClusterPrivilege.ACTION_MATCHER.test(action)) { + if (ClusterPrivilegeResolver.isClusterAction(action)) { final ActionListener clusterAuthzListener = wrapPreservingContext(new AuthorizationResultListener<>(result -> { putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, IndicesAccessControl.ALLOW_ALL); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index e2824e74eca..df00474f6d6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.transport.TransportActionProxy; @@ -57,7 +56,9 @@ import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; 
import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.NamedClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.User; @@ -360,8 +361,7 @@ public class RBACEngine implements AuthorizationEngine { Map cluster = new HashMap<>(); for (String checkAction : request.clusterPrivileges()) { - final ClusterPrivilege checkPrivilege = ClusterPrivilege.get(Collections.singleton(checkAction)); - cluster.put(checkAction, userRole.grants(checkPrivilege)); + cluster.put(checkAction, userRole.grants(ClusterPrivilegeResolver.resolve(checkAction))); } boolean allMatch = cluster.values().stream().allMatch(Boolean::booleanValue); ResourcePrivilegesMap.Builder combineIndicesResourcePrivileges = ResourcePrivilegesMap.builder(); @@ -412,15 +412,17 @@ public class RBACEngine implements AuthorizationEngine { // We use sorted sets for Strings because they will typically be small, and having a predictable order allows for simpler testing final Set cluster = new TreeSet<>(); - // But we don't have a meaningful ordering for objects like ConditionalClusterPrivilege, so the tests work with "random" ordering - final Set conditionalCluster = new HashSet<>(); - for (Tuple tup : userRole.cluster().privileges()) { - if (tup.v2() == null) { - if (ClusterPrivilege.NONE.equals(tup.v1()) == false) { - cluster.addAll(tup.v1().name()); - } + // But we don't have a meaningful ordering for objects like ConfigurableClusterPrivilege, so the tests work with "random" ordering + final Set conditionalCluster = new 
HashSet<>(); + for (ClusterPrivilege privilege : userRole.cluster().privileges()) { + if (privilege instanceof NamedClusterPrivilege) { + cluster.add(((NamedClusterPrivilege) privilege).name()); + } else if (privilege instanceof ConfigurableClusterPrivilege) { + conditionalCluster.add((ConfigurableClusterPrivilege) privilege); } else { - conditionalCluster.add(tup.v2()); + throw new IllegalArgumentException( + "found unsupported cluster privilege : " + privilege + + ((privilege != null) ? " of type " + privilege.getClass().getSimpleName() : "")); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index 673dfab5b3b..0b8ba35c766 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -35,7 +35,7 @@ import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDe import org.elasticsearch.xpack.core.security.authz.permission.LimitedRole; import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; @@ -342,7 +342,7 @@ public class CompositeRolesStore { } Set clusterPrivileges = new HashSet<>(); - final List conditionalClusterPrivileges = new ArrayList<>(); + final List configurableClusterPrivileges = new 
ArrayList<>(); Set runAs = new HashSet<>(); final Map, MergeableIndicesPrivilege> restrictedIndicesPrivilegesMap = new HashMap<>(); final Map, MergeableIndicesPrivilege> indicesPrivilegesMap = new HashMap<>(); @@ -357,7 +357,7 @@ public class CompositeRolesStore { clusterPrivileges.addAll(Arrays.asList(descriptor.getClusterPrivileges())); } if (descriptor.getConditionalClusterPrivileges() != null) { - conditionalClusterPrivileges.addAll(Arrays.asList(descriptor.getConditionalClusterPrivileges())); + configurableClusterPrivileges.addAll(Arrays.asList(descriptor.getConditionalClusterPrivileges())); } if (descriptor.getRunAs() != null) { runAs.addAll(Arrays.asList(descriptor.getRunAs())); @@ -379,7 +379,7 @@ public class CompositeRolesStore { final Privilege runAsPrivilege = runAs.isEmpty() ? Privilege.NONE : new Privilege(runAs, runAs.toArray(Strings.EMPTY_ARRAY)); final Role.Builder builder = Role.builder(roleNames.toArray(new String[roleNames.size()])) - .cluster(clusterPrivileges, conditionalClusterPrivileges) + .cluster(clusterPrivileges, configurableClusterPrivileges) .runAs(runAsPrivilege); indicesPrivilegesMap.entrySet().forEach((entry) -> { MergeableIndicesPrivilege privilege = entry.getValue(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java index 801902d5b96..7136a110bdc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java @@ -32,7 +32,6 @@ public abstract class SecurityBaseRestHandler extends BaseRestHandler { * @param licenseState the license state that will be used to determine if security is licensed */ protected SecurityBaseRestHandler(Settings settings, XPackLicenseState 
licenseState) { - super(settings); this.settings = settings; this.licenseState = licenseState; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java index b073349d842..c3cd01d7e22 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java @@ -24,8 +24,8 @@ import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequestBuilder; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import org.elasticsearch.xpack.core.security.client.SecurityClient; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; @@ -82,8 +82,8 @@ public class RestGetUserPrivilegesAction extends SecurityBaseRestHandler { builder.field(RoleDescriptor.Fields.CLUSTER.getPreferredName(), response.getClusterPrivileges()); builder.startArray(RoleDescriptor.Fields.GLOBAL.getPreferredName()); - for (ConditionalClusterPrivilege ccp : response.getConditionalClusterPrivileges()) { - ConditionalClusterPrivileges.toXContent(builder, ToXContent.EMPTY_PARAMS, Collections.singleton(ccp)); + for 
(ConfigurableClusterPrivilege ccp : response.getConditionalClusterPrivileges()) { + ConfigurableClusterPrivileges.toXContent(builder, ToXContent.EMPTY_PARAMS, Collections.singleton(ccp)); } builder.endArray(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index b370c8e2b6b..5b6c712ecc2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -80,10 +80,12 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.license.XPackLicenseState; @@ -110,13 +112,16 @@ import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.Role; 
+import org.elasticsearch.xpack.core.security.authz.privilege.ActionClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; @@ -134,7 +139,6 @@ import org.elasticsearch.xpack.sql.action.SqlQueryRequest; import org.junit.Before; import org.mockito.ArgumentMatcher; import org.mockito.Matchers; -import org.mockito.Mockito; import java.io.IOException; import java.io.UncheckedIOException; @@ -315,15 +319,21 @@ public class AuthorizationServiceTests extends ESTestCase { final DeletePrivilegesRequest request = new DeletePrivilegesRequest(); final Authentication authentication = createAuthentication(new User("user1", "role1")); - final ConditionalClusterPrivilege conditionalClusterPrivilege = Mockito.mock(ConditionalClusterPrivilege.class); - final Predicate requestPredicate = r -> r == request; - Mockito.when(conditionalClusterPrivilege.getRequestPredicate()).thenReturn(requestPredicate); - Mockito.when(conditionalClusterPrivilege.getPrivilege()).thenReturn(ClusterPrivilege.MANAGE_SECURITY); - final ConditionalClusterPrivilege[] conditionalClusterPrivileges = new ConditionalClusterPrivilege[] { - conditionalClusterPrivilege + final ConfigurableClusterPrivilege 
configurableClusterPrivilege = new MockConfigurableClusterPrivilege() { + @Override + public ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder) { + final Predicate requestPredicate = r -> r == request; + final Predicate actionPredicate = + Automatons.predicate(((ActionClusterPrivilege) ClusterPrivilegeResolver.MANAGE_SECURITY).getAllowedActionPatterns()); + builder.add(this, actionPredicate, requestPredicate); + return builder; + } + }; + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = new ConfigurableClusterPrivilege[] { + configurableClusterPrivilege }; final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); - RoleDescriptor role = new RoleDescriptor("role1", null, null, null, conditionalClusterPrivileges, null, null ,null); + RoleDescriptor role = new RoleDescriptor("role1", null, null, null, configurableClusterPrivileges, null, null ,null); roleMap.put("role1", role); authorize(authentication, DeletePrivilegesAction.NAME, request); @@ -336,15 +346,21 @@ public class AuthorizationServiceTests extends ESTestCase { final DeletePrivilegesRequest request = new DeletePrivilegesRequest(); final Authentication authentication = createAuthentication(new User("user1", "role1")); - final ConditionalClusterPrivilege conditionalClusterPrivilege = Mockito.mock(ConditionalClusterPrivilege.class); - final Predicate requestPredicate = r -> false; - Mockito.when(conditionalClusterPrivilege.getRequestPredicate()).thenReturn(requestPredicate); - Mockito.when(conditionalClusterPrivilege.getPrivilege()).thenReturn(ClusterPrivilege.MANAGE_SECURITY); - final ConditionalClusterPrivilege[] conditionalClusterPrivileges = new ConditionalClusterPrivilege[] { - conditionalClusterPrivilege + final ConfigurableClusterPrivilege configurableClusterPrivilege = new MockConfigurableClusterPrivilege() { + @Override + public ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder) { + final Predicate requestPredicate 
= r -> false; + final Predicate actionPredicate = + Automatons.predicate(((ActionClusterPrivilege) ClusterPrivilegeResolver.MANAGE_SECURITY).getAllowedActionPatterns()); + builder.add(this, actionPredicate,requestPredicate); + return builder; + } + }; + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = new ConfigurableClusterPrivilege[] { + configurableClusterPrivilege }; final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); - RoleDescriptor role = new RoleDescriptor("role1", null, null, null, conditionalClusterPrivileges, null, null ,null); + RoleDescriptor role = new RoleDescriptor("role1", null, null, null, configurableClusterPrivileges, null, null ,null); roleMap.put("role1", role); assertThrowsAuthorizationException( @@ -1524,4 +1540,25 @@ public class AuthorizationServiceTests extends ESTestCase { return false; } } + + private abstract static class MockConfigurableClusterPrivilege implements ConfigurableClusterPrivilege { + @Override + public Category getCategory() { + return Category.APPLICATION; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder; + } + + @Override + public String getWriteableName() { + return "mock"; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java index 51dba4e4c23..a910900530a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java @@ -19,18 +19,18 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivile import 
org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import java.util.Collections; import java.util.List; import java.util.Set; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.contains; public class AuthorizedIndicesTests extends ESTestCase { @@ -88,7 +88,8 @@ public class AuthorizedIndicesTests extends ESTestCase { } public void testSecurityIndicesAreRemovedFromRegularUser() { - Role role = Role.builder("user_role").add(IndexPrivilege.ALL, "*").cluster(ClusterPrivilege.ALL).build(); + Role role = Role.builder("user_role").add(IndexPrivilege.ALL, "*").cluster(Collections.singleton("all"), Collections.emptySet()) + .build(); List authorizedIndices = RBACEngine.resolveAuthorizedIndicesFromRole(role, SearchAction.NAME, MetaData.EMPTY_META_DATA.getAliasAndIndexLookup()); assertTrue(authorizedIndices.isEmpty()); @@ -97,7 +98,7 @@ public class AuthorizedIndicesTests extends ESTestCase { public void testSecurityIndicesAreRestrictedForDefaultRole() { Role role = Role.builder(randomFrom("user_role", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName())) .add(IndexPrivilege.ALL, "*") - .cluster(ClusterPrivilege.ALL) + .cluster(Collections.singleton("all"), Collections.emptySet()) .build(); Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build(); final 
String internalSecurityIndex = randomFrom(RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_6, @@ -124,7 +125,7 @@ public class AuthorizedIndicesTests extends ESTestCase { public void testSecurityIndicesAreNotRemovedFromUnrestrictedRole() { Role role = Role.builder(randomAlphaOfLength(8)) .add(FieldPermissions.DEFAULT, null, IndexPrivilege.ALL, true, "*") - .cluster(ClusterPrivilege.ALL) + .cluster(Collections.singleton("all"), Collections.emptySet()) .build(); Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build(); final String internalSecurityIndex = randomFrom(RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_6, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java index 5c2e964c743..5dee561edb6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java @@ -46,11 +46,10 @@ import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges.ManageApplicationPrivileges; -import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges.ManageApplicationPrivileges; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import 
org.elasticsearch.xpack.core.security.authz.privilege.Privilege; +import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authz.RBACEngine.RBACAuthorizationInfo; @@ -284,7 +283,7 @@ public class RBACEngineTests extends ESTestCase { Authentication authentication = mock(Authentication.class); when(authentication.getUser()).thenReturn(user); Role role = Role.builder("test2") - .cluster(ClusterPrivilege.MONITOR) + .cluster(Collections.singleton("monitor"), Collections.emptySet()) .add(IndexPrivilege.INDEX, "academy") .add(IndexPrivilege.WRITE, "initiative") .build(); @@ -343,7 +342,7 @@ public class RBACEngineTests extends ESTestCase { Authentication authentication = mock(Authentication.class); when(authentication.getUser()).thenReturn(user); Role role = Role.builder("test3") - .cluster(ClusterPrivilege.MONITOR) + .cluster(Collections.singleton("monitor"), Collections.emptySet()) .build(); RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); @@ -729,7 +728,7 @@ public class RBACEngineTests extends ESTestCase { Authentication authentication = mock(Authentication.class); when(authentication.getUser()).thenReturn(user); Role role = Role.builder("test-write") - .cluster(ClusterPrivilege.MONITOR) + .cluster(Collections.singleton("monitor"), Collections.emptySet()) .add(IndexPrivilege.READ, "read-*") .add(IndexPrivilege.ALL, "all-*") .addApplicationPrivilege(kibanaRead, Collections.singleton("*")) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java index 0f4b53e9841..37788d82403 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java @@ -22,8 +22,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.hamcrest.Matchers; @@ -72,12 +72,12 @@ public class RoleDescriptorTests extends ESTestCase { .build() }; - final ConditionalClusterPrivilege[] conditionalClusterPrivileges = new ConditionalClusterPrivilege[]{ - new ConditionalClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = new ConfigurableClusterPrivilege[]{ + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) }; RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, applicationPrivileges, - conditionalClusterPrivileges, new String[] { "sudo" }, Collections.emptyMap(), Collections.emptyMap()); + configurableClusterPrivileges, new String[] { "sudo" }, Collections.emptyMap(), Collections.emptyMap()); assertThat(descriptor.toString(), is("Role[name=test, cluster=[all,none]" + ", global=[{APPLICATION:manage:applications=app01,app02}]" + @@ -104,13 +104,13 @@ public class RoleDescriptorTests extends ESTestCase { .resources("*") .build() }; - final ConditionalClusterPrivilege[] conditionalClusterPrivileges = { - new 
ConditionalClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) }; Map metadata = randomBoolean() ? MetadataUtils.DEFAULT_RESERVED_METADATA : null; RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, applicationPrivileges, - conditionalClusterPrivileges, new String[]{ "sudo" }, metadata, Collections.emptyMap()); + configurableClusterPrivileges, new String[]{ "sudo" }, metadata, Collections.emptyMap()); XContentBuilder builder = descriptor.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS); RoleDescriptor parsed = RoleDescriptor.parse("test", BytesReference.bytes(builder), false, XContentType.JSON); assertThat(parsed, equalTo(descriptor)); @@ -189,10 +189,10 @@ public class RoleDescriptorTests extends ESTestCase { assertThat(rd.getApplicationPrivileges()[1].getApplication(), equalTo("app2")); assertThat(rd.getConditionalClusterPrivileges(), Matchers.arrayWithSize(1)); - final ConditionalClusterPrivilege conditionalPrivilege = rd.getConditionalClusterPrivileges()[0]; - assertThat(conditionalPrivilege.getCategory(), equalTo(ConditionalClusterPrivilege.Category.APPLICATION)); - assertThat(conditionalPrivilege, instanceOf(ConditionalClusterPrivileges.ManageApplicationPrivileges.class)); - assertThat(((ConditionalClusterPrivileges.ManageApplicationPrivileges) conditionalPrivilege).getApplicationNames(), + final ConfigurableClusterPrivilege conditionalPrivilege = rd.getConditionalClusterPrivileges()[0]; + assertThat(conditionalPrivilege.getCategory(), equalTo(ConfigurableClusterPrivilege.Category.APPLICATION)); + assertThat(conditionalPrivilege, instanceOf(ConfigurableClusterPrivileges.ManageApplicationPrivileges.class)); + assertThat(((ConfigurableClusterPrivileges.ManageApplicationPrivileges) 
conditionalPrivilege).getApplicationNames(), containsInAnyOrder("kibana", "logstash")); q = "{\"applications\": [{\"application\": \"myapp\", \"resources\": [\"*\"], \"privileges\": [\"login\" ]}] }"; @@ -233,13 +233,13 @@ public class RoleDescriptorTests extends ESTestCase { .resources("*") .build() }; - final ConditionalClusterPrivilege[] conditionalClusterPrivileges = { - new ConditionalClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) }; Map metadata = randomBoolean() ? MetadataUtils.DEFAULT_RESERVED_METADATA : null; final RoleDescriptor descriptor = new RoleDescriptor("test", new String[]{"all", "none"}, groups, applicationPrivileges, - conditionalClusterPrivileges, new String[] { "sudo" }, metadata, null); + configurableClusterPrivileges, new String[] { "sudo" }, metadata, null); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin(Settings.EMPTY).getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(output.bytes())), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 57b172f47f0..4ab525a43da 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -16,9 +16,11 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilders; @@ -37,16 +39,19 @@ import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.Role; +import org.elasticsearch.xpack.core.security.authz.privilege.ActionClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; import 
org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; @@ -75,11 +80,11 @@ import java.util.function.Predicate; import static org.elasticsearch.mock.orig.Mockito.times; import static org.elasticsearch.mock.orig.Mockito.verifyNoMoreInteractions; import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anySetOf; import static org.mockito.Matchers.eq; @@ -542,9 +547,15 @@ public class CompositeRolesStoreTests extends ESTestCase { final TransportRequest request2 = mock(TransportRequest.class); final TransportRequest request3 = mock(TransportRequest.class); - ConditionalClusterPrivilege ccp1 = mock(ConditionalClusterPrivilege.class); - when(ccp1.getPrivilege()).thenReturn(ClusterPrivilege.MANAGE_SECURITY); - when(ccp1.getRequestPredicate()).thenReturn(req -> req == request1); + ConfigurableClusterPrivilege ccp1 = new MockConfigurableClusterPrivilege() { + @Override + public ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder) { + Predicate predicate1 = + Automatons.predicate(((ActionClusterPrivilege) ClusterPrivilegeResolver.MANAGE_SECURITY).getAllowedActionPatterns()); + builder.add(this, predicate1, req -> req == request1); + return builder; + } + }; RoleDescriptor role1 = new RoleDescriptor("r1", new String[]{"monitor"}, new IndicesPrivileges[]{ IndicesPrivileges.builder() .indices("abc-*", "xyz-*") @@ 
-565,12 +576,18 @@ public class CompositeRolesStoreTests extends ESTestCase { .resources("settings/*") .privileges("read") .build() - }, new ConditionalClusterPrivilege[] { ccp1 }, + }, new ConfigurableClusterPrivilege[] { ccp1 }, new String[]{"app-user-1"}, null, null); - ConditionalClusterPrivilege ccp2 = mock(ConditionalClusterPrivilege.class); - when(ccp2.getPrivilege()).thenReturn(ClusterPrivilege.MANAGE_SECURITY); - when(ccp2.getRequestPredicate()).thenReturn(req -> req == request2); + ConfigurableClusterPrivilege ccp2 = new MockConfigurableClusterPrivilege() { + @Override + public ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder) { + Predicate predicate2 = + Automatons.predicate(((ActionClusterPrivilege) ClusterPrivilegeResolver.MANAGE_SECURITY).getAllowedActionPatterns()); + builder.add(this, predicate2, req -> req == request2); + return builder; + } + }; RoleDescriptor role2 = new RoleDescriptor("r2", new String[]{"manage_saml"}, new IndicesPrivileges[]{ IndicesPrivileges.builder() .indices("abc-*", "ind-2-*") @@ -587,7 +604,7 @@ public class CompositeRolesStoreTests extends ESTestCase { .resources("*") .privileges("read") .build() - }, new ConditionalClusterPrivilege[] { ccp2 }, + }, new ConfigurableClusterPrivilege[] { ccp2 }, new String[]{"app-user-2"}, null, null); FieldPermissionsCache cache = new FieldPermissionsCache(Settings.EMPTY); @@ -1076,4 +1093,25 @@ public class CompositeRolesStoreTests extends ESTestCase { listener.onResponse(roleDescriptorsFunc.apply(roles)); } } + + private abstract static class MockConfigurableClusterPrivilege implements ConfigurableClusterPrivilege { + @Override + public Category getCategory() { + return Category.APPLICATION; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder; + } + + @Override + public String getWriteableName() { + return "mock"; + } + + @Override + public void writeTo(StreamOutput out) throws 
IOException { + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 0763ff65ec5..6555dbd8823 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; import org.elasticsearch.xpack.core.security.authz.permission.IndicesPermission; import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.permission.RunAsPermission; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import java.io.BufferedWriter; @@ -74,7 +74,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role1" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster().privilege(), is(ClusterPrivilege.ALL)); + assertTrue(role.cluster().implies(ClusterPrivilegeResolver.ALL.buildPermission(ClusterPermission.builder()).build())); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(2)); @@ -102,7 +102,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role1.ab" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster().privilege(), is(ClusterPrivilege.ALL)); + 
assertTrue(role.cluster().implies(ClusterPrivilegeResolver.ALL.buildPermission(ClusterPermission.builder()).build())); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(0)); @@ -114,7 +114,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role2" })); assertThat(role.cluster(), notNullValue()); - assertTrue(Operations.sameLanguage(role.cluster().privilege().getAutomaton(), ClusterPrivilege.ALL.getAutomaton())); + assertTrue(role.cluster().implies(ClusterPrivilegeResolver.ALL.buildPermission(ClusterPermission.builder()).build())); assertThat(role.indices(), notNullValue()); assertThat(role.indices(), is(IndicesPermission.NONE)); assertThat(role.runAs(), is(RunAsPermission.NONE)); @@ -125,7 +125,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role3" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(1)); @@ -149,7 +149,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_run_as" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); assertThat(role.indices(), is(IndicesPermission.NONE)); assertThat(role.runAs(), notNullValue()); assertThat(role.runAs().check("user1"), is(true)); @@ -162,7 +162,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); 
assertThat(role.names(), equalTo(new String[] { "role_run_as1" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); assertThat(role.indices(), is(IndicesPermission.NONE)); assertThat(role.runAs(), notNullValue()); assertThat(role.runAs().check("user1"), is(true)); @@ -175,7 +175,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_fields" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); @@ -197,7 +197,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_query" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); @@ -218,7 +218,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_query_fields" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.NONE)); assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java index 41c5c397651..16c9218058f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java @@ -22,8 +22,8 @@ import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import java.util.Arrays; import java.util.Collections; @@ -55,8 +55,8 @@ public class RestGetUserPrivilegesActionTests extends ESTestCase { public void testBuildResponse() throws Exception { final RestGetUserPrivilegesAction.RestListener listener = new RestGetUserPrivilegesAction.RestListener(null); final Set cluster = new LinkedHashSet<>(Arrays.asList("monitor", "manage_ml", "manage_watcher")); - final Set conditionalCluster = Collections.singleton( - new ConditionalClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02")))); + final Set conditionalCluster = Collections.singleton( + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new 
LinkedHashSet<>(Arrays.asList("app01", "app02")))); final Set index = new LinkedHashSet<>(Arrays.asList( new GetUserPrivilegesResponse.Indices(Arrays.asList("index-1", "index-2", "index-3-*"), Arrays.asList("read", "write"), new LinkedHashSet<>(Arrays.asList( diff --git a/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec index 39f9b2965c6..8b837ee00d1 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec @@ -297,3 +297,54 @@ SELECT dep.dep_id, dep.dep_name, first_name, emp_no FROM test_emp WHERE emp_no=1 ---------------+----------------+---------------+--------------- d007 |Sales |Valter |10099 ; + +selectNestedFieldsWithTwoNestedConditions +SELECT CONCAT(CONCAT(first_name, ' '), last_name) AS name, dep.dep_name, dep.dep_id, dep.from_date FROM test_emp WHERE dep.dep_name = 'Production' AND dep.from_date > '1990-01-01' AND first_name IS NOT NULL; + + + name:s | dep.dep_name:s | dep.dep_id:s | dep.from_date:ts +---------------------+------------------+---------------+------------------------ +Parto Bamford |Production |d004 |1995-12-03T00:00:00.000Z +Duangkaew Piveteau |Production |d004 |1996-11-24T00:00:00.000Z +Duangkaew Piveteau |Quality Management|d006 |2000-06-26T00:00:00.000Z +Kazuhide Peha |Production |d004 |1992-07-29T00:00:00.000Z +Mayuko Warwick |Production |d004 |1997-12-30T00:00:00.000Z +Suzette Pettey |Production |d004 |1998-06-14T00:00:00.000Z +Yongqiao Berztiss |Production |d004 |1995-03-20T00:00:00.000Z +Otmar Herbst |Production |d004 |1991-09-18T00:00:00.000Z +Otmar Herbst |Quality Management|d006 |1999-07-08T00:00:00.000Z +Mingsen Casley |Production |d004 |1994-05-21T00:00:00.000Z +Moss Shanbhogue |Production |d004 |1996-11-16T00:00:00.000Z +Hidefumi Caine |Production |d004 |1992-10-15T00:00:00.000Z +Margareta Bierman |Production |d004 |1992-06-14T00:00:00.000Z +Tuval Kalloufi |Production |d004 
|1995-12-15T00:00:00.000Z +Kenroku Malabarba |Production |d004 |1994-04-09T00:00:00.000Z +Jayson Mandell |Production |d004 |1999-01-23T00:00:00.000Z +Sreekrishna Servieres|Production |d004 |1985-05-13T00:00:00.000Z +Sreekrishna Servieres|Research |d008 |1992-12-11T00:00:00.000Z +Sreekrishna Servieres|Sales |d007 |1993-05-05T00:00:00.000Z +; + +selectNestedAndRootDocument_WithTwoNestedConditions_AndOneRootCondition +SELECT last_name AS name, dep.dep_name, dep.dep_id, dep.from_date FROM test_emp WHERE dep.dep_name = 'Production' AND dep.from_date > '1990-01-01' AND last_name LIKE 'M%' ORDER BY last_name DESC; + + name:s | dep.dep_name:s | dep.dep_id:s | dep.from_date:ts +---------------+----------------+---------------+------------------------ +Mandell |Production |d004 |1999-01-23T00:00:00.000Z +Malabarba |Production |d004 |1994-04-09T00:00:00.000Z +; + +selectNestedAndRootDocument_WithMultipleConditions_AndNestedSorting +SELECT CONCAT(CONCAT(first_name, ' '), last_name) AS name, dep.dep_name, dep.dep_id, dep.from_date, dep.to_date FROM test_emp WHERE dep.from_date > '1990-01-01' AND dep.dep_name='Production' AND dep.to_date < '2000-01-01' ORDER BY dep.dep_id, dep.from_date, name; + + name:s | dep.dep_name:s | dep.dep_id:s | dep.from_date:ts | dep.to_date:ts +---------------------+------------------+---------------+------------------------+------------------------ +Otmar Herbst |Production |d004 |1991-09-18T00:00:00.000Z|1999-07-08T00:00:00.000Z +Otmar Herbst |Quality Management|d006 |1999-07-08T00:00:00.000Z|9999-01-01T00:00:00.000Z +Kazuhide Peha |Production |d004 |1992-07-29T00:00:00.000Z|9999-01-01T00:00:00.000Z +Kazuhide Peha |Development |d005 |1987-04-03T00:00:00.000Z|1992-07-29T00:00:00.000Z +Sreekrishna Servieres|Production |d004 |1985-05-13T00:00:00.000Z|1989-06-29T00:00:00.000Z +Sreekrishna Servieres|Customer Service |d009 |1989-06-29T00:00:00.000Z|1992-12-11T00:00:00.000Z +Sreekrishna Servieres|Research |d008 
|1992-12-11T00:00:00.000Z|1993-05-05T00:00:00.000Z +Sreekrishna Servieres|Sales |d007 |1993-05-05T00:00:00.000Z|1994-02-01T00:00:00.000Z +; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java index 54f60ec6ae1..83503541385 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java @@ -11,10 +11,16 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor; import org.elasticsearch.xpack.sql.session.Cursor; +import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Set; /** @@ -22,6 +28,7 @@ import java.util.Set; */ class SearchHitRowSet extends ResultRowSet { private final SearchHit[] hits; + private final Map> flatInnerHits = new HashMap<>(); private final Cursor cursor; private final Set innerHits = new LinkedHashSet<>(); private final String innerHit; @@ -60,12 +67,13 @@ class SearchHitRowSet extends ResultRowSet { sz = 0; for (SearchHit hit : hits) { + Map innerHitsPerPath = new HashMap<>(innerHits.size()); for (String ih : innerHits) { - SearchHits sh = hit.getInnerHits().get(ih); - if (sh != null) { - sz += sh.getHits().length; - } + SearchHit[] sh = getAllInnerHits(hit, ih); + innerHitsPerPath.put(ih, sh); + sz += sh.length; } + flatInnerHits.put(hit, innerHitsPerPath); } } // page size @@ -102,8 +110,8 @@ class SearchHitRowSet extends ResultRowSet { for (int lvl = 0; lvl <= extractorLevel ; lvl++) { // TODO: add support for multi-nested 
doc if (hit != null) { - SearchHits innerHits = hit.getInnerHits().get(innerHit); - sh = innerHits == null ? SearchHits.EMPTY : innerHits.getHits(); + SearchHit[] innerHits = flatInnerHits.get(hit).get(innerHit); + sh = innerHits == null ? SearchHits.EMPTY : innerHits; } hit = sh[indexPerLevel[lvl]]; } @@ -111,6 +119,47 @@ class SearchHitRowSet extends ResultRowSet { return e.extract(hit); } + private SearchHit[] getAllInnerHits(SearchHit hit, String path) { + if (hit == null) { + return null; + } + + // multiple inner_hits results sections can match the same nested documents, thus we eliminate the duplicates by + // using the offset as the "deduplicator" in a HashMap + HashMap lhm = new HashMap<>(); + for (Entry entry : hit.getInnerHits().entrySet()) { + int endOfPath = entry.getKey().lastIndexOf('_'); + if (endOfPath >= 0 && entry.getKey().substring(0, endOfPath).equals(path)) { + SearchHit[] h = entry.getValue().getHits(); + for (int i = 0; i < h.length; i++) { + lhm.put(h[i].getNestedIdentity().getOffset(), h[i]); + } + } + } + + // Then sort the resulting List based on the offset of the same inner hit. 
Each inner_hit match will have an offset value, + // relative to its location in the _source + List sortedList = new ArrayList<>(lhm.values()); + Collections.sort(sortedList, new NestedHitOffsetComparator()); + + return sortedList.toArray(new SearchHit[sortedList.size()]); + } + + private class NestedHitOffsetComparator implements Comparator { + @Override + public int compare(SearchHit sh1, SearchHit sh2) { + if (sh1 == null && sh2 == null) { + return 0; + } else if (sh1 == null) { + return -1; + } else if (sh2 == null) { + return 1; + } + + return Integer.valueOf(sh1.getNestedIdentity().getOffset()).compareTo(Integer.valueOf(sh2.getNestedIdentity().getOffset())); + } + } + @Override protected boolean doHasCurrent() { return row < size; @@ -139,8 +188,8 @@ class SearchHitRowSet extends ResultRowSet { // TODO: improve this for multi-nested responses String path = lvl == 0 ? innerHit : null; if (path != null) { - SearchHits innerHits = h.getInnerHits().get(path); - sh = innerHits == null ? SearchHits.EMPTY : innerHits.getHits(); + SearchHit[] innerHits = flatInnerHits.get(h).get(path); + sh = innerHits == null ? 
SearchHits.EMPTY : innerHits; } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java index cf4a66131cf..5d02e81dda7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.sql.plugin; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -27,8 +26,7 @@ public class RestSqlClearCursorAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlClearCursorAction.class)); - public RestSqlClearCursorAction(Settings settings, RestController controller) { - super(settings); + public RestSqlClearCursorAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, Protocol.CLEAR_CURSOR_REST_ENDPOINT, this, @@ -42,7 +40,7 @@ public class RestSqlClearCursorAction extends BaseRestHandler { try (XContentParser parser = request.contentParser()) { sqlRequest = SqlClearCursorRequest.fromXContent(parser); } - + return channel -> client.executeLocally(SqlClearCursorAction.INSTANCE, sqlRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index bae5a859484..f1bd8162239 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -38,8 +37,7 @@ public class RestSqlQueryAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlQueryAction.class)); - public RestSqlQueryAction(Settings settings, RestController controller) { - super(settings); + public RestSqlQueryAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, Protocol.SQL_QUERY_REST_ENDPOINT, this, @@ -57,7 +55,7 @@ public class RestSqlQueryAction extends BaseRestHandler { try (XContentParser parser = request.contentOrSourceParamParser()) { sqlRequest = SqlQueryRequest.fromXContent(parser); } - + /* * Since we support {@link TextFormat} and * {@link XContent} outputs we can't use {@link RestToXContentListener} @@ -110,7 +108,7 @@ public class RestSqlQueryAction extends BaseRestHandler { throw new IllegalArgumentException("Invalid use of [columnar] argument: cannot be used in combination with " + "txt, csv or tsv formats"); } - + long startNanos = System.nanoTime(); return channel -> client.execute(SqlQueryAction.INSTANCE, sqlRequest, new RestResponseListener(channel) { @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java 
index 8f5d75008c9..bc6af60d796 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlStatsAction.java @@ -9,23 +9,19 @@ package org.elasticsearch.xpack.sql.plugin; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.xpack.sql.proto.Protocol; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestSqlStatsAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlStatsAction.class)); - protected RestSqlStatsAction(Settings settings, RestController controller) { - super(settings); + protected RestSqlStatsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, Protocol.SQL_STATS_REST_ENDPOINT, this, @@ -38,7 +34,7 @@ public class RestSqlStatsAction extends BaseRestHandler { } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { SqlStatsRequest request = new SqlStatsRequest(); return channel -> client.execute(SqlStatsAction.INSTANCE, request, new RestActions.NodesResponseRestListener<>(channel)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java index 
c066b854593..8ac6ef9fc81 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.sql.plugin; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -27,11 +26,10 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; * REST action for translating SQL queries into ES requests */ public class RestSqlTranslateAction extends BaseRestHandler { - + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSqlTranslateAction.class)); - - public RestSqlTranslateAction(Settings settings, RestController controller) { - super(settings); + + public RestSqlTranslateAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, Protocol.SQL_TRANSLATE_REST_ENDPOINT, this, @@ -49,7 +47,7 @@ public class RestSqlTranslateAction extends BaseRestHandler { try (XContentParser parser = request.contentOrSourceParamParser()) { sqlRequest = SqlTranslateRequest.fromXContent(parser); } - + return channel -> client.executeLocally(SqlTranslateAction.INSTANCE, sqlRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java index c80b399d447..4866acfde34 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java @@ -120,10 +120,10 @@ public class SqlPlugin extends Plugin implements ActionPlugin { return emptyList(); } - return Arrays.asList(new RestSqlQueryAction(settings, restController), - new RestSqlTranslateAction(settings, restController), - new RestSqlClearCursorAction(settings, restController), - new RestSqlStatsAction(settings, restController)); + return Arrays.asList(new RestSqlQueryAction(restController), + new RestSqlTranslateAction(restController), + new RestSqlClearCursorAction(restController), + new RestSqlStatsAction(restController)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQuery.java index bb4310d3b91..59703da23ab 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQuery.java @@ -5,13 +5,6 @@ */ package org.elasticsearch.xpack.sql.querydsl.query; -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; @@ -19,19 +12,25 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.sort.NestedSortBuilder; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.tree.Source; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + 
import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static java.util.Collections.unmodifiableMap; - import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; /** * A query to a nested document. */ public class NestedQuery extends Query { + private static long COUNTER = 0; // TODO: make this configurable private static final int MAX_INNER_HITS = 99; private static final List NO_STORED_FIELD = singletonList(StoredFieldsContext._NONE_); @@ -93,9 +92,14 @@ public class NestedQuery extends Query { if (false == sort.getPath().equals(path)) { return; } + + //TODO: Add all filters in nested sorting when https://github.com/elastic/elasticsearch/issues/33079 is implemented + // Adding multiple filters to sort sections makes sense for nested queries where multiple conditions belong to the same + // nested query. The current functionality creates one nested query for each condition involving a nested field. QueryBuilder childAsBuilder = child.asBuilder(); if (sort.getFilter() != null && false == sort.getFilter().equals(childAsBuilder)) { - throw new SqlIllegalArgumentException("nested query should have been grouped in one place"); + // throw new SqlIllegalArgumentException("nested query should have been grouped in one place"); + return; } sort.setFilter(childAsBuilder); } @@ -109,6 +113,7 @@ public class NestedQuery extends Query { InnerHitBuilder ihb = new InnerHitBuilder(); ihb.setSize(0); ihb.setSize(MAX_INNER_HITS); + ihb.setName(path + "_" + COUNTER++); boolean noSourceNeeded = true; List sourceFields = new ArrayList<>(); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQueryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQueryTests.java index 818ba04fa18..a1d1c7c93f5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQueryTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQueryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.querydsl.query; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.SourceTests; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -122,11 +121,11 @@ public class NestedQueryTests extends ESTestCase { assertEquals(q.child().asBuilder(), sort.getFilter()); q.enrichNestedSort(sort); - // But enriching using another query is not - NestedQuery other = new NestedQuery(SourceTests.randomSource(), q.path(), q.fields(), - randomValueOtherThan(q.child(), () -> randomQuery(0))); - Exception e = expectThrows(SqlIllegalArgumentException.class, () -> other.enrichNestedSort(sort)); - assertEquals("nested query should have been grouped in one place", e.getMessage()); + // But enriching using another query will keep only the first query + Query originalChildQuery = randomValueOtherThan(q.child(), () -> randomQuery(0)); + NestedQuery other = new NestedQuery(SourceTests.randomSource(), q.path(), q.fields(), originalChildQuery); + other.enrichNestedSort(sort); + assertEquals(other.child().asBuilder(), originalChildQuery.asBuilder()); } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml index 881cddc7102..93b69bbc3e2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml @@ -619,7 +619,7 @@ setup: transform_id: "airline-transform-start-delete" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-delete" } - - match: { transforms.0.task_state: "started" } + 
- match: { transforms.0.state: "/started|indexing/" } - do: catch: /Cannot delete data frame \[airline-transform-start-delete\] as the task is running/ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml index 936294d8fa1..044f5212a99 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml @@ -100,7 +100,7 @@ teardown: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - - match: { transforms.0.task_state: "started" } + - match: { transforms.0.state: "/started|indexing/" } - do: data_frame.stop_data_frame_transform: @@ -113,7 +113,7 @@ teardown: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - - match: { transforms.0.task_state: "stopped" } + - match: { transforms.0.state: "stopped" } - do: data_frame.start_data_frame_transform: @@ -125,7 +125,7 @@ teardown: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - - match: { transforms.0.task_state: "started" } + - match: { transforms.0.state: "/started|indexing/" } --- "Test start/stop/start continuous transform": - do: @@ -157,7 +157,7 @@ teardown: transform_id: "airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop-continuous" } - - match: { transforms.0.task_state: "started" } + - match: { transforms.0.state: "/started|indexing/" } - do: data_frame.stop_data_frame_transform: @@ -170,7 +170,7 @@ teardown: transform_id: "airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: 
"airline-transform-start-stop-continuous" } - - match: { transforms.0.task_state: "stopped" } + - match: { transforms.0.state: "stopped" } - do: data_frame.start_data_frame_transform: @@ -182,7 +182,7 @@ teardown: transform_id: "airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop-continuous" } - - match: { transforms.0.task_state: "started" } + - match: { transforms.0.state: "/started|indexing/" } - do: data_frame.stop_data_frame_transform: @@ -244,14 +244,14 @@ teardown: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - - match: { transforms.0.task_state: "started" } + - match: { transforms.0.state: "/started|indexing/" } - do: data_frame.get_data_frame_transform_stats: transform_id: "airline-transform-start-later" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-later" } - - match: { transforms.0.task_state: "stopped" } + - match: { transforms.0.state: "stopped" } - do: data_frame.start_data_frame_transform: @@ -270,7 +270,7 @@ teardown: transform_id: "airline-transform-start-later" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-later" } - - match: { transforms.0.task_state: "started" } + - match: { transforms.0.state: "/started|indexing/" } - do: data_frame.stop_data_frame_transform: @@ -316,8 +316,8 @@ teardown: data_frame.get_data_frame_transform_stats: transform_id: "*" - match: { count: 2 } - - match: { transforms.0.task_state: "stopped" } - - match: { transforms.1.task_state: "stopped" } + - match: { transforms.0.state: "stopped" } + - match: { transforms.1.state: "stopped" } - do: data_frame.delete_data_frame_transform: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml index 8609159f02a..08ab6ce969c 100644 --- 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml @@ -47,7 +47,7 @@ teardown: transform_id: "airline-transform-stats" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats" } - - match: { transforms.0.task_state: "/started|stopped/" } + - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - lte: { transforms.0.checkpointing.last.checkpoint: 1 } - lte: { transforms.0.stats.pages_processed: 1 } - match: { transforms.0.stats.documents_processed: 0 } @@ -227,7 +227,7 @@ teardown: - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats-continuous" } # Since this is continuous, there is no worry of it automatically stopping - - match: { transforms.0.task_state: "started" } + - match: { transforms.0.state: "/started|indexing/" } - lte: { transforms.0.checkpointing.last.checkpoint: 1 } # Since this is continuous, and _start does not return until it is assigned # we should see a node assignment diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml index 772c48e5474..253790878c5 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/data_frame_analytics_crud.yml @@ -607,7 +607,11 @@ setup: "dest": { "index": "index-bar_dest" }, - "analysis": {"outlier_detection":{}} + "analysis": { + "regression":{ + "dependent_variable": "to_predict" + } + } } - match: { id: "bar" } @@ -768,7 +772,11 @@ setup: "dest": { "index": "index-bar_dest" }, - "analysis": {"outlier_detection":{}} + "analysis": { + "regression":{ + "dependent_variable": "to_predict" + } + } } - match: { id: "bar" } @@ -930,3 +938,247 @@ setup: xpack.ml.max_model_memory_limit: null - match: {transient: {}} 
+--- +"Test put regression given dependent_variable is not defined": + + - do: + catch: /parse_exception/ + ml.put_data_frame_analytics: + id: "regression-without-dependent-variable" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": {} + } + } + +--- +"Test put regression given negative lambda": + + - do: + catch: /\[lambda\] must be a non-negative double/ + ml.put_data_frame_analytics: + id: "regression-negative-lambda" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "lambda": -1.0 + } + } + } + +--- +"Test put regression given negative gamma": + + - do: + catch: /\[gamma\] must be a non-negative double/ + ml.put_data_frame_analytics: + id: "regression-negative-gamma" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "gamma": -1.0 + } + } + } + +--- +"Test put regression given eta less than 1e-3": + + - do: + catch: /\[eta\] must be a double in \[0.001, 1\]/ + ml.put_data_frame_analytics: + id: "regression-eta-greater-less-than-valid" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "eta": 0.0009 + } + } + } + +--- +"Test put regression given eta greater than one": + + - do: + catch: /\[eta\] must be a double in \[0.001, 1\]/ + ml.put_data_frame_analytics: + id: "regression-eta-greater-than-one" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "eta": 1.00001 + } + } + } + +--- +"Test put regression given maximum_number_trees is zero": + + - do: + catch: /\[maximum_number_trees\] must be an integer in \[1, 2000\]/ + 
ml.put_data_frame_analytics: + id: "regression-maximum-number-trees-is-zero" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "maximum_number_trees": 0 + } + } + } + +--- +"Test put regression given maximum_number_trees is greater than 2k": + + - do: + catch: /\[maximum_number_trees\] must be an integer in \[1, 2000\]/ + ml.put_data_frame_analytics: + id: "regression-maximum-number-trees-greater-than-2k" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "maximum_number_trees": 2001 + } + } + } + +--- +"Test put regression given feature_bag_fraction is negative": + + - do: + catch: /\[feature_bag_fraction\] must be a double in \(0, 1\]/ + ml.put_data_frame_analytics: + id: "regression-feature-bag-fraction-is-negative" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "feature_bag_fraction": -0.0001 + } + } + } + +--- +"Test put regression given feature_bag_fraction is greater than one": + + - do: + catch: /\[feature_bag_fraction\] must be a double in \(0, 1\]/ + ml.put_data_frame_analytics: + id: "regression-feature-bag-fraction-is-greater-than-one" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "feature_bag_fraction": 1.0001 + } + } + } + +--- +"Test put regression given valid": + + - do: + ml.put_data_frame_analytics: + id: "valid-regression" + body: > + { + "source": { + "index": "index-source" + }, + "dest": { + "index": "index-dest" + }, + "analysis": { + "regression": { + "dependent_variable": "foo", + "lambda": 3.14, + "gamma": 0.42, + "eta": 0.5, + "maximum_number_trees": 400, + 
"feature_bag_fraction": 0.3 + } + } + } + - match: { id: "valid-regression" } + - match: { source.index: ["index-source"] } + - match: { dest.index: "index-dest" } + - match: { analysis: { + "regression":{ + "dependent_variable": "foo", + "lambda": 3.14, + "gamma": 0.42, + "eta": 0.5, + "maximum_number_trees": 400, + "feature_bag_fraction": 0.3 + } + }} + - is_true: create_time + - is_true: version diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index 3712629d018..ee4ebec0b0b 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -568,14 +568,14 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa return emptyList(); } return Arrays.asList( - new RestPutWatchAction(settings, restController), - new RestDeleteWatchAction(settings, restController), - new RestWatcherStatsAction(settings, restController), - new RestGetWatchAction(settings, restController), - new RestWatchServiceAction(settings, restController), - new RestAckWatchAction(settings, restController), - new RestActivateWatchAction(settings, restController), - new RestExecuteWatchAction(settings, restController)); + new RestPutWatchAction(restController), + new RestDeleteWatchAction(restController), + new RestWatcherStatsAction(restController), + new RestGetWatchAction(restController), + new RestWatchServiceAction(restController), + new RestAckWatchAction(restController), + new RestActivateWatchAction(restController), + new RestExecuteWatchAction(restController)); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/WatcherRestHandler.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/WatcherRestHandler.java index 3588df514c3..f56baad2e49 100644 --- 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/WatcherRestHandler.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/WatcherRestHandler.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.rest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; @@ -17,10 +16,6 @@ public abstract class WatcherRestHandler extends BaseRestHandler { protected static String URI_BASE = "/_xpack"; - public WatcherRestHandler(Settings settings) { - super(settings); - } - @Override public final RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { return doPrepareRequest(request, new WatcherClient(client)); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java index 23180631902..5fa7c42cb07 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -23,8 +22,6 @@ import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRespon import org.elasticsearch.xpack.core.watcher.watch.WatchField; import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; -import 
java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; @@ -35,8 +32,7 @@ public class RestAckWatchAction extends WatcherRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestAckWatchAction.class)); - public RestAckWatchAction(Settings settings, RestController controller) { - super(settings); + public RestAckWatchAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_watcher/watch/{id}/_ack", this, @@ -58,7 +54,7 @@ public class RestAckWatchAction extends WatcherRestHandler { } @Override - public RestChannelConsumer doPrepareRequest(RestRequest request, WatcherClient client) throws IOException { + public RestChannelConsumer doPrepareRequest(RestRequest request, WatcherClient client) { AckWatchRequest ackWatchRequest = new AckWatchRequest(request.param("id")); String[] actions = request.paramAsStringArray("actions", null); if (actions != null) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java index 93ce7f1b322..d31b704df10 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -23,8 +22,6 @@ import 
org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateW import org.elasticsearch.xpack.core.watcher.watch.WatchField; import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; @@ -35,8 +32,7 @@ public class RestActivateWatchAction extends WatcherRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestActivateWatchAction.class)); - public RestActivateWatchAction(Settings settings, RestController controller) { - super(settings); + public RestActivateWatchAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_watcher/watch/{id}/_activate", this, @@ -45,7 +41,7 @@ public class RestActivateWatchAction extends WatcherRestHandler { PUT, "/_watcher/watch/{id}/_activate", this, PUT, URI_BASE + "/watcher/watch/{id}/_activate", deprecationLogger); - final DeactivateRestHandler deactivateRestHandler = new DeactivateRestHandler(settings); + final DeactivateRestHandler deactivateRestHandler = new DeactivateRestHandler(); controller.registerWithDeprecatedHandler( POST, "/_watcher/watch/{id}/_deactivate", deactivateRestHandler, POST, URI_BASE + "/watcher/watch/{id}/_deactivate", deprecationLogger); @@ -60,7 +56,7 @@ public class RestActivateWatchAction extends WatcherRestHandler { } @Override - public RestChannelConsumer doPrepareRequest(RestRequest request, WatcherClient client) throws IOException { + public RestChannelConsumer doPrepareRequest(RestRequest request, WatcherClient client) { String watchId = request.param("id"); return channel -> client.activateWatch(new ActivateWatchRequest(watchId, true), new RestBuilderListener(channel) { @@ -75,8 +71,7 @@ public class RestActivateWatchAction extends WatcherRestHandler { private static class DeactivateRestHandler 
extends WatcherRestHandler { - DeactivateRestHandler(Settings settings) { - super(settings); + DeactivateRestHandler() { } @Override @@ -85,7 +80,7 @@ public class RestActivateWatchAction extends WatcherRestHandler { } @Override - public RestChannelConsumer doPrepareRequest(RestRequest request, WatcherClient client) throws IOException { + public RestChannelConsumer doPrepareRequest(RestRequest request, WatcherClient client) { String watchId = request.param("id"); return channel -> client.activateWatch(new ActivateWatchRequest(watchId, false), new RestBuilderListener(channel) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java index 0b0fbc478df..4314462c78d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -21,8 +20,6 @@ import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; -import java.io.IOException; - import static org.elasticsearch.rest.RestRequest.Method.DELETE; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; @@ -31,8 +28,7 @@ public class RestDeleteWatchAction extends WatcherRestHandler { private static final DeprecationLogger 
deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestDeleteWatchAction.class)); - public RestDeleteWatchAction(Settings settings, RestController controller) { - super(settings); + public RestDeleteWatchAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( DELETE, "/_watcher/watch/{id}", this, @@ -45,7 +41,7 @@ public class RestDeleteWatchAction extends WatcherRestHandler { } @Override - protected RestChannelConsumer doPrepareRequest(final RestRequest request, WatcherClient client) throws IOException { + protected RestChannelConsumer doPrepareRequest(final RestRequest request, WatcherClient client) { DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(request.param("id")); return channel -> client.deleteWatch(deleteWatchRequest, new RestBuilderListener(channel) { @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java index 2c85dca35c0..cd213a65dfb 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -52,8 +51,7 @@ public class RestExecuteWatchAction extends WatcherRestHandler implements RestRe WatchField.THROTTLE_PERIOD.getPreferredName(), 
WatchField.THROTTLE_PERIOD_HUMAN.getPreferredName(), WatchField.METADATA.getPreferredName(), WatchField.STATUS.getPreferredName()); - public RestExecuteWatchAction(Settings settings, RestController controller) { - super(settings); + public RestExecuteWatchAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_watcher/watch/{id}/_execute", this, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java index 0d9b1ee6a19..38128259e3a 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestGetWatchAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -29,8 +28,7 @@ public class RestGetWatchAction extends WatcherRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetWatchAction.class)); - public RestGetWatchAction(Settings settings, RestController controller) { - super(settings); + public RestGetWatchAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/_watcher/watch/{id}", this, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java index 
edbfa3cf1fe..c422907ce17 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; @@ -24,7 +23,6 @@ import org.elasticsearch.xpack.core.security.rest.RestRequestFilter; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; -import java.io.IOException; import java.util.Collections; import java.util.Set; @@ -37,8 +35,7 @@ public class RestPutWatchAction extends WatcherRestHandler implements RestReques private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutWatchAction.class)); - public RestPutWatchAction(Settings settings, RestController controller) { - super(settings); + public RestPutWatchAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_watcher/watch/{id}", this, @@ -54,7 +51,7 @@ public class RestPutWatchAction extends WatcherRestHandler implements RestReques } @Override - protected RestChannelConsumer doPrepareRequest(final RestRequest request, WatcherClient client) throws IOException { + protected RestChannelConsumer doPrepareRequest(final RestRequest request, WatcherClient client) { PutWatchRequest putWatchRequest = new PutWatchRequest(request.param("id"), request.content(), request.getXContentType()); 
putWatchRequest.setVersion(request.paramAsLong("version", Versions.MATCH_ANY)); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java index d2ec2bd423e..c9f6db4b6e0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatchServiceAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -22,14 +21,13 @@ public class RestWatchServiceAction extends WatcherRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestWatchServiceAction.class)); - public RestWatchServiceAction(Settings settings, RestController controller) { - super(settings); + public RestWatchServiceAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( POST, "/_watcher/_start", this, POST, URI_BASE + "/watcher/_start", deprecationLogger); controller.registerWithDeprecatedHandler( - POST, "/_watcher/_stop", new StopRestHandler(settings), + POST, "/_watcher/_stop", new StopRestHandler(), POST, URI_BASE + "/watcher/_stop", deprecationLogger); } @@ -45,8 +43,7 @@ public class RestWatchServiceAction extends WatcherRestHandler { private static class StopRestHandler extends WatcherRestHandler { - StopRestHandler(Settings settings) { - super(settings); + StopRestHandler() { } @Override diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java index 6d2640d66da..1a41f8d951e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; @@ -18,7 +17,6 @@ import org.elasticsearch.xpack.core.watcher.client.WatcherClient; import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsRequest; import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; -import java.io.IOException; import java.util.Collections; import java.util.Set; @@ -28,8 +26,7 @@ public class RestWatcherStatsAction extends WatcherRestHandler { private static final Logger logger = LogManager.getLogger(RestWatcherStatsAction.class); private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); - public RestWatcherStatsAction(Settings settings, RestController controller) { - super(settings); + public RestWatcherStatsAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( GET, "/_watcher/stats", this, @@ -45,7 +42,7 @@ public class RestWatcherStatsAction extends WatcherRestHandler { } @Override - protected RestChannelConsumer doPrepareRequest(final RestRequest restRequest, WatcherClient client) throws IOException { + protected RestChannelConsumer 
doPrepareRequest(final RestRequest restRequest, WatcherClient client) { Set metrics = Strings.tokenizeByCommaToSet(restRequest.param("metric", "")); WatcherStatsRequest request = new WatcherStatsRequest(); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchActionTests.java index f71999a068c..89be42ae5b9 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/rest/action/RestExecuteWatchActionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.rest.action; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestController; @@ -39,7 +38,7 @@ public class RestExecuteWatchActionTests extends ESTestCase { ExecuteWatchRequestBuilder builder = new ExecuteWatchRequestBuilder(client); when(watcherClient.prepareExecuteWatch()).thenReturn(builder); - RestExecuteWatchAction restExecuteWatchAction = new RestExecuteWatchAction(Settings.EMPTY, restController); + RestExecuteWatchAction restExecuteWatchAction = new RestExecuteWatchAction(restController); restExecuteWatchAction.doPrepareRequest(createFakeRestRequest(randomId, recordExecution, ignoreCondition, debugCondition), watcherClient); diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java index 4bbef475f99..8b7f9d06bf5 100644 --- 
a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java @@ -11,15 +11,22 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.WarningFailureException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.upgrades.AbstractFullClusterRestartTestCase; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; import org.elasticsearch.xpack.test.rest.XPackRestTestHelper; import org.junit.Before; @@ -28,12 +35,12 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static 
org.hamcrest.Matchers.nullValue; public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClusterRestartTestCase { @@ -41,14 +48,23 @@ public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClust private static final String OLD_CLUSTER_JOB_ID = "ml-config-mappings-old-cluster-job"; private static final String NEW_CLUSTER_JOB_ID = "ml-config-mappings-new-cluster-job"; - private static final Map EXPECTED_DATA_FRAME_ANALYSIS_MAPPINGS = - mapOf( - "properties", mapOf( - "outlier_detection", mapOf( - "properties", mapOf( - "method", mapOf("type", "keyword"), - "n_neighbors", mapOf("type", "integer"), - "feature_influence_threshold", mapOf("type", "double"))))); + private static final Map EXPECTED_DATA_FRAME_ANALYSIS_MAPPINGS = getDataFrameAnalysisMappings(); + + @SuppressWarnings("unchecked") + private static Map getDataFrameAnalysisMappings() { + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.startObject(); + ElasticsearchMappings.addDataFrameAnalyticsFields(builder); + builder.endObject(); + + Map asMap = builder.generator().contentType().xContent().createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput()).map(); + return (Map) asMap.get(DataFrameAnalyticsConfig.ANALYSIS.getPreferredName()); + } catch (IOException e) { + fail("Failed to initialize expected data frame analysis mappings"); + } + return null; + } @Override protected Settings restClientSettings() { @@ -71,8 +87,8 @@ public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClust // trigger .ml-config index creation createAnomalyDetectorJob(OLD_CLUSTER_JOB_ID); if (getOldClusterVersion().onOrAfter(Version.V_7_3_0)) { - // .ml-config has correct mappings from the start - assertThat(mappingsForDataFrameAnalysis(), is(equalTo(EXPECTED_DATA_FRAME_ANALYSIS_MAPPINGS))); + // .ml-config has mappings for analytics as the feature was introduced in 7.3.0 + 
assertThat(mappingsForDataFrameAnalysis(), is(notNullValue())); } else { // .ml-config does not yet have correct mappings, it will need an update after cluster is upgraded assertThat(mappingsForDataFrameAnalysis(), is(nullValue())); @@ -125,18 +141,4 @@ public class MlConfigIndexMappingsFullClusterRestartIT extends AbstractFullClust mappings = (Map) XContentMapValues.extractValue(mappings, "properties", "analysis"); return mappings; } - - private static Map mapOf(K k1, V v1) { - Map map = new HashMap<>(); - map.put(k1, v1); - return map; - } - - private static Map mapOf(K k1, V v1, K k2, V v2, K k3, V v3) { - Map map = new HashMap<>(); - map.put(k1, v1); - map.put(k2, v2); - map.put(k3, v3); - return map; - } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java index d4ebbb83a26..a4a5025a139 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsResponse; import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.client.dataframe.transforms.DataFrameTransformStats; -import org.elasticsearch.client.dataframe.transforms.DataFrameTransformTaskState; import org.elasticsearch.client.dataframe.transforms.DestConfig; import org.elasticsearch.client.dataframe.transforms.SourceConfig; import org.elasticsearch.client.dataframe.transforms.TimeSyncConfig; @@ -47,6 +46,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; +import static 
org.hamcrest.Matchers.oneOf; @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/43662") public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { @@ -139,7 +139,7 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { assertThat(stateAndStats.getIndexerStats().getOutputDocuments(), equalTo((long)ENTITIES.size())); assertThat(stateAndStats.getIndexerStats().getNumDocuments(), equalTo(totalDocsWritten)); - assertThat(stateAndStats.getTaskState(), equalTo(DataFrameTransformTaskState.STARTED)); + assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); } private void verifyContinuousDataFrameHandlesData(long expectedLastCheckpoint) throws Exception { @@ -148,7 +148,7 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { // if it was assigned to the node that was removed from the cluster assertBusy(() -> { DataFrameTransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); - assertThat(stateAndStats.getTaskState(), equalTo(DataFrameTransformTaskState.STARTED)); + assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); }, 120, TimeUnit.SECONDS); @@ -174,8 +174,8 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { TimeUnit.SECONDS); DataFrameTransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); - assertThat(stateAndStats.getTaskState(), - equalTo(DataFrameTransformTaskState.STARTED)); + assertThat(stateAndStats.getState(), + oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); assertThat(stateAndStats.getIndexerStats().getOutputDocuments(), greaterThan(previousStateAndStats.getIndexerStats().getOutputDocuments())); assertThat(stateAndStats.getIndexerStats().getNumDocuments(), diff --git 
a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml index 4d10ba94850..86a1e6a8daa 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml @@ -29,10 +29,10 @@ transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we - # cannot assert on task_state in the mixed cluster as it could be at the top level or under state + # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we cannot + # assert on state in the mixed cluster as it could be state at the top level or state.task_state # TODO: uncomment this assertion in master - #- match: { transforms.0.task_state: "/started|stopped/" } + #- match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: data_frame.stop_data_frame_transform: @@ -45,10 +45,10 @@ transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we - # cannot assert on task_state in the mixed cluster as it could be at the top level or under state + # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we cannot + # assert on state in the mixed cluster as it could be state at the top level or state.task_state # TODO: uncomment this assertion in master - #- match: { transforms.0.task_state: "stopped" } + #- match: { transforms.0.state: "stopped" } - do: data_frame.put_data_frame_transform: @@ -92,10 +92,10 @@ transform_id: 
"mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we - # cannot assert on task_state in the mixed cluster as it could be at the top level or under state + # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we cannot + # assert on state in the mixed cluster as it could be state at the top level or state.task_state # TODO: uncomment this assertion in master - #- match: { transforms.0.task_state: "/started|stopped/" } + #- match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: data_frame.stop_data_frame_transform: @@ -108,10 +108,10 @@ transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we - # cannot assert on task_state in the mixed cluster as it could be at the top level or under state + # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we cannot + # assert on state in the mixed cluster as it could be state at the top level or state.task_state # TODO: uncomment this assertion in master - #- match: { transforms.0.task_state: "stopped" } + #- match: { transforms.0.state: "stopped" } --- "Test GET, start, and stop old cluster batch transforms": @@ -143,10 +143,10 @@ transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we - # cannot assert on task_state in the mixed cluster as it could be at the top level or under state + # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we cannot + # assert on state in the mixed cluster as it could be state at the top level or state.task_state # TODO: 
uncomment this assertion in master - #- match: { transforms.0.task_state: "/started|stopped/" } + #- match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: data_frame.stop_data_frame_transform: @@ -158,10 +158,10 @@ transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we - # cannot assert on task_state in the mixed cluster as it could be at the top level or under state + # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we cannot + # assert on state in the mixed cluster as it could be state at the top level or state.task_state # TODO: uncomment this assertion in master - #- match: { transforms.0.task_state: "stopped" } + #- match: { transforms.0.state: "stopped" } - do: data_frame.get_data_frame_transform: @@ -184,10 +184,10 @@ transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we - # cannot assert on task_state in the mixed cluster as it could be at the top level or under state + # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we cannot + # assert on state in the mixed cluster as it could be state at the top level or state.task_state # TODO: uncomment this assertion in master - #- match: { transforms.0.task_state: "/started|stopped/" } + #- match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: data_frame.stop_data_frame_transform: @@ -199,7 +199,7 @@ transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we - # cannot assert on task_state in the mixed cluster as it could be at the top level or 
under state + # Since we are breaking the stats format between 7.3 and 7.4 (allowed because we're beta) we cannot + # assert on state in the mixed cluster as it could be state at the top level or state.task_state # TODO: uncomment this assertion in master - #- match: { transforms.0.task_state: "stopped" } + #- match: { transforms.0.state: "stopped" } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml index d16bfe7c436..36df712fc35 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml @@ -27,7 +27,7 @@ setup: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - - match: { transforms.0.task_state: "/started|stopped/" } + - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: data_frame.stop_data_frame_transform: @@ -39,7 +39,7 @@ setup: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - - match: { transforms.0.task_state: "stopped" } + - match: { transforms.0.state: "stopped" } - do: data_frame.get_data_frame_transform: transform_id: "old-complex-transform" @@ -61,7 +61,7 @@ setup: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - - match: { transforms.0.task_state: "/started|stopped/" } + - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: data_frame.stop_data_frame_transform: @@ -73,7 +73,7 @@ setup: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - - match: { transforms.0.task_state: "stopped" } + - match: { 
transforms.0.state: "stopped" } # Simple and complex Mixed cluster transforms - do: @@ -95,7 +95,7 @@ setup: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - - match: { transforms.0.task_state: "/started|stopped/" } + - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: data_frame.stop_data_frame_transform: @@ -107,7 +107,7 @@ setup: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - - match: { transforms.0.task_state: "stopped" } + - match: { transforms.0.state: "stopped" } - do: data_frame.get_data_frame_transform: @@ -130,7 +130,7 @@ setup: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - - match: { transforms.0.task_state: "/started|stopped/" } + - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: data_frame.stop_data_frame_transform: @@ -142,7 +142,7 @@ setup: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - - match: { transforms.0.task_state: "stopped" } + - match: { transforms.0.state: "stopped" } # Delete all old and mixed transforms - do: diff --git a/x-pack/qa/vagrant/build.gradle b/x-pack/qa/vagrant/build.gradle deleted file mode 100644 index 411b8d90c6d..00000000000 --- a/x-pack/qa/vagrant/build.gradle +++ /dev/null @@ -1,11 +0,0 @@ -apply plugin: 'elasticsearch.vagrantsupport' -apply plugin: 'elasticsearch.vagrant' - -esvagrant { - inheritTestUtils true -} - -dependencies { - // Inherit Bats test utils from :qa:vagrant project - packaging project(path: ':qa:vagrant', configuration: 'packaging') -}