diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 15cdfdd1c52..1461dae5ae1 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -25,6 +25,46 @@ run it using Gradle: ./gradlew run ------------------------------------- +==== Launching and debugging from an IDE + +If you want to run Elasticsearch from your IDE, the `./gradlew run` task +supports a remote debugging option: + +--------------------------------------------------------------------------- +./gradlew run --debug-jvm +--------------------------------------------------------------------------- + +==== Distribution + +By default a node is started with the zip distribution. +In order to start with a different distribution use the `-Drun.distribution` argument. + +For example, to start the open source distribution: + +------------------------------------- +./gradlew run -Drun.distribution=oss-zip +------------------------------------- + +==== License type + +By default a node is started with the `basic` license type. +In order to start with a different license type use the `-Drun.license_type` argument. + +In order to start a node with a trial license execute the following command: + +------------------------------------- +./gradlew run -Drun.license_type=trial +------------------------------------- + +This enables security and other paid features and adds a superuser with the username: `elastic-admin` and +password: `elastic-password`. + +==== Other useful arguments + +In order to start a node with a different max heap space add: `-Dtests.heap.size=4G` +In order to disable assertions add: `-Dtests.asserts=false` +In order to set an Elasticsearch setting, provide a setting with the following prefix: `-Dtests.es.` + === Test case filtering. - `tests.class` is a class-filtering shell-like glob pattern, @@ -572,15 +612,6 @@ as its build system. Since the switch to Gradle though, this is no longer possib the code currently used to build Elasticsearch does not allow JaCoCo to recognize its tests.
For more information on this, see the discussion in https://github.com/elastic/elasticsearch/issues/28867[issue #28867]. -== Launching and debugging from an IDE - -If you want to run Elasticsearch from your IDE, the `./gradlew run` task -supports a remote debugging option: - ---------------------------------------------------------------------------- -./gradlew run --debug-jvm ---------------------------------------------------------------------------- - == Debugging remotely from an IDE If you want to run Elasticsearch and be able to remotely attach the process diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy deleted file mode 100644 index acb8f57d9d7..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.gradle.plugin - -import org.elasticsearch.gradle.BuildPlugin -import org.elasticsearch.gradle.test.RestTestPlugin -import org.elasticsearch.gradle.test.RunTask -import org.elasticsearch.gradle.test.StandaloneRestTestPlugin -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.file.FileCopyDetails -import org.gradle.api.file.RelativePath -import org.gradle.api.tasks.bundling.Zip - -class MetaPluginBuildPlugin implements Plugin { - - @Override - void apply(Project project) { - project.plugins.apply(StandaloneRestTestPlugin) - project.plugins.apply(RestTestPlugin) - - createBundleTask(project) - boolean isModule = project.path.startsWith(':modules:') || project.path.startsWith(':x-pack:plugin') - - project.integTestCluster { - dependsOn(project.bundlePlugin) - distribution = 'integ-test-zip' - } - BuildPlugin.configurePomGeneration(project) - project.afterEvaluate { - PluginBuildPlugin.addZipPomGeneration(project) - if (isModule) { - if (project.integTestCluster.distribution == 'integ-test-zip') { - project.integTestCluster.module(project) - } - } else { - project.integTestCluster.plugin(project.path) - } - } - - RunTask run = project.tasks.create('run', RunTask) - run.dependsOn(project.bundlePlugin) - if (isModule == false) { - run.clusterConfig.plugin(project.path) - } - } - - private static void createBundleTask(Project project) { - - MetaPluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', MetaPluginPropertiesTask.class) - - // create the actual bundle task, which zips up all the files for the plugin - Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [buildProperties]) { - from(buildProperties.descriptorOutput.parentFile) { - // plugin properties file - include(buildProperties.descriptorOutput.name) - } - // due to how the renames work for each bundled plugin, we must exclude empty dirs or every subdir - // within bundled plugin zips will show up 
at the root as an empty dir - includeEmptyDirs = false - - } - project.assemble.dependsOn(bundle) - - // also make the zip available as a configuration (used when depending on this project) - project.configurations.create('zip') - project.artifacts.add('zip', bundle) - - // a super hacky way to inject code to run at the end of each of the bundled plugin's configuration - // to add itself back to this meta plugin zip - project.afterEvaluate { - buildProperties.extension.plugins.each { String bundledPluginProjectName -> - Project bundledPluginProject = project.project(bundledPluginProjectName) - bundledPluginProject.afterEvaluate { - String bundledPluginName = bundledPluginProject.esplugin.name - bundle.configure { - dependsOn bundledPluginProject.bundlePlugin - from(project.zipTree(bundledPluginProject.bundlePlugin.outputs.files.singleFile)) { - eachFile { FileCopyDetails details -> - // we want each path to have the plugin name interjected - details.relativePath = new RelativePath(true, bundledPluginName, details.relativePath.toString()) - } - } - } - } - } - } - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesExtension.groovy deleted file mode 100644 index e5d84002e53..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesExtension.groovy +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle.plugin - -import org.gradle.api.Project -import org.gradle.api.tasks.Input - -/** - * A container for meta plugin properties that will be written to the meta plugin descriptor, for easy - * manipulation in the gradle DSL. - */ -class MetaPluginPropertiesExtension { - @Input - String name - - @Input - String description - - /** - * The plugins this meta plugin wraps. - * Note this is not written to the plugin descriptor, but used to setup the final zip file task. - */ - @Input - List plugins - - MetaPluginPropertiesExtension(Project project) { - name = project.name - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesTask.groovy deleted file mode 100644 index e868cc2cc31..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesTask.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle.plugin - -import org.gradle.api.InvalidUserDataException -import org.gradle.api.Task -import org.gradle.api.tasks.Copy -import org.gradle.api.tasks.OutputFile - -class MetaPluginPropertiesTask extends Copy { - - MetaPluginPropertiesExtension extension - - @OutputFile - File descriptorOutput = new File(project.buildDir, 'generated-resources/meta-plugin-descriptor.properties') - - MetaPluginPropertiesTask() { - File templateFile = new File(project.buildDir, "templates/${descriptorOutput.name}") - Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') { - doLast { - InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream("/${descriptorOutput.name}") - templateFile.parentFile.mkdirs() - templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8') - } - } - - dependsOn(copyPluginPropertiesTemplate) - extension = project.extensions.create('es_meta_plugin', MetaPluginPropertiesExtension, project) - project.afterEvaluate { - // check require properties are set - if (extension.name == null) { - throw new InvalidUserDataException('name is a required setting for es_meta_plugin') - } - if (extension.description == null) { - throw new InvalidUserDataException('description is a required setting for es_meta_plugin') - } - // configure property substitution - from(templateFile.parentFile).include(descriptorOutput.name) - into(descriptorOutput.parentFile) - Map properties = generateSubstitutions() - expand(properties) - inputs.properties(properties) - } - } - - Map 
generateSubstitutions() { - return ['name': extension.name, - 'description': extension.description - ] - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index b9a38396318..14aa53e4a17 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -24,7 +24,7 @@ import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin + import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.elasticsearch.gradle.plugin.PluginPropertiesExtension import org.gradle.api.AntBuilder @@ -842,19 +842,15 @@ class ClusterFormationTasks { } static void verifyProjectHasBuildPlugin(String name, Version version, Project project, Project pluginProject) { - if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false && pluginProject.plugins.hasPlugin(MetaPluginBuildPlugin) == false) { + if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false) { throw new GradleException("Task [${name}] cannot add plugin [${pluginProject.path}] with version [${version}] to project's " + - "[${project.path}] dependencies: the plugin is not an esplugin or es_meta_plugin") + "[${project.path}] dependencies: the plugin is not an esplugin") } } - /** Find the plugin name in the given project, whether a regular plugin or meta plugin. */ + /** Find the plugin name in the given project. 
*/ static String findPluginName(Project pluginProject) { PluginPropertiesExtension extension = pluginProject.extensions.findByName('esplugin') - if (extension != null) { - return extension.name - } else { - return pluginProject.extensions.findByName('es_meta_plugin').name - } + return extension.name } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy index de52d75c600..5eec829dfa1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy @@ -24,6 +24,7 @@ import org.elasticsearch.gradle.BuildPlugin import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.plugins.JavaBasePlugin +import org.gradle.api.tasks.compile.JavaCompile /** * Configures the build to compile against Elasticsearch's test framework and @@ -49,5 +50,12 @@ public class StandaloneTestPlugin implements Plugin { test.testClassesDir project.sourceSets.test.output.classesDir test.mustRunAfter(project.precommit) project.check.dependsOn(test) + + project.tasks.withType(JavaCompile) { + // This will be the default in Gradle 5.0 + if (options.compilerArgs.contains("-processor") == false) { + options.compilerArgs << '-proc:none' + } + } } } diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.es-meta-plugin.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.es-meta-plugin.properties deleted file mode 100644 index 50240e95416..00000000000 --- a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.es-meta-plugin.properties +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. 
Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -implementation-class=org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin diff --git a/buildSrc/src/main/resources/meta-plugin-descriptor.properties b/buildSrc/src/main/resources/meta-plugin-descriptor.properties deleted file mode 100644 index 950cb032400..00000000000 --- a/buildSrc/src/main/resources/meta-plugin-descriptor.properties +++ /dev/null @@ -1,20 +0,0 @@ -# Elasticsearch meta plugin descriptor file -# This file must exist as 'meta-plugin-descriptor.properties' inside a plugin. 
-# -### example meta plugin for "meta-foo" -# -# meta-foo.zip <-- zip file for the meta plugin, with this structure: -# |____ <-- The plugin files for bundled_plugin_1 -# |____ <-- The plugin files for bundled_plugin_2 -# |____ meta-plugin-descriptor.properties <-- example contents below: -# -# description=My meta plugin -# name=meta-foo -# -### mandatory elements for all meta plugins: -# -# 'description': simple summary of the meta plugin -description=${description} -# -# 'name': the meta plugin name -name=${name} \ No newline at end of file diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 89fe9cf330a..a547982e3b6 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.4.0-snapshot-6705632810 +lucene = 7.4.0-snapshot-59f2b7aec2 # optional dependencies spatial4j = 0.7 diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 931447d85d4..88e4a256815 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -642,7 +642,12 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { ResizeRequest resizeRequest = new ResizeRequest("target", "source"); resizeRequest.setResizeType(ResizeType.SHRINK); - Settings targetSettings = Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build(); + Settings targetSettings = + Settings.builder() + .put("index.number_of_shards", 2) + .put("index.number_of_replicas", 0) + .putNull("index.routing.allocation.require._name") + .build(); resizeRequest.setTargetIndex(new CreateIndexRequest("target").settings(targetSettings).alias(new Alias("alias"))); ResizeResponse resizeResponse = highLevelClient().indices().shrink(resizeRequest); 
assertTrue(resizeResponse.isAcknowledged()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 1dd9834d8f5..38a963fa33c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -1305,7 +1305,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::shrink-index-request-waitForActiveShards // tag::shrink-index-request-settings request.getTargetIndexRequest().settings(Settings.builder() - .put("index.number_of_shards", 2)); // <1> + .put("index.number_of_shards", 2) // <1> + .putNull("index.routing.allocation.require._name")); // <2> // end::shrink-index-request-settings // tag::shrink-index-request-aliases request.getTargetIndexRequest().alias(new Alias("target_alias")); // <1> diff --git a/distribution/build.gradle b/distribution/build.gradle index d2e2810bc7e..940a4152bfd 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -310,12 +310,14 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { task run(type: RunTask) { distribution = System.getProperty('run.distribution', 'zip') if (distribution == 'zip') { - String licenseType = System.getProperty("license_type", "basic") + String licenseType = System.getProperty("run.license_type", "basic") if (licenseType == 'trial') { setting 'xpack.ml.enabled', 'true' setting 'xpack.graph.enabled', 'true' setting 'xpack.watcher.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' + setupCommand 'setupTestAdmin', + 'bin/elasticsearch-users', 'useradd', 'elastic-admin', '-p', 'elastic-password', '-r', 'superuser' } else if (licenseType != 'basic') { throw new 
IllegalArgumentException("Unsupported self-generated license type: [" + licenseType + "[basic] or [trial].") } diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 71c57f7f101..d6f6e36b8c4 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -87,8 +87,8 @@ import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; *
  • A URL to a plugin zip
  • * * - * Plugins are packaged as zip files. Each packaged plugin must contain a plugin properties file - * or a meta plugin properties file. See {@link PluginInfo} and {@link MetaPluginInfo}, respectively. + * Plugins are packaged as zip files. Each packaged plugin must contain a plugin properties file. + * See {@link PluginInfo}. *

    * The installation process first extracts the plugin files into a temporary * directory in order to verify the plugin satisfies the following requirements: @@ -106,11 +106,6 @@ import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; * files specific to the plugin. The config files be installed into a subdirectory of the * elasticsearch config directory, using the name of the plugin. If any files to be installed * already exist, they will be skipped. - *

    - * If the plugin is a meta plugin, the installation process installs each plugin separately - * inside the meta plugin directory. The {@code bin} and {@code config} directory are also moved - * inside the meta plugin directory. - *

    */ class InstallPluginCommand extends EnvironmentAwareCommand { @@ -550,7 +545,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand { } // checking for existing version of the plugin - private void verifyPluginName(Path pluginPath, String pluginName, Path candidateDir) throws UserException, IOException { + private void verifyPluginName(Path pluginPath, String pluginName) throws UserException, IOException { // don't let user install plugin conflicting with module... // they might be unavoidably in maven central and are packaged up the same way) if (MODULES.contains(pluginName)) { @@ -567,28 +562,10 @@ class InstallPluginCommand extends EnvironmentAwareCommand { pluginName); throw new UserException(PLUGIN_EXISTS, message); } - // checks meta plugins too - try (DirectoryStream stream = Files.newDirectoryStream(pluginPath)) { - for (Path plugin : stream) { - if (candidateDir.equals(plugin.resolve(pluginName))) { - continue; - } - if (MetaPluginInfo.isMetaPlugin(plugin) && Files.exists(plugin.resolve(pluginName))) { - final MetaPluginInfo info = MetaPluginInfo.readFromProperties(plugin); - final String message = String.format( - Locale.ROOT, - "plugin name [%s] already exists in a meta plugin; if you need to update the meta plugin, " + - "uninstall it first using command 'remove %s'", - plugin.resolve(pluginName).toAbsolutePath(), - info.getName()); - throw new UserException(PLUGIN_EXISTS, message); - } - } - } } /** Load information about the plugin, and verify it can be installed with no errors. 
*/ - private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, boolean isBatch, Environment env) throws Exception { + private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, Environment env) throws Exception { final PluginInfo info = PluginInfo.readFromProperties(pluginRoot); if (info.hasNativeController()) { throw new IllegalStateException("plugins can not have native controllers"); @@ -596,7 +573,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand { PluginsService.verifyCompatibility(info); // checking for existing version of the plugin - verifyPluginName(env.pluginsFile(), info.getName(), pluginRoot); + verifyPluginName(env.pluginsFile(), info.getName()); PluginsService.checkForFailedPluginRemovals(env.pluginsFile()); @@ -635,11 +612,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand { List deleteOnFailure = new ArrayList<>(); deleteOnFailure.add(tmpRoot); try { - if (MetaPluginInfo.isMetaPlugin(tmpRoot)) { - installMetaPlugin(terminal, isBatch, tmpRoot, env, deleteOnFailure); - } else { - installPlugin(terminal, isBatch, tmpRoot, env, deleteOnFailure); - } + installPlugin(terminal, isBatch, tmpRoot, env, deleteOnFailure); } catch (Exception installProblem) { try { IOUtils.rm(deleteOnFailure.toArray(new Path[0])); @@ -650,71 +623,13 @@ class InstallPluginCommand extends EnvironmentAwareCommand { } } - /** - * Installs the meta plugin and all the bundled plugins from {@code tmpRoot} into the plugins dir. - * If a bundled plugin has a bin dir and/or a config dir, those are copied. 
- */ - private void installMetaPlugin(Terminal terminal, boolean isBatch, Path tmpRoot, - Environment env, List deleteOnFailure) throws Exception { - final MetaPluginInfo metaInfo = MetaPluginInfo.readFromProperties(tmpRoot); - verifyPluginName(env.pluginsFile(), metaInfo.getName(), tmpRoot); - - final Path destination = env.pluginsFile().resolve(metaInfo.getName()); - deleteOnFailure.add(destination); - terminal.println(VERBOSE, metaInfo.toString()); - - final List pluginPaths = new ArrayList<>(); - try (DirectoryStream paths = Files.newDirectoryStream(tmpRoot)) { - // Extract bundled plugins path and validate plugin names - for (Path plugin : paths) { - if (MetaPluginInfo.isPropertiesFile(plugin)) { - continue; - } - final PluginInfo info = PluginInfo.readFromProperties(plugin); - PluginsService.verifyCompatibility(info); - verifyPluginName(env.pluginsFile(), info.getName(), plugin); - pluginPaths.add(plugin); - } - } - - // read optional security policy from each bundled plugin, and confirm all exceptions one time with user - - Set permissions = new HashSet<>(); - final List pluginInfos = new ArrayList<>(); - for (Path plugin : pluginPaths) { - final PluginInfo info = loadPluginInfo(terminal, plugin, isBatch, env); - pluginInfos.add(info); - - Path policy = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY); - if (Files.exists(policy)) { - permissions.addAll(PluginSecurity.parsePermissions(policy, env.tmpFile())); - } - } - PluginSecurity.confirmPolicyExceptions(terminal, permissions, isBatch); - - // move support files and rename as needed to prepare the exploded plugin for its final location - for (int i = 0; i < pluginPaths.size(); ++i) { - Path pluginPath = pluginPaths.get(i); - PluginInfo info = pluginInfos.get(i); - installPluginSupportFiles(info, pluginPath, env.binFile().resolve(metaInfo.getName()), - env.configFile().resolve(metaInfo.getName()), deleteOnFailure); - // ensure the plugin dir within the tmpRoot has the correct name - if 
(pluginPath.getFileName().toString().equals(info.getName()) == false) { - Files.move(pluginPath, pluginPath.getParent().resolve(info.getName()), StandardCopyOption.ATOMIC_MOVE); - } - } - movePlugin(tmpRoot, destination); - String[] plugins = pluginInfos.stream().map(PluginInfo::getName).toArray(String[]::new); - terminal.println("-> Installed " + metaInfo.getName() + " with: " + Strings.arrayToCommaDelimitedString(plugins)); - } - /** * Installs the plugin from {@code tmpRoot} into the plugins dir. * If the plugin has a bin dir and/or a config dir, those are moved. */ private void installPlugin(Terminal terminal, boolean isBatch, Path tmpRoot, Environment env, List deleteOnFailure) throws Exception { - final PluginInfo info = loadPluginInfo(terminal, tmpRoot, isBatch, env); + final PluginInfo info = loadPluginInfo(terminal, tmpRoot, env); // read optional security policy (extra permissions), if it exists, confirm or warn the user Path policy = tmpRoot.resolve(PluginInfo.ES_PLUGIN_POLICY); final Set permissions; diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index fb73554c2b1..6015d9da143 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -61,25 +61,7 @@ class ListPluginsCommand extends EnvironmentAwareCommand { } Collections.sort(plugins); for (final Path plugin : plugins) { - if (MetaPluginInfo.isMetaPlugin(plugin)) { - MetaPluginInfo metaInfo = MetaPluginInfo.readFromProperties(plugin); - List subPluginPaths = new ArrayList<>(); - try (DirectoryStream subPaths = Files.newDirectoryStream(plugin)) { - for (Path subPlugin : subPaths) { - if (MetaPluginInfo.isPropertiesFile(subPlugin)) { - continue; - } - subPluginPaths.add(subPlugin); - } - } - 
Collections.sort(subPluginPaths); - terminal.println(Terminal.Verbosity.SILENT, metaInfo.getName()); - for (Path subPlugin : subPluginPaths) { - printPlugin(env, terminal, subPlugin, "\t"); - } - } else { - printPlugin(env, terminal, plugin, ""); - } + printPlugin(env, terminal, plugin, ""); } } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 5931e66cb9a..bfeb3c0279b 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -219,18 +219,6 @@ public class InstallPluginCommandTests extends ESTestCase { return createPlugin(name, structure, additionalProps).toUri().toURL().toString(); } - /** creates an meta plugin .zip and returns the url for testing */ - static String createMetaPluginUrl(String name, Path structure) throws IOException { - return createMetaPlugin(name, structure).toUri().toURL().toString(); - } - - static void writeMetaPlugin(String name, Path structure) throws IOException { - PluginTestUtil.writeMetaPluginProperties(structure, - "description", "fake desc", - "name", name - ); - } - static void writePlugin(String name, Path structure, String... 
additionalProps) throws IOException { String[] properties = Stream.concat(Stream.of( "description", "fake desc", @@ -261,11 +249,6 @@ public class InstallPluginCommandTests extends ESTestCase { return writeZip(structure, null); } - static Path createMetaPlugin(String name, Path structure) throws IOException { - writeMetaPlugin(name, structure); - return writeZip(structure, null); - } - void installPlugin(String pluginUrl, Path home) throws Exception { installPlugin(pluginUrl, home, skipJarHellCommand); } @@ -275,11 +258,6 @@ public class InstallPluginCommandTests extends ESTestCase { command.execute(terminal, pluginUrl, false, env); } - void assertMetaPlugin(String metaPlugin, String name, Path original, Environment env) throws IOException { - assertPluginInternal(name, env.pluginsFile().resolve(metaPlugin)); - assertConfigAndBin(metaPlugin, original, env); - } - void assertPlugin(String name, Path original, Environment env) throws IOException { assertPluginInternal(name, env.pluginsFile()); assertConfigAndBin(name, original, env); @@ -388,23 +366,9 @@ public class InstallPluginCommandTests extends ESTestCase { assertPlugin("fake", pluginDir, env.v2()); } - public void testWithMetaPlugin() throws Exception { - Tuple env = createEnv(fs, temp); - Path pluginDir = createPluginDir(temp); - Files.createDirectory(pluginDir.resolve("fake1")); - writePlugin("fake1", pluginDir.resolve("fake1")); - Files.createDirectory(pluginDir.resolve("fake2")); - writePlugin("fake2", pluginDir.resolve("fake2")); - String pluginZip = createMetaPluginUrl("my_plugins", pluginDir); - installPlugin(pluginZip, env.v1()); - assertMetaPlugin("my_plugins", "fake1", pluginDir, env.v2()); - assertMetaPlugin("my_plugins", "fake2", pluginDir, env.v2()); - } - public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); + Path pluginDir = createPluginDir(temp); 
String pluginZip = createPluginUrl("fake", pluginDir); final Path removing = env.v2().pluginsFile().resolve(".removing-failed"); Files.createDirectory(removing); @@ -414,11 +378,6 @@ public class InstallPluginCommandTests extends ESTestCase { "found file [%s] from a failed attempt to remove the plugin [failed]; execute [elasticsearch-plugin remove failed]", removing); assertThat(e, hasToString(containsString(expected))); - - // test with meta plugin - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - final IllegalStateException e1 = expectThrows(IllegalStateException.class, () -> installPlugin(metaZip, env.v1())); - assertThat(e1, hasToString(containsString(expected))); } public void testSpaceInUrl() throws Exception { @@ -500,23 +459,6 @@ public class InstallPluginCommandTests extends ESTestCase { assertInstallCleaned(environment.v2()); } - public void testJarHellInMetaPlugin() throws Exception { - // jar hell test needs a real filesystem - assumeTrue("real filesystem", isReal); - Tuple environment = createEnv(fs, temp); - Path pluginDir = createPluginDir(temp); - Files.createDirectory(pluginDir.resolve("fake1")); - writePlugin("fake1", pluginDir.resolve("fake1")); - Files.createDirectory(pluginDir.resolve("fake2")); - writePlugin("fake2", pluginDir.resolve("fake2")); // adds plugin.jar with Fake2Plugin - writeJar(pluginDir.resolve("fake2").resolve("other.jar"), "Fake2Plugin"); - String pluginZip = createMetaPluginUrl("my_plugins", pluginDir); - IllegalStateException e = expectThrows(IllegalStateException.class, - () -> installPlugin(pluginZip, environment.v1(), defaultCommand)); - assertTrue(e.getMessage(), e.getMessage().contains("jar hell")); - assertInstallCleaned(environment.v2()); - } - public void testIsolatedPlugins() throws Exception { Tuple env = createEnv(fs, temp); // these both share the same FakePlugin class @@ -540,23 +482,6 @@ public class InstallPluginCommandTests extends ESTestCase { assertInstallCleaned(env.v2()); } - public void 
testExistingMetaPlugin() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaZip = createPluginDir(temp); - Path pluginDir = metaZip.resolve("fake"); - Files.createDirectory(pluginDir); - String pluginZip = createPluginUrl("fake", pluginDir); - installPlugin(pluginZip, env.v1()); - UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("already exists")); - assertInstallCleaned(env.v2()); - - String anotherZip = createMetaPluginUrl("another_plugins", metaZip); - e = expectThrows(UserException.class, () -> installPlugin(anotherZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("already exists")); - assertInstallCleaned(env.v2()); - } - public void testBin() throws Exception { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); @@ -568,43 +493,20 @@ public class InstallPluginCommandTests extends ESTestCase { assertPlugin("fake", pluginDir, env.v2()); } - public void testMetaBin() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); - writePlugin("fake", pluginDir); - Path binDir = pluginDir.resolve("bin"); - Files.createDirectory(binDir); - Files.createFile(binDir.resolve("somescript")); - String pluginZip = createMetaPluginUrl("my_plugins", metaDir); - installPlugin(pluginZip, env.v1()); - assertMetaPlugin("my_plugins","fake", pluginDir, env.v2()); - } - public void testBinNotDir() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); + Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createFile(binDir); String pluginZip = createPluginUrl("fake", pluginDir); UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); 
assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); - - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env.v2()); } public void testBinContainsDir() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); + Path pluginDir = createPluginDir(temp); Path dirInBinDir = pluginDir.resolve("bin").resolve("foo"); Files.createDirectories(dirInBinDir); Files.createFile(dirInBinDir.resolve("somescript")); @@ -612,11 +514,6 @@ public class InstallPluginCommandTests extends ESTestCase { UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin")); assertInstallCleaned(env.v2()); - - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin")); - assertInstallCleaned(env.v2()); } public void testBinConflict() throws Exception { @@ -649,27 +546,6 @@ public class InstallPluginCommandTests extends ESTestCase { } } - public void testMetaBinPermissions() throws Exception { - assumeTrue("posix filesystem", isPosix); - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); - writePlugin("fake", pluginDir); - Path binDir = pluginDir.resolve("bin"); - Files.createDirectory(binDir); - Files.createFile(binDir.resolve("somescript")); - String pluginZip = createMetaPluginUrl("my_plugins", metaDir); - try (PosixPermissionsResetter 
binAttrs = new PosixPermissionsResetter(env.v2().binFile())) { - Set perms = binAttrs.getCopyPermissions(); - // make sure at least one execute perm is missing, so we know we forced it during installation - perms.remove(PosixFilePermission.GROUP_EXECUTE); - binAttrs.setPermissions(perms); - installPlugin(pluginZip, env.v1()); - assertMetaPlugin("my_plugins", "fake", pluginDir, env.v2()); - } - } - public void testPluginPermissions() throws Exception { assumeTrue("posix filesystem", isPosix); @@ -761,32 +637,9 @@ public class InstallPluginCommandTests extends ESTestCase { assertTrue(Files.exists(envConfigDir.resolve("other.yml"))); } - public void testExistingMetaConfig() throws Exception { - Tuple env = createEnv(fs, temp); - Path envConfigDir = env.v2().configFile().resolve("my_plugins"); - Files.createDirectories(envConfigDir); - Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8)); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); - writePlugin("fake", pluginDir); - Path configDir = pluginDir.resolve("config"); - Files.createDirectory(configDir); - Files.write(configDir.resolve("custom.yml"), "new config".getBytes(StandardCharsets.UTF_8)); - Files.createFile(configDir.resolve("other.yml")); - String pluginZip = createMetaPluginUrl("my_plugins", metaDir); - installPlugin(pluginZip, env.v1()); - assertMetaPlugin("my_plugins", "fake", pluginDir, env.v2()); - List configLines = Files.readAllLines(envConfigDir.resolve("custom.yml"), StandardCharsets.UTF_8); - assertEquals(1, configLines.size()); - assertEquals("existing config", configLines.get(0)); - assertTrue(Files.exists(envConfigDir.resolve("other.yml"))); - } - public void testConfigNotDir() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); + Path pluginDir = createPluginDir(temp); 
Files.createDirectories(pluginDir); Path configDir = pluginDir.resolve("config"); Files.createFile(configDir); @@ -794,11 +647,6 @@ public class InstallPluginCommandTests extends ESTestCase { UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); - - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env.v2()); } public void testConfigContainsDir() throws Exception { @@ -815,19 +663,12 @@ public class InstallPluginCommandTests extends ESTestCase { public void testMissingDescriptor() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); + Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve("fake.yml")); String pluginZip = writeZip(pluginDir, null).toUri().toURL().toString(); NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties")); assertInstallCleaned(env.v2()); - - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - e = expectThrows(NoSuchFileException.class, () -> installPlugin(metaZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties")); - assertInstallCleaned(env.v2()); } public void testContainsIntermediateDirectory() throws Exception { @@ -840,16 +681,6 @@ public class InstallPluginCommandTests extends ESTestCase { assertInstallCleaned(env.v2()); } - public void testContainsIntermediateDirectoryMeta() throws Exception { - Tuple env = createEnv(fs, temp); - Path pluginDir = createPluginDir(temp); - 
Files.createFile(pluginDir.resolve(MetaPluginInfo.ES_META_PLUGIN_PROPERTIES)); - String pluginZip = writeZip(pluginDir, "elasticsearch").toUri().toURL().toString(); - UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); - assertThat(e.getMessage(), containsString("This plugin was built with an older plugin structure")); - assertInstallCleaned(env.v2()); - } - public void testZipRelativeOutsideEntryName() throws Exception { Tuple env = createEnv(fs, temp); Path zip = createTempDir().resolve("broken.zip"); @@ -958,29 +789,6 @@ public class InstallPluginCommandTests extends ESTestCase { "if you need to update the plugin, uninstall it first using command 'remove fake'")); } - public void testMetaPluginAlreadyInstalled() throws Exception { - Tuple env = createEnv(fs, temp); - { - // install fake plugin - Path pluginDir = createPluginDir(temp); - String pluginZip = createPluginUrl("fake", pluginDir); - installPlugin(pluginZip, env.v1()); - } - - Path pluginDir = createPluginDir(temp); - Files.createDirectory(pluginDir.resolve("fake")); - writePlugin("fake", pluginDir.resolve("fake")); - Files.createDirectory(pluginDir.resolve("other")); - writePlugin("other", pluginDir.resolve("other")); - String metaZip = createMetaPluginUrl("meta", pluginDir); - final UserException e = expectThrows(UserException.class, - () -> installPlugin(metaZip, env.v1(), randomFrom(skipJarHellCommand, defaultCommand))); - assertThat( - e.getMessage(), - equalTo("plugin directory [" + env.v2().pluginsFile().resolve("fake") + "] already exists; " + - "if you need to update the plugin, uninstall it first using command 'remove fake'")); - } - private void installPlugin(MockTerminal terminal, boolean isBatch) throws Exception { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); @@ -1224,24 +1032,6 @@ public class InstallPluginCommandTests extends ESTestCase { assertPlugin("fake", pluginDir, env.v2()); } - public void 
testMetaPluginPolicyConfirmation() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path fake1Dir = metaDir.resolve("fake1"); - Files.createDirectory(fake1Dir); - writePluginSecurityPolicy(fake1Dir, "setAccessible", "setFactory"); - writePlugin("fake1", fake1Dir); - Path fake2Dir = metaDir.resolve("fake2"); - Files.createDirectory(fake2Dir); - writePluginSecurityPolicy(fake2Dir, "setAccessible", "accessDeclaredMembers"); - writePlugin("fake2", fake2Dir); - String pluginZip = createMetaPluginUrl("meta-plugin", metaDir); - - assertPolicyConfirmation(env, pluginZip, "plugin requires additional permissions"); - assertMetaPlugin("meta-plugin", "fake1", metaDir, env.v2()); - assertMetaPlugin("meta-plugin", "fake2", metaDir, env.v2()); - } - public void testPluginWithNativeController() throws Exception { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); @@ -1250,21 +1040,4 @@ public class InstallPluginCommandTests extends ESTestCase { final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1())); assertThat(e, hasToString(containsString("plugins can not have native controllers"))); } - - public void testMetaPluginWithNativeController() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path fake1Dir = metaDir.resolve("fake1"); - Files.createDirectory(fake1Dir); - writePluginSecurityPolicy(fake1Dir, "setAccessible", "setFactory"); - writePlugin("fake1", fake1Dir); - Path fake2Dir = metaDir.resolve("fake2"); - Files.createDirectory(fake2Dir); - writePlugin("fake2", fake2Dir, "has.native.controller", "true"); - String pluginZip = createMetaPluginUrl("meta-plugin", metaDir); - - final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1())); - assertThat(e, hasToString(containsString("plugins can not have native controllers"))); - } - } diff --git 
a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index f106d51063f..8144c5f0600 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -92,16 +92,7 @@ public class ListPluginsCommandTests extends ESTestCase { final String description, final String name, final String classname) throws IOException { - buildFakePlugin(env, null, description, name, classname, false); - } - - private static void buildFakePlugin( - final Environment env, - final String metaPlugin, - final String description, - final String name, - final String classname) throws IOException { - buildFakePlugin(env, metaPlugin, description, name, classname, false); + buildFakePlugin(env, description, name, classname, false); } private static void buildFakePlugin( @@ -110,36 +101,15 @@ public class ListPluginsCommandTests extends ESTestCase { final String name, final String classname, final boolean hasNativeController) throws IOException { - buildFakePlugin(env, null, description, name, classname, hasNativeController); - } - - private static void buildFakePlugin( - final Environment env, - final String metaPlugin, - final String description, - final String name, - final String classname, - final boolean hasNativeController) throws IOException { - Path dest = metaPlugin != null ? 
env.pluginsFile().resolve(metaPlugin) : env.pluginsFile(); PluginTestUtil.writePluginProperties( - dest.resolve(name), - "description", description, - "name", name, - "version", "1.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", "1.8", - "classname", classname, - "has.native.controller", Boolean.toString(hasNativeController)); - } - - private static void buildFakeMetaPlugin( - final Environment env, - final String description, - final String name) throws IOException { - PluginTestUtil.writeMetaPluginProperties( env.pluginsFile().resolve(name), "description", description, - "name", name); + "name", name, + "version", "1.0", + "elasticsearch.version", Version.CURRENT.toString(), + "java.version", "1.8", + "classname", classname, + "has.native.controller", Boolean.toString(hasNativeController)); } public void testPluginsDirMissing() throws Exception { @@ -166,16 +136,6 @@ public class ListPluginsCommandTests extends ESTestCase { assertEquals(buildMultiline("fake1", "fake2"), terminal.getOutput()); } - public void testMetaPlugin() throws Exception { - buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin"); - buildFakePlugin(env, "meta_plugin", "fake desc", "fake1", "org.fake1"); - buildFakePlugin(env, "meta_plugin", "fake desc 2", "fake2", "org.fake2"); - buildFakePlugin(env, "fake desc 3", "fake3", "org.fake3"); - buildFakePlugin(env, "fake desc 4", "fake4", "org.fake4"); - MockTerminal terminal = listPlugins(home); - assertEquals(buildMultiline("fake3", "fake4", "meta_plugin", "\tfake1", "\tfake2"), terminal.getOutput()); - } - public void testPluginWithVerbose() throws Exception { buildFakePlugin(env, "fake desc", "fake_plugin", "org.fake"); String[] params = { "-v" }; @@ -247,39 +207,6 @@ public class ListPluginsCommandTests extends ESTestCase { terminal.getOutput()); } - public void testPluginWithVerboseMetaPlugins() throws Exception { - buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin"); - buildFakePlugin(env, "meta_plugin", 
"fake desc 1", "fake_plugin1", "org.fake"); - buildFakePlugin(env, "meta_plugin", "fake desc 2", "fake_plugin2", "org.fake2"); - String[] params = { "-v" }; - MockTerminal terminal = listPlugins(home, params); - assertEquals( - buildMultiline( - "Plugins directory: " + env.pluginsFile(), - "meta_plugin", - "\tfake_plugin1", - "\t- Plugin information:", - "\tName: fake_plugin1", - "\tDescription: fake desc 1", - "\tVersion: 1.0", - "\tElasticsearch Version: " + Version.CURRENT.toString(), - "\tJava Version: 1.8", - "\tNative Controller: false", - "\tExtended Plugins: []", - "\t * Classname: org.fake", - "\tfake_plugin2", - "\t- Plugin information:", - "\tName: fake_plugin2", - "\tDescription: fake desc 2", - "\tVersion: 1.0", - "\tElasticsearch Version: " + Version.CURRENT.toString(), - "\tJava Version: 1.8", - "\tNative Controller: false", - "\tExtended Plugins: []", - "\t * Classname: org.fake2"), - terminal.getOutput()); - } - public void testPluginWithoutVerboseMultiplePlugins() throws Exception { buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); @@ -307,19 +234,6 @@ public class ListPluginsCommandTests extends ESTestCase { e.getMessage()); } - public void testMetaPluginWithWrongDescriptorFile() throws Exception{ - buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin"); - final Path pluginDir = env.pluginsFile().resolve("meta_plugin").resolve("fake_plugin1"); - PluginTestUtil.writePluginProperties(pluginDir, "description", "fake desc"); - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> listPlugins(home)); - final Path descriptorPath = pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES); - assertEquals( - "property [name] is missing in [" + descriptorPath.toString() + "]", - e.getMessage()); - } - public void testExistingIncompatiblePlugin() throws Exception { PluginTestUtil.writePluginProperties(env.pluginsFile().resolve("fake_plugin1"), 
"description", "fake desc 1", @@ -340,27 +254,4 @@ public class ListPluginsCommandTests extends ESTestCase { terminal = listPlugins(home, params); assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput()); } - - public void testExistingIncompatibleMetaPlugin() throws Exception { - buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin"); - PluginTestUtil.writePluginProperties(env.pluginsFile().resolve("meta_plugin").resolve("fake_plugin1"), - "description", "fake desc 1", - "name", "fake_plugin1", - "version", "1.0", - "elasticsearch.version", Version.fromString("1.0.0").toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "org.fake1"); - buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); - - MockTerminal terminal = listPlugins(home); - String message = "plugin [fake_plugin1] was built for Elasticsearch version 1.0 but version " + Version.CURRENT + " is required"; - assertEquals( - "fake_plugin2\nmeta_plugin\n\tfake_plugin1\n" + "WARNING: " + message + "\n", - terminal.getOutput()); - - String[] params = {"-s"}; - terminal = listPlugins(home, params); - assertEquals("fake_plugin2\nmeta_plugin\n\tfake_plugin1\n", terminal.getOutput()); - } - } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index 13506cf986a..67c55bc348c 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -103,16 +103,6 @@ public class RemovePluginCommandTests extends ESTestCase { "classname", "SomeClass"); } - void createMetaPlugin(String name, String... 
plugins) throws Exception { - PluginTestUtil.writeMetaPluginProperties( - env.pluginsFile().resolve(name), - "description", "dummy", - "name", name); - for (String plugin : plugins) { - createPlugin(env.pluginsFile().resolve(name), plugin); - } - } - static MockTerminal removePlugin(String name, Path home, boolean purge) throws Exception { Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build()); MockTerminal terminal = new MockTerminal(); @@ -159,19 +149,6 @@ public class RemovePluginCommandTests extends ESTestCase { assertRemoveCleaned(env); } - public void testBasicMeta() throws Exception { - createMetaPlugin("meta", "fake1"); - createPlugin("other"); - removePlugin("meta", home, randomBoolean()); - assertFalse(Files.exists(env.pluginsFile().resolve("meta"))); - assertTrue(Files.exists(env.pluginsFile().resolve("other"))); - assertRemoveCleaned(env); - - UserException exc = - expectThrows(UserException.class, () -> removePlugin("fake1", home, randomBoolean())); - assertThat(exc.getMessage(), containsString("plugin [fake1] not found")); - } - public void testBin() throws Exception { createPlugin("fake"); Path binDir = env.binFile().resolve("fake"); diff --git a/docs/README.asciidoc b/docs/README.asciidoc index f0826b41612..2963359d44c 100644 --- a/docs/README.asciidoc +++ b/docs/README.asciidoc @@ -6,7 +6,7 @@ See: https://github.com/elastic/docs Snippets marked with `// CONSOLE` are automatically annotated with "VIEW IN CONSOLE" and "COPY AS CURL" in the documentation and are automatically tested by the command `gradle :docs:check`. To test just the docs from a single page, -use e.g. `gradle :docs:check -Dtests.method="\*rollover*"`. +use e.g. `gradle :docs:check -Dtests.method="*rollover*"`. By default each `// CONSOLE` snippet runs as its own isolated test. 
You can manipulate the test execution in the following ways: diff --git a/docs/community-clients/index.asciidoc b/docs/community-clients/index.asciidoc index 0fd5c3a483f..e28ec84f087 100644 --- a/docs/community-clients/index.asciidoc +++ b/docs/community-clients/index.asciidoc @@ -208,6 +208,9 @@ The following projects appear to be abandoned: * https://github.com/ropensci/elasticdsl[elasticdsl]: A high-level R DSL for Elasticsearch, wrapping the elastic R client. + +* https://github.com/UptakeOpenSource/uptasticsearch[uptasticsearch]: + An R client tailored to data science workflows. The following projects appear to be abandoned: diff --git a/docs/java-rest/high-level/indices/shrink_index.asciidoc b/docs/java-rest/high-level/indices/shrink_index.asciidoc index 31827964631..5bef7a561a6 100644 --- a/docs/java-rest/high-level/indices/shrink_index.asciidoc +++ b/docs/java-rest/high-level/indices/shrink_index.asciidoc @@ -45,8 +45,8 @@ returns a response, as an `ActiveShardCount` -------------------------------------------------- include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[shrink-index-request-settings] -------------------------------------------------- -<1> The settings to apply to the target index, which include the number of -shards to create for it +<1> The number of shards on the target of the shrink index request +<2> Remove the allocation requirement copied from the source index ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/docs/painless/painless-execute-script.asciidoc b/docs/painless/painless-execute-script.asciidoc index 7997c87e3e4..3344bd9f751 100644 --- a/docs/painless/painless-execute-script.asciidoc +++ b/docs/painless/painless-execute-script.asciidoc @@ -1,6 +1,8 @@ [[painless-execute-api]] === Painless execute API +experimental[The painless execute api is new and the request / response format may change in a breaking way in the future] + The Painless execute API 
allows an arbitrary script to be executed and a result to be returned. [[painless-execute-api-parameters]] diff --git a/docs/plugins/authors.asciidoc b/docs/plugins/authors.asciidoc index b89ac903592..fceeeac892c 100644 --- a/docs/plugins/authors.asciidoc +++ b/docs/plugins/authors.asciidoc @@ -13,8 +13,6 @@ The Elasticsearch repository contains examples of: which contains a rescore plugin. * a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/script-expert-scoring[Java plugin] which contains a script plugin. -* a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/meta-plugin[Java plugin] - which contains a meta plugin. These examples provide the bare bones needed to get started. For more information about how to write a plugin, we recommend looking at the plugins @@ -120,19 +118,3 @@ AccessController.doPrivileged( See http://www.oracle.com/technetwork/java/seccodeguide-139067.html[Secure Coding Guidelines for Java SE] for more information. - -[float] -=== Meta Plugin - -It is also possible to bundle multiple plugins into a meta plugin. -A directory for each sub-plugin must be contained in a directory called `elasticsearch. -The meta plugin must also contain a file called `meta-plugin-descriptor.properties` in the directory named -`elasticsearch`. -The format for this file is described in detail in this example: - -["source","properties",subs="attributes"] --------------------------------------------------- -include::{plugin-properties-files}/meta-plugin-descriptor.properties[] --------------------------------------------------- - -A meta plugin can be installed/removed like a normal plugin with the `bin/elasticsearch-plugin` command. 
diff --git a/docs/reference/index-shared1.asciidoc b/docs/reference/index-shared1.asciidoc deleted file mode 100644 index ae208e29011..00000000000 --- a/docs/reference/index-shared1.asciidoc +++ /dev/null @@ -1,6 +0,0 @@ - -include::getting-started.asciidoc[] - -include::setup.asciidoc[] - -include::upgrade.asciidoc[] diff --git a/docs/reference/index-shared2.asciidoc b/docs/reference/index-shared2.asciidoc deleted file mode 100644 index e48948079cc..00000000000 --- a/docs/reference/index-shared2.asciidoc +++ /dev/null @@ -1,2 +0,0 @@ - -include::migration/index.asciidoc[] diff --git a/docs/reference/index-shared3.asciidoc b/docs/reference/index-shared3.asciidoc deleted file mode 100644 index 4da338186b0..00000000000 --- a/docs/reference/index-shared3.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ - -include::api-conventions.asciidoc[] - -include::docs.asciidoc[] - -include::search.asciidoc[] - -include::aggregations.asciidoc[] - -include::indices.asciidoc[] - -include::cat.asciidoc[] - -include::cluster.asciidoc[] - -include::query-dsl.asciidoc[] - -include::mapping.asciidoc[] - -include::analysis.asciidoc[] - -include::modules.asciidoc[] - -include::index-modules.asciidoc[] - -include::ingest.asciidoc[] diff --git a/docs/reference/index-shared4.asciidoc b/docs/reference/index-shared4.asciidoc deleted file mode 100644 index 3dc9e4f5e07..00000000000 --- a/docs/reference/index-shared4.asciidoc +++ /dev/null @@ -1,10 +0,0 @@ - -include::how-to.asciidoc[] - -include::testing.asciidoc[] - -include::glossary.asciidoc[] - -include::release-notes/highlights.asciidoc[] - -include::release-notes.asciidoc[] \ No newline at end of file diff --git a/docs/reference/index-shared5.asciidoc b/docs/reference/index-shared5.asciidoc deleted file mode 100644 index 572522f6c8e..00000000000 --- a/docs/reference/index-shared5.asciidoc +++ /dev/null @@ -1,2 +0,0 @@ - -include::redirects.asciidoc[] diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 
8aa9eef32f8..59f8d77a0c0 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -1,12 +1,79 @@ [[elasticsearch-reference]] = Elasticsearch Reference +:include-xpack: true :es-test-dir: {docdir}/../src/test :plugins-examples-dir: {docdir}/../../plugins/examples +:xes-repo-dir: {docdir}/../../x-pack/docs/{lang} +:es-repo-dir: {docdir} + include::../Versions.asciidoc[] -include::index-shared1.asciidoc[] -include::index-shared2.asciidoc[] -include::index-shared3.asciidoc[] -include::index-shared4.asciidoc[] -include::index-shared5.asciidoc[] + +include::getting-started.asciidoc[] + +include::setup.asciidoc[] + +include::{xes-repo-dir}/setup/setup-xes.asciidoc[] + +include::{xes-repo-dir}/monitoring/configuring-monitoring.asciidoc[] + +include::{xes-repo-dir}/security/configuring-es.asciidoc[] + +include::{xes-repo-dir}/setup/setup-xclient.asciidoc[] + +include::{xes-repo-dir}/settings/configuring-xes.asciidoc[] + +include::{xes-repo-dir}/setup/bootstrap-checks-xes.asciidoc[] + +include::upgrade.asciidoc[] + +include::migration/index.asciidoc[] + +include::api-conventions.asciidoc[] + +include::docs.asciidoc[] + +include::search.asciidoc[] + +include::aggregations.asciidoc[] + +include::indices.asciidoc[] + +include::cat.asciidoc[] + +include::cluster.asciidoc[] + +include::query-dsl.asciidoc[] + +include::mapping.asciidoc[] + +include::analysis.asciidoc[] + +include::modules.asciidoc[] + +include::index-modules.asciidoc[] + +include::ingest.asciidoc[] + +include::{xes-repo-dir}/sql/index.asciidoc[] + +include::{xes-repo-dir}/monitoring/index.asciidoc[] + +include::{xes-repo-dir}/rollup/index.asciidoc[] + +include::{xes-repo-dir}/rest-api/index.asciidoc[] + +include::{xes-repo-dir}/commands/index.asciidoc[] + +include::how-to.asciidoc[] + +include::testing.asciidoc[] + +include::glossary.asciidoc[] + +include::release-notes/highlights.asciidoc[] + +include::release-notes.asciidoc[] + +include::redirects.asciidoc[] diff --git 
a/docs/reference/index.x.asciidoc b/docs/reference/index.x.asciidoc index 5be21cb0043..35204eef5b6 100644 --- a/docs/reference/index.x.asciidoc +++ b/docs/reference/index.x.asciidoc @@ -1,12 +1 @@ -[[elasticsearch-reference]] -= Elasticsearch Reference - -:include-xpack: true -:es-test-dir: {docdir}/../src/test -:plugins-examples-dir: {docdir}/../../plugins/examples -:xes-repo-dir: {docdir}/../../x-pack/docs/{lang} -:es-repo-dir: {docdir} - - -include::../Versions.asciidoc[] -include::{xes-repo-dir}/index.asciidoc[] +include::index.asciidoc[] diff --git a/docs/reference/indices/create-index.asciidoc b/docs/reference/indices/create-index.asciidoc index f2882e6fb60..32a45462499 100644 --- a/docs/reference/indices/create-index.asciidoc +++ b/docs/reference/indices/create-index.asciidoc @@ -25,7 +25,7 @@ PUT twitter } -------------------------------------------------- // CONSOLE -<1> Default for `number_of_shards` is 5 +<1> Default for `number_of_shards` is 1 <2> Default for `number_of_replicas` is 1 (ie one replica for each primary shard) The above second curl example shows how an index called `twitter` can be diff --git a/docs/reference/indices/shrink-index.asciidoc b/docs/reference/indices/shrink-index.asciidoc index 34e90e6799d..24a67208f72 100644 --- a/docs/reference/indices/shrink-index.asciidoc +++ b/docs/reference/indices/shrink-index.asciidoc @@ -62,7 +62,7 @@ the following request: [source,js] -------------------------------------------------- -POST my_source_index/_shrink/my_target_index?copy_settings=true +POST my_source_index/_shrink/my_target_index { "settings": { "index.routing.allocation.require._name": null, <1> @@ -106,7 +106,7 @@ and accepts `settings` and `aliases` parameters for the target index: [source,js] -------------------------------------------------- -POST my_source_index/_shrink/my_target_index?copy_settings=true +POST my_source_index/_shrink/my_target_index { "settings": { "index.number_of_replicas": 1, @@ -130,16 +130,6 @@ POST 
my_source_index/_shrink/my_target_index?copy_settings=true NOTE: Mappings may not be specified in the `_shrink` request. -NOTE: By default, with the exception of `index.analysis`, `index.similarity`, -and `index.sort` settings, index settings on the source index are not copied -during a shrink operation. With the exception of non-copyable settings, settings -from the source index can be copied to the target index by adding the URL -parameter `copy_settings=true` to the request. Note that `copy_settings` can not -be set to `false`. The parameter `copy_settings` will be removed in 8.0.0 - -deprecated[6.4.0, not copying settings is deprecated, copying settings will be -the default behavior in 7.x] - [float] === Monitoring the shrink process diff --git a/docs/reference/indices/split-index.asciidoc b/docs/reference/indices/split-index.asciidoc index aaed23459c3..ade0a8075d5 100644 --- a/docs/reference/indices/split-index.asciidoc +++ b/docs/reference/indices/split-index.asciidoc @@ -123,7 +123,7 @@ the following request: [source,js] -------------------------------------------------- -POST my_source_index/_split/my_target_index?copy_settings=true +POST my_source_index/_split/my_target_index { "settings": { "index.number_of_shards": 2 @@ -158,7 +158,7 @@ and accepts `settings` and `aliases` parameters for the target index: [source,js] -------------------------------------------------- -POST my_source_index/_split/my_target_index?copy_settings=true +POST my_source_index/_split/my_target_index { "settings": { "index.number_of_shards": 5 <1> @@ -177,16 +177,6 @@ POST my_source_index/_split/my_target_index?copy_settings=true NOTE: Mappings may not be specified in the `_split` request. -NOTE: By default, with the exception of `index.analysis`, `index.similarity`, -and `index.sort` settings, index settings on the source index are not copied -during a split operation. 
With the exception of non-copyable settings, settings -from the source index can be copied to the target index by adding the URL -parameter `copy_settings=true` to the request. Note that `copy_settings` can not -be set to `false`. The parameter `copy_settings` will be removed in 8.0.0 - -deprecated[6.4.0, not copying settings is deprecated, copying settings will be -the default behavior in 7.x] - [float] === Monitoring the split process diff --git a/docs/reference/migration/migrate_7_0.asciidoc b/docs/reference/migration/migrate_7_0.asciidoc index aea6d14fac9..c68dee287d5 100644 --- a/docs/reference/migration/migrate_7_0.asciidoc +++ b/docs/reference/migration/migrate_7_0.asciidoc @@ -34,6 +34,7 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. * <> * <> * <> +* <> include::migrate_7_0/aggregations.asciidoc[] @@ -47,3 +48,4 @@ include::migrate_7_0/plugins.asciidoc[] include::migrate_7_0/api.asciidoc[] include::migrate_7_0/java.asciidoc[] include::migrate_7_0/settings.asciidoc[] +include::migrate_7_0/scripting.asciidoc[] diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index e140fd577bd..3d824c60064 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -65,3 +65,13 @@ deprecated in 6.3.0 and now removed in 7.0.0. In the past, `fields` could be provided either as a parameter, or as part of the request body. Specifying `fields` in the request body as opposed to a parameter was deprecated in 6.4.0, and is now unsupported in 7.0.0. + +==== `copy_settings` is deprecated on shrink and split APIs + +Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink +and split operations. Starting with Elasticsearch 7.0.0, the default behavior +will be for such settings to be copied on such operations. 
To enable users in +6.4.0 to transition to the default behavior in 7.0.0, the +`copy_settings` parameter was added on the REST layer. As this behavior will be +the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in +8.0.0. diff --git a/docs/reference/migration/migrate_7_0/scripting.asciidoc b/docs/reference/migration/migrate_7_0/scripting.asciidoc new file mode 100644 index 00000000000..df43aaa92ea --- /dev/null +++ b/docs/reference/migration/migrate_7_0/scripting.asciidoc @@ -0,0 +1,13 @@ +[[breaking_70_scripting_changes]] +=== Scripting changes + +==== getDate() and getDates() removed + +Fields of type `long` and `date` had `getDate()` and `getDates()` methods +(for multi valued fields) to get an object with date specific helper methods +for the current doc value. In 5.3.0, `date` fields were changed to expose +this same date object directly when calling `doc["myfield"].value`, and +the getter methods for date objects were deprecated. These methods have +now been removed. Instead, use `.value` on `date` fields, or explicitly +parse `long` fields into a date object using +`Instant.ofEpochMilli(doc["myfield"].value)`. 
diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..702782e1c5e --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +a3dba337d06e1f5930cb7ae638c1655b99ce0cb7 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-6705632810.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index b333863b94d..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f72ad4b6474c2d59b0eed0ca84eddd1f99d29129 \ No newline at end of file diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 51a1b7cecb3..6495659d9cd 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -74,16 +74,12 @@ class org.elasticsearch.index.fielddata.ScriptDocValues$Longs { Long get(int) long getValue() List getValues() - org.joda.time.ReadableDateTime getDate() - List getDates() } class org.elasticsearch.index.fielddata.ScriptDocValues$Dates { org.joda.time.ReadableDateTime get(int) org.joda.time.ReadableDateTime getValue() List getValues() - org.joda.time.ReadableDateTime getDate() - List getDates() } class org.elasticsearch.index.fielddata.ScriptDocValues$Doubles { diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/50_script_doc_values.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/50_script_doc_values.yml 
index ede2927b992..617b8df61b6 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/50_script_doc_values.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/50_script_doc_values.yml @@ -106,28 +106,6 @@ setup: source: "doc.date.value" - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' } - - do: - warnings: - - getDate is no longer necessary on date fields as the value is now a date. - search: - body: - script_fields: - field: - script: - source: "doc['date'].date" - - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' } - - - do: - warnings: - - getDates is no longer necessary on date fields as the values are now dates. - search: - body: - script_fields: - field: - script: - source: "doc['date'].dates.get(0)" - - match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' } - --- "geo_point": - do: @@ -213,28 +191,6 @@ setup: source: "doc['long'].value" - match: { hits.hits.0.fields.field.0: 12348732141234 } - - do: - warnings: - - getDate on numeric fields is deprecated. Use a date field to get dates. - search: - body: - script_fields: - field: - script: - source: "doc['long'].date" - - match: { hits.hits.0.fields.field.0: '2361-04-26T03:22:21.234Z' } - - - do: - warnings: - - getDates on numeric fields is deprecated. Use a date field to get dates. 
- search: - body: - script_fields: - field: - script: - source: "doc['long'].dates.get(0)" - - match: { hits.hits.0.fields.field.0: '2361-04-26T03:22:21.234Z' } - --- "integer": - do: diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..f99b0177de5 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +473a7f4d955f132bb498482648266653f8da85bd \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 6720beb8d86..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b4e19c53f29fa9b40bd7ad12ff598e3f08d507a3 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..08269eed636 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +c5a72b9a790e2552248c8bbb36af47c4c399ba27 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 861a2110e16..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -23dd8cb3834f3641d9b3e8bc3d38281389a597bc \ No newline at end of file diff --git 
a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..325fe161204 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +14f680ab9b886c7c5224ff682a7fa70b6df44a05 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c7c19feb57d..00000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e8119a17448a6f5512ded0bd2a6faa7fc8e70890 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..9e88119ed1d --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +e033c68c9ec1ba9cd8439758adf7eb5fee22acef \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 94e8c269838..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -336d9ac698066b8cf8a448f193e4a29ef163baa8 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file 
mode 100644 index 00000000000..74721c85757 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +08df0a5029f11c109b22064dec78c05dfa25f9e3 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 364facee9ef..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e1e77951a83fc6a9deab884773314992fefa14f3 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..1c257797c08 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +a9d1819b2b13f134f6a605ab5a59ce3c602c0460 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index b55d8cf04ec..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da149a16673c6326f4898ad877756259f676f8 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..117ac05c91f --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 
+1 @@ +47bc91ccb0cdf0c1c404646ffe0d5fd6b020a4ab \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index dcc2249c45f..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ab4141b43cc6c2680d5f5a0b5086299f38ebec4d \ No newline at end of file diff --git a/plugins/examples/meta-plugin/build.gradle b/plugins/examples/meta-plugin/build.gradle deleted file mode 100644 index db28e637871..00000000000 --- a/plugins/examples/meta-plugin/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// A meta plugin packaging example that bundles multiple plugins in a single zip. 
- -apply plugin: 'elasticsearch.es-meta-plugin' - -es_meta_plugin { - name 'meta-plugin' - description 'example meta plugin' - plugins = ['dummy-plugin1', 'dummy-plugin2'] -} diff --git a/plugins/examples/meta-plugin/dummy-plugin1/build.gradle b/plugins/examples/meta-plugin/dummy-plugin1/build.gradle deleted file mode 100644 index 5a02e993f8c..00000000000 --- a/plugins/examples/meta-plugin/dummy-plugin1/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -apply plugin: 'elasticsearch.esplugin' - -esplugin { - name 'dummy-plugin1' - description 'A dummy plugin' - classname 'org.elasticsearch.example.DummyPlugin1' -} - -test.enabled = false -integTestRunner.enabled = false \ No newline at end of file diff --git a/plugins/examples/meta-plugin/dummy-plugin1/src/main/java/org/elasticsearch/example/DummyPlugin1.java b/plugins/examples/meta-plugin/dummy-plugin1/src/main/java/org/elasticsearch/example/DummyPlugin1.java deleted file mode 100644 index 65102dbc2e3..00000000000 --- a/plugins/examples/meta-plugin/dummy-plugin1/src/main/java/org/elasticsearch/example/DummyPlugin1.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.example; - -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.SearchPlugin; - -import java.util.List; - -import static java.util.Collections.singletonList; - -public class DummyPlugin1 extends Plugin {} diff --git a/plugins/examples/meta-plugin/dummy-plugin2/build.gradle b/plugins/examples/meta-plugin/dummy-plugin2/build.gradle deleted file mode 100644 index d90983adfed..00000000000 --- a/plugins/examples/meta-plugin/dummy-plugin2/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -apply plugin: 'elasticsearch.esplugin' - -esplugin { - name 'dummy-plugin2' - description 'Another dummy plugin' - classname 'org.elasticsearch.example.DummyPlugin2' -} - -test.enabled = false -integTestRunner.enabled = false \ No newline at end of file diff --git a/plugins/examples/meta-plugin/dummy-plugin2/src/main/java/org/elasticsearch/example/DummyPlugin2.java b/plugins/examples/meta-plugin/dummy-plugin2/src/main/java/org/elasticsearch/example/DummyPlugin2.java deleted file mode 100644 index 2d74d7603d1..00000000000 --- a/plugins/examples/meta-plugin/dummy-plugin2/src/main/java/org/elasticsearch/example/DummyPlugin2.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.example; - -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.SearchPlugin; - -import java.util.List; - -import static java.util.Collections.singletonList; - -public class DummyPlugin2 extends Plugin {} diff --git a/plugins/examples/meta-plugin/src/main/resources/meta-plugin-descriptor.properties b/plugins/examples/meta-plugin/src/main/resources/meta-plugin-descriptor.properties deleted file mode 100644 index 1fd5a86b95a..00000000000 --- a/plugins/examples/meta-plugin/src/main/resources/meta-plugin-descriptor.properties +++ /dev/null @@ -1,4 +0,0 @@ -# The name of the meta plugin -name=my_meta_plugin -# The description of the meta plugin -description=A meta plugin example \ No newline at end of file diff --git a/plugins/examples/meta-plugin/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java b/plugins/examples/meta-plugin/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java deleted file mode 100644 index d1f9e6b7370..00000000000 --- a/plugins/examples/meta-plugin/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.smoketest; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; - -public class SmokeTestPluginsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - - public SmokeTestPluginsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); - } -} - diff --git a/plugins/examples/meta-plugin/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yml b/plugins/examples/meta-plugin/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yml deleted file mode 100644 index 011a278ed89..00000000000 --- a/plugins/examples/meta-plugin/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yml +++ /dev/null @@ -1,14 +0,0 @@ -# Integration tests for testing meta plugins -# -"Check meta plugin install": - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - - match: { nodes.$master.plugins.0.name: dummy-plugin1 } - - match: { nodes.$master.plugins.1.name: dummy-plugin2 } diff --git a/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java b/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java index 5a146f75919..b910526ef3d 100644 --- a/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java +++ 
b/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java @@ -54,7 +54,7 @@ public class ExpertScriptPlugin extends Plugin implements ScriptPlugin { @Override public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { - if (context.equals(SearchScript.CONTEXT) == false) { + if (context.equals(SearchScript.SCRIPT_SCORE_CONTEXT) == false) { throw new IllegalArgumentException(getType() + " scripts cannot be used for context [" + context.name + "]"); } // we use the script "source" as the script identifier diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 1afda01130b..edc10bdec3a 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -170,91 +170,6 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { } } - /** - * Two plugins in a meta module - one with a controller daemon and one without. - */ - public void testControllerSpawnMeta() throws Exception { - runTestControllerSpawnMeta(Environment::pluginsFile, false); - runTestControllerSpawnMeta(Environment::modulesFile, true); - } - - - private void runTestControllerSpawnMeta( - final Function pluginsDirFinder, final boolean expectSpawn) throws Exception { - /* - * On Windows you can not directly run a batch file - you have to run cmd.exe with the batch - * file as an argument and that's out of the remit of the controller daemon process spawner. 
- */ - assumeFalse("This test does not work on Windows", Constants.WINDOWS); - - Path esHome = createTempDir().resolve("esHome"); - Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString()); - Settings settings = settingsBuilder.build(); - - Environment environment = TestEnvironment.newEnvironment(settings); - - Path metaModule = pluginsDirFinder.apply(environment).resolve("meta_module"); - Files.createDirectories(environment.modulesFile()); - Files.createDirectories(metaModule); - PluginTestUtil.writeMetaPluginProperties( - metaModule, - "description", "test_plugin", - "name", "meta_plugin", - "plugins", "test_plugin,other_plugin"); - - // this plugin will have a controller daemon - Path plugin = metaModule.resolve("test_plugin"); - - Files.createDirectories(plugin); - PluginTestUtil.writePluginProperties( - plugin, - "description", "test_plugin", - "version", Version.CURRENT.toString(), - "elasticsearch.version", Version.CURRENT.toString(), - "name", "test_plugin", - "java.version", "1.8", - "classname", "TestPlugin", - "has.native.controller", "true"); - Path controllerProgram = Platforms.nativeControllerPath(plugin); - createControllerProgram(controllerProgram); - - // this plugin will not have a controller daemon - Path otherPlugin = metaModule.resolve("other_plugin"); - Files.createDirectories(otherPlugin); - PluginTestUtil.writePluginProperties( - otherPlugin, - "description", "other_plugin", - "version", Version.CURRENT.toString(), - "elasticsearch.version", Version.CURRENT.toString(), - "name", "other_plugin", - "java.version", "1.8", - "classname", "OtherPlugin", - "has.native.controller", "false"); - - Spawner spawner = new Spawner(); - spawner.spawnNativeControllers(environment); - - List processes = spawner.getProcesses(); - - if (expectSpawn) { - // as there should only be a reference in the list for the plugin that had the controller daemon, we expect one here - 
assertThat(processes, hasSize(1)); - Process process = processes.get(0); - final InputStreamReader in = - new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8); - try (BufferedReader stdoutReader = new BufferedReader(in)) { - String line = stdoutReader.readLine(); - assertEquals("I am alive", line); - spawner.close(); - // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling - assertTrue(process.waitFor(1, TimeUnit.SECONDS)); - } - } else { - assertThat(processes, hasSize(0)); - } - } - public void testControllerSpawnWithIncorrectDescriptor() throws IOException { // this plugin will have a controller daemon Path esHome = createTempDir().resolve("esHome"); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml index f94cf286fd8..bfd3fc58fdf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml @@ -1,8 +1,8 @@ --- "Shrink index via API": - skip: - version: " - 6.3.99" - reason: expects warnings that pre-6.4.0 will not send + version: " - 6.9.99" + reason: expects warnings that pre-7.0.0 will not send features: "warnings" # creates an index with one document solely allocated on the master node # and shrinks it into a new index with a single shard @@ -67,8 +67,6 @@ body: settings: index.number_of_replicas: 0 - warnings: - - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml index 6f532ff81c6..2e7a13b38e1 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml @@ -1,8 +1,8 @@ --- "Shrink index ignores target template mapping": - skip: - version: " - 6.3.99" - reason: expects warnings that pre-6.4.0 will not send + version: " - 6.9.99" + reason: expects warnings that pre-7.0.0 will not send features: "warnings" - do: @@ -71,8 +71,6 @@ body: settings: index.number_of_replicas: 0 - warnings: - - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml index 53a12aad787..8c4c84c4be1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml @@ -1,8 +1,8 @@ --- "Copy settings during shrink index": - skip: - version: " - 6.3.99" - reason: expects warnings that pre-6.4.0 will not send + version: " - 6.9.99" + reason: expects warnings that pre-7.0.0 will not send features: "warnings" - do: @@ -48,6 +48,8 @@ settings: index.number_of_replicas: 0 index.merge.scheduler.max_thread_count: 2 + warnings: + - "parameter [copy_settings] is deprecated and will be removed in 8.0.0" - do: cluster.health: @@ -63,19 +65,17 @@ - match: { copy-settings-target.settings.index.blocks.write: "true" } - match: { copy-settings-target.settings.index.routing.allocation.include._id: $master } - # now we do a actual shrink and do not copy settings (by default) + # now we do a actual shrink and copy settings (by default) - do: indices.shrink: index: "source" - target: "no-copy-settings-target" + target: "default-copy-settings-target" wait_for_active_shards: 1 
master_timeout: 10s body: settings: index.number_of_replicas: 0 index.merge.scheduler.max_thread_count: 2 - warnings: - - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: @@ -83,13 +83,13 @@ - do: indices.get_settings: - index: "no-copy-settings-target" + index: "default-copy-settings-target" - # only the request setting should be copied - - is_false: no-copy-settings-target.settings.index.merge.scheduler.max_merge_count - - match: { no-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" } - - is_false: no-copy-settings-target.settings.index.blocks.write - - is_false: no-copy-settings-target.settings.index.routing.allocation.include._id + # settings should be copied + - match: { default-copy-settings-target.settings.index.merge.scheduler.max_merge_count: "4" } + - match: { default-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" } + - match: { default-copy-settings-target.settings.index.blocks.write: "true" } + - match: { default-copy-settings-target.settings.index.routing.allocation.include._id: $master } # now we do a actual shrink and try to set no copy settings - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml index 8cfe77042dd..74774f13e21 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml @@ -33,8 +33,8 @@ setup: --- "Split index via API": - skip: - version: " - 6.3.99" - reason: expects warnings that pre-6.4.0 will not send + version: " - 6.9.99" + reason: pre-7.0.0 will send warnings features: "warnings" # make it read-only @@ -61,8 +61,6 @@ setup: settings: index.number_of_replicas: 0 index.number_of_shards: 4 - warnings: - - "resize operations without copying 
settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: @@ -108,8 +106,7 @@ setup: "Split from 1 to N": - skip: version: " - 6.99.99" - reason: Automatic preparation for splitting was added in 7.0.0 - features: "warnings" + reason: automatic preparation for splitting was added in 7.0.0 - do: indices.create: index: source_one_shard @@ -163,8 +160,6 @@ setup: settings: index.number_of_replicas: 0 index.number_of_shards: 5 - warnings: - - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: @@ -205,13 +200,11 @@ setup: - match: { _id: "3" } - match: { _source: { foo: "hello world 3" } } - - --- "Create illegal split indices": - skip: - version: " - 6.3.99" - reason: expects warnings that pre-6.4.0 will not send + version: " - 6.9.99" + reason: pre-7.0.0 will send warnings features: "warnings" # try to do an illegal split with number_of_routing_shards set @@ -227,8 +220,6 @@ setup: index.number_of_replicas: 0 index.number_of_shards: 4 index.number_of_routing_shards: 8 - warnings: - - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" # try to do an illegal split with illegal number_of_shards - do: @@ -242,5 +233,3 @@ setup: settings: index.number_of_replicas: 0 index.number_of_shards: 6 - warnings: - - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml index 88d3f3c6102..727e1e374ba 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml @@ 
-1,8 +1,8 @@ --- "Split index ignores target template mapping": - skip: - version: " - 6.3.99" - reason: expects warnings that pre-6.4.0 will not send + version: " - 6.9.99" + reason: pre-7.0.0 will send warnings features: "warnings" # create index @@ -65,8 +65,6 @@ settings: index.number_of_shards: 2 index.number_of_replicas: 0 - warnings: - - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml index 9e64b2b8130..90d4080e463 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml @@ -1,8 +1,8 @@ --- "Copy settings during split index": - skip: - version: " - 6.3.99" - reason: expects warnings that pre-6.4.0 will not send + version: " - 6.9.99" + reason: expects warnings that pre-7.0.0 will not send features: "warnings" - do: @@ -50,6 +50,9 @@ index.number_of_replicas: 0 index.number_of_shards: 2 index.merge.scheduler.max_thread_count: 2 + warnings: + - "parameter [copy_settings] is deprecated and will be removed in 8.0.0" + - do: cluster.health: @@ -65,11 +68,11 @@ - match: { copy-settings-target.settings.index.blocks.write: "true" } - match: { copy-settings-target.settings.index.routing.allocation.include._id: $master } - # now we do a actual shrink and do not copy settings (by default) + # now we do a actual shrink and copy settings (by default) - do: indices.split: index: "source" - target: "no-copy-settings-target" + target: "default-copy-settings-target" wait_for_active_shards: 1 master_timeout: 10s body: @@ -77,8 +80,6 @@ index.number_of_replicas: 0 index.number_of_shards: 2 index.merge.scheduler.max_thread_count: 2 - warnings: - - "resize 
operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: @@ -86,13 +87,13 @@ - do: indices.get_settings: - index: "no-copy-settings-target" + index: "default-copy-settings-target" - # only the request setting should be copied - - is_false: no-copy-settings-target.settings.index.merge.scheduler.max_merge_count - - match: { no-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" } - - is_false: no-copy-settings-target.settings.index.blocks.write - - is_false: no-copy-settings-target.settings.index.routing.allocation.include._id + # settings should be copied + - match: { default-copy-settings-target.settings.index.merge.scheduler.max_merge_count: "4" } + - match: { default-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" } + - match: { default-copy-settings-target.settings.index.blocks.write: "true" } + - match: { default-copy-settings-target.settings.index.routing.allocation.include._id: $master } - do: catch: /illegal_argument_exception/ diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..14f5fcb381f --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +b70d03784d06a643e096fae4d959200aa246ba16 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index ccabc013780..00000000000 --- a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f465718b3db829e7660009aac2c1211fd5d74ca0 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-59f2b7aec2.jar.sha1 
b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..47afb59e45e --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +d660a63ac0f7ab2772a45ae518518472bf620620 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 68c4dcebd2e..00000000000 --- a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d502441e830e1a9d30270442f8e3fd8317fe7bba \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..50392f59374 --- /dev/null +++ b/server/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +bf8f9e8284a54af18545574cb4a530da0deb968a \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c25718d0a9e..00000000000 --- a/server/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5167fb0a14434cb10ec3224e9e32ca668e9f9ad4 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..27d7aaab2f5 --- /dev/null +++ b/server/licenses/lucene-grouping-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +9eaae9dcd4ec88227475cb81d3be9afa767f1b22 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 
007ae9bb24e..00000000000 --- a/server/licenses/lucene-grouping-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -488aeecf49413b63a404989ae00b07b20951e76e \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..176c3a86afe --- /dev/null +++ b/server/licenses/lucene-highlighter-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +cd15f0008742c84899d678cb0cecda06d0a6d63e \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index f1733ffb682..00000000000 --- a/server/licenses/lucene-highlighter-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -107755edd67cddb3fb9817de50c0bed3a10da19c \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..0bfe9cfb79a --- /dev/null +++ b/server/licenses/lucene-join-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +5ce38b8610a7f402f2da3b0e408e508151d979c5 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 71800d6aa15..00000000000 --- a/server/licenses/lucene-join-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9226fab3b9c6250af52b87061f637c0f8e3114b6 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..c1a0127e2ce --- /dev/null +++ b/server/licenses/lucene-memory-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ 
+53819f03a07050a4af28361d64395c86f2cea008 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 6cc24bbe98b..00000000000 --- a/server/licenses/lucene-memory-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2b7bf384c1933225972f04224d867ec800f5e3a7 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..615a0dec0c0 --- /dev/null +++ b/server/licenses/lucene-misc-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +8cdc0e2b65d146ed11f4d2507109e530d59ff33d \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 03c146f5c64..00000000000 --- a/server/licenses/lucene-misc-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18b770c35db8757dc036b1506870a4ddaad7b1ab \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..12f5eff262e --- /dev/null +++ b/server/licenses/lucene-queries-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +e56090463703112ad64ad457d18bae9a5b2966b8 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 3ecdd79cafd..00000000000 --- a/server/licenses/lucene-queries-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -683f6436938c67709d0c665c9e1fdef7bd893e4a \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-59f2b7aec2.jar.sha1 
b/server/licenses/lucene-queryparser-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..a787a00541a --- /dev/null +++ b/server/licenses/lucene-queryparser-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +9faf974b77058e44a6d35e956db4f5fb67389dfa \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index d113267f6f3..00000000000 --- a/server/licenses/lucene-queryparser-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1df20ba64b9aa68f1fa9a15c9ff75f87f94dec47 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..7d95cd6b3b6 --- /dev/null +++ b/server/licenses/lucene-sandbox-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +b852b1fe70ef70736b2b1a9ad57eb93cbaed0423 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 7c3391aec27..00000000000 --- a/server/licenses/lucene-sandbox-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -895ca714fc62b66ba63d43931730cdc4ef56d35f \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..ac0598b3f0c --- /dev/null +++ b/server/licenses/lucene-spatial-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +d2fa99ec7140fcf35db16ac1feb78ef142750d39 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 35d6d5359eb..00000000000 
--- a/server/licenses/lucene-spatial-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -95ab7e9421bbeb8229d83ac72700b37a521fdf4f \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..a2537dbdde5 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +c9963f60d3a0924b877a6f910650c5f2384822a0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c2f2f39a1f8..00000000000 --- a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -773ff8c8425d32609ccec6956759ad377dfb8f6b \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..6844bcd13b2 --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +3f33ba54da5e0e125f4c5ef7dd800dd6185e4f61 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 0533067ff0d..00000000000 --- a/server/licenses/lucene-spatial3d-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ea711541e243ee768f950041e6e2843d0cc5e695 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..0343db2d944 --- /dev/null +++ b/server/licenses/lucene-suggest-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ 
-0,0 +1 @@ +bb3c18c987395dae6fe63744f5a50fd367ea5a74 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index ce8adccc89a..00000000000 --- a/server/licenses/lucene-suggest-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2ca005cf25722ba3777ed93f720f40c937081fa6 \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 896d7ab690e..5111348d363 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -169,7 +169,7 @@ public class Version implements Comparable, ToXContentFragment { public static final int V_6_2_5_ID = 6020599; public static final Version V_6_2_5 = new Version(V_6_2_5_ID, LUCENE_7_2_1); public static final int V_6_3_0_ID = 6030099; - public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_0); + public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_1); public static final int V_6_4_0_ID = 6040099; public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_7_0_0_alpha1_ID = 7000001; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index ca046c48acc..eb95a2455b2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -56,7 +56,7 @@ public class ResizeRequest extends AcknowledgedRequest implements private CreateIndexRequest targetIndexRequest; private String sourceIndex; private ResizeType type 
= ResizeType.SHRINK; - private Boolean copySettings; + private Boolean copySettings = true; ResizeRequest() {} diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index 3d07a0f87aa..dd85e921e4a 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -94,38 +94,19 @@ public abstract class ScriptDocValues extends AbstractList { } public static final class Longs extends ScriptDocValues { - protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Longs.class)); - private final SortedNumericDocValues in; - /** - * Callback for deprecated fields. In production this should always point to - * {@link #deprecationLogger} but tests will override it so they can test that - * we use the required permissions when calling it. - */ - private final Consumer deprecationCallback; private long[] values = new long[0]; private int count; - private Dates dates; - private int docId = -1; /** * Standard constructor. */ public Longs(SortedNumericDocValues in) { - this(in, deprecationLogger::deprecated); - } - - /** - * Constructor for testing the deprecation callback. 
- */ - Longs(SortedNumericDocValues in, Consumer deprecationCallback) { this.in = in; - this.deprecationCallback = deprecationCallback; } @Override public void setNextDocId(int docId) throws IOException { - this.docId = docId; if (in.advanceExact(docId)) { resize(in.docValueCount()); for (int i = 0; i < count; i++) { @@ -134,9 +115,6 @@ public abstract class ScriptDocValues extends AbstractList { } else { resize(0); } - if (dates != null) { - dates.setNextDocId(docId); - } } /** @@ -148,10 +126,6 @@ public abstract class ScriptDocValues extends AbstractList { values = ArrayUtil.grow(values, count); } - public SortedNumericDocValues getInternalValues() { - return this.in; - } - public long getValue() { if (count == 0) { return 0L; @@ -159,26 +133,6 @@ public abstract class ScriptDocValues extends AbstractList { return values[0]; } - @Deprecated - public ReadableDateTime getDate() throws IOException { - deprecated("getDate on numeric fields is deprecated. Use a date field to get dates."); - if (dates == null) { - dates = new Dates(in); - dates.setNextDocId(docId); - } - return dates.getValue(); - } - - @Deprecated - public List getDates() throws IOException { - deprecated("getDates on numeric fields is deprecated. Use a date field to get dates."); - if (dates == null) { - dates = new Dates(in); - dates.setNextDocId(docId); - } - return dates; - } - @Override public Long get(int index) { return values[index]; @@ -188,22 +142,6 @@ public abstract class ScriptDocValues extends AbstractList { public int size() { return count; } - - /** - * Log a deprecation log, with the server's permissions, not the permissions of the - * script calling this method. We need to do this to prevent errors when rolling - * the log file. 
- */ - private void deprecated(String message) { - // Intentionally not calling SpecialPermission.check because this is supposed to be called by scripts - AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Void run() { - deprecationCallback.accept(message); - return null; - } - }); - } } public static final class Dates extends ScriptDocValues { @@ -212,12 +150,6 @@ public abstract class ScriptDocValues extends AbstractList { private static final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC); private final SortedNumericDocValues in; - /** - * Callback for deprecated fields. In production this should always point to - * {@link #deprecationLogger} but tests will override it so they can test that - * we use the required permissions when calling it. - */ - private final Consumer deprecationCallback; /** * Values wrapped in {@link MutableDateTime}. Null by default an allocated on first usage so we allocate a reasonably size. We keep * this array so we don't have allocate new {@link MutableDateTime}s on every usage. Instead we reuse them for every document. @@ -229,15 +161,7 @@ public abstract class ScriptDocValues extends AbstractList { * Standard constructor. */ public Dates(SortedNumericDocValues in) { - this(in, deprecationLogger::deprecated); - } - - /** - * Constructor for testing deprecation logging. - */ - Dates(SortedNumericDocValues in, Consumer deprecationCallback) { this.in = in; - this.deprecationCallback = deprecationCallback; } /** @@ -251,24 +175,6 @@ public abstract class ScriptDocValues extends AbstractList { return get(0); } - /** - * Fetch the first value. Added for backwards compatibility with 5.x when date fields were {@link Longs}. - */ - @Deprecated - public ReadableDateTime getDate() { - deprecated("getDate is no longer necessary on date fields as the value is now a date."); - return getValue(); - } - - /** - * Fetch all the values. 
Added for backwards compatibility with 5.x when date fields were {@link Longs}. - */ - @Deprecated - public List getDates() { - deprecated("getDates is no longer necessary on date fields as the values are now dates."); - return this; - } - @Override public ReadableDateTime get(int index) { if (index >= count) { @@ -326,22 +232,6 @@ public abstract class ScriptDocValues extends AbstractList { dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC); } } - - /** - * Log a deprecation log, with the server's permissions, not the permissions of the - * script calling this method. We need to do this to prevent errors when rolling - * the log file. - */ - private void deprecated(String message) { - // Intentionally not calling SpecialPermission.check because this is supposed to be called by scripts - AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Void run() { - deprecationCallback.accept(message); - return null; - } - }); - } } public static final class Doubles extends ScriptDocValues { diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index 5caabd445b3..fbada58f294 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -249,7 +249,8 @@ public final class TermsSetQueryBuilder extends AbstractQueryBuilder params = new HashMap<>(); params.putAll(minimumShouldMatchScript.getParams()); params.put("num_terms", queries.size()); diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java index cc89518154d..ed4c5f5a269 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java @@ -92,7 +92,7 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder propsMap; - { - final Properties props = new Properties(); - try (InputStream stream = Files.newInputStream(descriptor)) { - props.load(stream); - } - propsMap = props.stringPropertyNames().stream().collect(Collectors.toMap(Function.identity(), props::getProperty)); - } - - final String name = propsMap.remove("name"); - if (name == null || name.isEmpty()) { - throw new IllegalArgumentException( - "property [name] is missing for meta plugin in [" + descriptor + "]"); - } - final String description = propsMap.remove("description"); - if (description == null) { - throw new IllegalArgumentException( - "property [description] is missing for meta plugin [" + name + "]"); - } - - if (propsMap.isEmpty() == false) { - throw new IllegalArgumentException("Unknown properties in meta plugin descriptor: " + propsMap.keySet()); - } - - return new MetaPluginInfo(name, description); - } - - /** - * The name of the meta plugin. - * - * @return the meta plugin name - */ - public String getName() { - return name; - } - - /** - * The description of the meta plugin. 
- * - * @return the meta plugin description - */ - public String getDescription() { - return description; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - MetaPluginInfo that = (MetaPluginInfo) o; - - if (!name.equals(that.name)) return false; - - return true; - } - - @Override - public int hashCode() { - return name.hashCode(); - } - - @Override - public String toString() { - final StringBuilder information = new StringBuilder() - .append("- Plugin information:\n") - .append("Name: ").append(name).append("\n") - .append("Description: ").append(description); - return information.toString(); - } - -} diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index 4514691e4be..3bb2c3a1868 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -140,16 +140,12 @@ public class PluginsService extends AbstractComponent { // TODO: remove this leniency, but tests bogusly rely on it if (isAccessibleDirectory(pluginsDirectory, logger)) { checkForFailedPluginRemovals(pluginsDirectory); - // call findBundles directly to get the meta plugin names - List plugins = findBundles(pluginsDirectory, "plugin"); - for (final BundleCollection plugin : plugins) { - final Collection bundles = plugin.bundles(); - for (final Bundle bundle : bundles) { - pluginsList.add(bundle.plugin); - } - seenBundles.addAll(bundles); - pluginsNames.add(plugin.name()); + Set plugins = getPluginBundles(pluginsDirectory); + for (final Bundle bundle : plugins) { + pluginsList.add(bundle.plugin); + pluginsNames.add(bundle.plugin.getName()); } + seenBundles.addAll(plugins); } } catch (IOException ex) { throw new IllegalStateException("Unable to initialize plugins", ex); @@ -253,17 +249,8 @@ public class PluginsService extends 
AbstractComponent { return info; } - /** - * An abstraction over a single plugin and meta-plugins. - */ - interface BundleCollection { - String name(); - Collection bundles(); - } - - // a "bundle" is a group of plugins in a single classloader - // really should be 1-1, but we are not so fortunate - static class Bundle implements BundleCollection { + // a "bundle" is a group of jars in a single classloader + static class Bundle { final PluginInfo plugin; final Set urls; @@ -283,16 +270,6 @@ public class PluginsService extends AbstractComponent { this.urls = Objects.requireNonNull(urls); } - @Override - public String name() { - return plugin.getName(); - } - - @Override - public Collection bundles() { - return Collections.singletonList(this); - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -308,87 +285,30 @@ public class PluginsService extends AbstractComponent { } /** - * Represents a meta-plugin and the {@link Bundle}s corresponding to its constituents. - */ - static class MetaBundle implements BundleCollection { - private final String name; - private final List bundles; - - MetaBundle(final String name, final List bundles) { - this.name = name; - this.bundles = bundles; - } - - @Override - public String name() { - return name; - } - - @Override - public Collection bundles() { - return bundles; - } - - } - - /** - * Extracts all installed plugin directories from the provided {@code rootPath} expanding meta-plugins if needed. + * Extracts all installed plugin directories from the provided {@code rootPath}. 
* * @param rootPath the path where the plugins are installed * @return a list of all plugin paths installed in the {@code rootPath} * @throws IOException if an I/O exception occurred reading the directories */ public static List findPluginDirs(final Path rootPath) throws IOException { - final Tuple, Map>> groupedPluginDirs = findGroupedPluginDirs(rootPath); - return Stream.concat( - groupedPluginDirs.v1().stream(), - groupedPluginDirs.v2().values().stream().flatMap(Collection::stream)) - .collect(Collectors.toList()); - } - - /** - * Extracts all installed plugin directories from the provided {@code rootPath} expanding meta-plugins if needed. The plugins are - * grouped into plugins and meta-plugins. The meta-plugins are keyed by the meta-plugin name. - * - * @param rootPath the path where the plugins are installed - * @return a tuple of plugins as the first component and meta-plugins keyed by meta-plugin name as the second component - * @throws IOException if an I/O exception occurred reading the directories - */ - private static Tuple, Map>> findGroupedPluginDirs(final Path rootPath) throws IOException { final List plugins = new ArrayList<>(); - final Map> metaPlugins = new LinkedHashMap<>(); final Set seen = new HashSet<>(); if (Files.exists(rootPath)) { try (DirectoryStream stream = Files.newDirectoryStream(rootPath)) { for (Path plugin : stream) { if (FileSystemUtils.isDesktopServicesStore(plugin) || - plugin.getFileName().toString().startsWith(".removing-")) { + plugin.getFileName().toString().startsWith(".removing-")) { continue; } if (seen.add(plugin.getFileName().toString()) == false) { throw new IllegalStateException("duplicate plugin: " + plugin); } - if (MetaPluginInfo.isMetaPlugin(plugin)) { - final String name = plugin.getFileName().toString(); - try (DirectoryStream subStream = Files.newDirectoryStream(plugin)) { - for (Path subPlugin : subStream) { - if (MetaPluginInfo.isPropertiesFile(subPlugin) || - 
FileSystemUtils.isDesktopServicesStore(subPlugin)) { - continue; - } - if (seen.add(subPlugin.getFileName().toString()) == false) { - throw new IllegalStateException("duplicate plugin: " + subPlugin); - } - metaPlugins.computeIfAbsent(name, n -> new ArrayList<>()).add(subPlugin); - } - } - } else { - plugins.add(plugin); - } + plugins.add(plugin); } } } - return Tuple.tuple(plugins, metaPlugins); + return plugins; } /** @@ -425,32 +345,21 @@ public class PluginsService extends AbstractComponent { /** Get bundles for plugins installed in the given modules directory. */ static Set getModuleBundles(Path modulesDirectory) throws IOException { - return findBundles(modulesDirectory, "module").stream().flatMap(b -> b.bundles().stream()).collect(Collectors.toSet()); + return findBundles(modulesDirectory, "module"); } /** Get bundles for plugins installed in the given plugins directory. */ static Set getPluginBundles(final Path pluginsDirectory) throws IOException { - return findBundles(pluginsDirectory, "plugin").stream().flatMap(b -> b.bundles().stream()).collect(Collectors.toSet()); + return findBundles(pluginsDirectory, "plugin"); } // searches subdirectories under the given directory for plugin directories - private static List findBundles(final Path directory, String type) throws IOException { - final List bundles = new ArrayList<>(); - final Set seenBundles = new HashSet<>(); - final Tuple, Map>> groupedPluginDirs = findGroupedPluginDirs(directory); - for (final Path plugin : groupedPluginDirs.v1()) { - final Bundle bundle = readPluginBundle(seenBundles, plugin, type); + private static Set findBundles(final Path directory, String type) throws IOException { + final Set bundles = new HashSet<>(); + for (final Path plugin : findPluginDirs(directory)) { + final Bundle bundle = readPluginBundle(bundles, plugin, type); bundles.add(bundle); } - for (final Map.Entry> metaPlugin : groupedPluginDirs.v2().entrySet()) { - final List metaPluginBundles = new ArrayList<>(); - for 
(final Path metaPluginPlugin : metaPlugin.getValue()) { - final Bundle bundle = readPluginBundle(seenBundles, metaPluginPlugin, type); - metaPluginBundles.add(bundle); - } - final MetaBundle metaBundle = new MetaBundle(metaPlugin.getKey(), metaPluginBundles); - bundles.add(metaBundle); - } return bundles; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java index bc5db552b9d..9444563d1c6 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java @@ -51,18 +51,16 @@ public abstract class RestResizeHandler extends BaseRestHandler { final Boolean copySettings; if (rawCopySettings == null) { copySettings = resizeRequest.getCopySettings(); - } else if (rawCopySettings.isEmpty()) { - copySettings = true; } else { - copySettings = Booleans.parseBoolean(rawCopySettings); - if (copySettings == false) { - throw new IllegalArgumentException("parameter [copy_settings] can not be explicitly set to [false]"); + if (rawCopySettings.isEmpty()) { + copySettings = true; + } else { + copySettings = Booleans.parseBoolean(rawCopySettings); + if (copySettings == false) { + throw new IllegalArgumentException("parameter [copy_settings] can not be explicitly set to [false]"); + } } - } - if (copySettings == null) { - deprecationLogger.deprecated( - "resize operations without copying settings is deprecated; " - + "set parameter [copy_settings] to [true] for future default behavior"); + deprecationLogger.deprecated("parameter [copy_settings] is deprecated and will be removed in 8.0.0"); } resizeRequest.setCopySettings(copySettings); request.applyContentParser(resizeRequest::fromXContent); diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java 
b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 5afb6ad28d7..583421be8e5 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -42,6 +42,9 @@ public class ScriptModule { CORE_CONTEXTS = Stream.of( SearchScript.CONTEXT, SearchScript.AGGS_CONTEXT, + SearchScript.SCRIPT_SCORE_CONTEXT, + SearchScript.SCRIPT_SORT_CONTEXT, + SearchScript.TERMS_SET_QUERY_CONTEXT, ExecutableScript.CONTEXT, ExecutableScript.AGGS_CONTEXT, ExecutableScript.UPDATE_CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/script/SearchScript.java b/server/src/main/java/org/elasticsearch/script/SearchScript.java index d0c932a3490..e5762adb1bb 100644 --- a/server/src/main/java/org/elasticsearch/script/SearchScript.java +++ b/server/src/main/java/org/elasticsearch/script/SearchScript.java @@ -158,6 +158,12 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript { /** The context used to compile {@link SearchScript} factories. */ public static final ScriptContext CONTEXT = new ScriptContext<>("search", Factory.class); - // TODO: remove aggs context when it has its own interface + // TODO: remove these contexts when it has its own interface public static final ScriptContext AGGS_CONTEXT = new ScriptContext<>("aggs", Factory.class); -} \ No newline at end of file + // Can return a double. 
(For ScriptSortType#NUMBER only, for ScriptSortType#STRING normal CONTEXT should be used) + public static final ScriptContext SCRIPT_SORT_CONTEXT = new ScriptContext<>("sort", Factory.class); + // Can return a float + public static final ScriptContext SCRIPT_SCORE_CONTEXT = new ScriptContext<>("score", Factory.class); + // Can return a long + public static final ScriptContext TERMS_SET_QUERY_CONTEXT = new ScriptContext<>("terms_set", Factory.class); +} diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 99668515de5..6e52160238b 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -305,7 +305,7 @@ public class ScriptSortBuilder extends SortBuilder { @Override public SortFieldAndFormat build(QueryShardContext context) throws IOException { - final SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.CONTEXT); + final SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.SCRIPT_SORT_CONTEXT); final SearchScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup()); MultiValueMode valueMode = null; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index d89a8a134ff..0c3cc7e4b15 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; -import 
org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; @@ -84,7 +83,6 @@ public class ShrinkIndexIT extends ESIntegTestCase { return Arrays.asList(InternalSettingsPlugin.class); } - @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-8318") public void testCreateShrinkIndexToN() { int[][] possibleShardSplits = new int[][] {{8,4,2}, {9, 3, 1}, {4, 2, 1}, {15,5,1}}; int[] shardSplits = randomFrom(possibleShardSplits); @@ -113,9 +111,11 @@ public class ShrinkIndexIT extends ESIntegTestCase { ensureGreen(); // now merge source into a 4 shard index assertAcked(client().admin().indices().prepareResizeIndex("source", "first_shrink") - .setSettings(Settings.builder() - .put("index.number_of_replicas", 0) - .put("index.number_of_shards", shardSplits[1]).build()).get()); + .setSettings(Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", shardSplits[1]) + .putNull("index.blocks.write") + .build()).get()); ensureGreen(); assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), 20); @@ -135,9 +135,12 @@ public class ShrinkIndexIT extends ESIntegTestCase { ensureGreen(); // now merge source into a 2 shard index assertAcked(client().admin().indices().prepareResizeIndex("first_shrink", "second_shrink") - .setSettings(Settings.builder() - .put("index.number_of_replicas", 0) - .put("index.number_of_shards", shardSplits[2]).build()).get()); + .setSettings(Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", shardSplits[2]) + .putNull("index.blocks.write") + .putNull("index.routing.allocation.require._name") + .build()).get()); ensureGreen(); assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", 
"bar")).get(), 20); // let it be allocated anywhere and bump replicas @@ -272,8 +275,14 @@ public class ShrinkIndexIT extends ESIntegTestCase { // now merge source into a single shard index final boolean createWithReplicas = randomBoolean(); - assertAcked(client().admin().indices().prepareResizeIndex("source", "target") - .setSettings(Settings.builder().put("index.number_of_replicas", createWithReplicas ? 1 : 0).build()).get()); + assertAcked( + client().admin().indices().prepareResizeIndex("source", "target") + .setSettings( + Settings.builder() + .put("index.number_of_replicas", createWithReplicas ? 1 : 0) + .putNull("index.blocks.write") + .putNull("index.routing.allocation.require._name") + .build()).get()); ensureGreen(); // resolve true merge node - this is not always the node we required as all shards may be on another node @@ -444,19 +453,20 @@ public class ShrinkIndexIT extends ESIntegTestCase { // check that index sort cannot be set on the target index IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> client().admin().indices().prepareResizeIndex("source", "target") - .setSettings(Settings.builder() - .put("index.number_of_replicas", 0) - .put("index.number_of_shards", "2") - .put("index.sort.field", "foo") - .build()).get()); + () -> client().admin().indices().prepareResizeIndex("source", "target") + .setSettings(Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", "2") + .put("index.sort.field", "foo") + .build()).get()); assertThat(exc.getMessage(), containsString("can't override index sort when resizing an index")); // check that the index sort order of `source` is correctly applied to the `target` assertAcked(client().admin().indices().prepareResizeIndex("source", "target") - .setSettings(Settings.builder() - .put("index.number_of_replicas", 0) - .put("index.number_of_shards", "2").build()).get()); + .setSettings(Settings.builder() + .put("index.number_of_replicas", 0) + 
.put("index.number_of_shards", "2") + .putNull("index.blocks.write").build()).get()); ensureGreen(); flushAndRefresh(); GetSettingsResponse settingsResponse = diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index fe6e980ab42..a0fd40a649e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -193,8 +193,9 @@ public class SplitIndexIT extends ESIntegTestCase { .put("index.blocks.write", true)).get(); ensureGreen(); Settings.Builder firstSplitSettingsBuilder = Settings.builder() - .put("index.number_of_replicas", 0) - .put("index.number_of_shards", firstSplitShards); + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", firstSplitShards) + .putNull("index.blocks.write"); if (sourceShards == 1 && useRoutingPartition == false && randomBoolean()) { // try to set it if we have a source index with 1 shard firstSplitSettingsBuilder.put("index.number_of_routing_shards", secondSplitShards); } @@ -225,10 +226,12 @@ public class SplitIndexIT extends ESIntegTestCase { ensureGreen(); // now split source into a new index assertAcked(client().admin().indices().prepareResizeIndex("first_split", "second_split") - .setResizeType(ResizeType.SPLIT) - .setSettings(Settings.builder() - .put("index.number_of_replicas", 0) - .put("index.number_of_shards", secondSplitShards).build()).get()); + .setResizeType(ResizeType.SPLIT) + .setSettings(Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", secondSplitShards) + .putNull("index.blocks.write") + .build()).get()); ensureGreen(); assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs); // let it be allocated anywhere and bump replicas @@ -340,7 
+343,11 @@ public class SplitIndexIT extends ESIntegTestCase { // now split source into target final Settings splitSettings = - Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", numberOfTargetShards).build(); + Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", numberOfTargetShards) + .putNull("index.blocks.write") + .build(); assertAcked(client().admin().indices().prepareResizeIndex("source", "target") .setResizeType(ResizeType.SPLIT) .setSettings(splitSettings).get()); @@ -396,8 +403,10 @@ public class SplitIndexIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareResizeIndex("source", "target") .setResizeType(ResizeType.SPLIT) .setSettings(Settings.builder() - .put("index.number_of_replicas", createWithReplicas ? 1 : 0) - .put("index.number_of_shards", 2).build()).get()); + .put("index.number_of_replicas", createWithReplicas ? 1 : 0) + .put("index.number_of_shards", 2) + .putNull("index.blocks.write") + .build()).get()); ensureGreen(); final ClusterState state = client().admin().cluster().prepareState().get().getState(); @@ -507,10 +516,12 @@ public class SplitIndexIT extends ESIntegTestCase { // check that the index sort order of `source` is correctly applied to the `target` assertAcked(client().admin().indices().prepareResizeIndex("source", "target") - .setResizeType(ResizeType.SPLIT) - .setSettings(Settings.builder() - .put("index.number_of_replicas", 0) - .put("index.number_of_shards", 4).build()).get()); + .setResizeType(ResizeType.SPLIT) + .setSettings(Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", 4) + .putNull("index.blocks.write") + .build()).get()); ensureGreen(); flushAndRefresh(); GetSettingsResponse settingsResponse = diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java index 
7a0a6816a66..12eb69bef39 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java @@ -50,19 +50,11 @@ public class ScriptDocValuesDatesTests extends ESTestCase { values[d][i] = expectedDates[d][i].getMillis(); } } - Set warnings = new HashSet<>(); - Dates dates = wrap(values, deprecationMessage -> { - warnings.add(deprecationMessage); - /* Create a temporary directory to prove we are running with the - * server's permissions. */ - createTempDir(); - }); - + Dates dates = wrap(values); for (int round = 0; round < 10; round++) { int d = between(0, values.length - 1); dates.setNextDocId(d); assertEquals(expectedDates[d].length > 0 ? expectedDates[d][0] : new DateTime(0, DateTimeZone.UTC), dates.getValue()); - assertEquals(expectedDates[d].length > 0 ? expectedDates[d][0] : new DateTime(0, DateTimeZone.UTC), dates.getDate()); assertEquals(values[d].length, dates.size()); for (int i = 0; i < values[d].length; i++) { @@ -72,33 +64,9 @@ public class ScriptDocValuesDatesTests extends ESTestCase { Exception e = expectThrows(UnsupportedOperationException.class, () -> dates.add(new DateTime())); assertEquals("doc values are unmodifiable", e.getMessage()); } - - /* - * Invoke getDates without any privileges to verify that - * it still works without any. In particularly, this - * verifies that the callback that we've configured - * above works. That callback creates a temporary - * directory which is not possible with "noPermissions". 
- */ - PermissionCollection noPermissions = new Permissions(); - AccessControlContext noPermissionsAcc = new AccessControlContext( - new ProtectionDomain[] { - new ProtectionDomain(null, noPermissions) - } - ); - AccessController.doPrivileged(new PrivilegedAction() { - public Void run() { - dates.getDates(); - return null; - } - }, noPermissionsAcc); - - assertThat(warnings, containsInAnyOrder( - "getDate is no longer necessary on date fields as the value is now a date.", - "getDates is no longer necessary on date fields as the values are now dates.")); } - private Dates wrap(long[][] values, Consumer deprecationHandler) { + private Dates wrap(long[][] values) { return new Dates(new AbstractSortedNumericDocValues() { long[] current; int i; @@ -117,6 +85,6 @@ public class ScriptDocValuesDatesTests extends ESTestCase { public long nextValue() { return current[i++]; } - }, deprecationHandler); + }); } } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java index 8b20e9a9f3a..5fd33da27e3 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java @@ -47,7 +47,7 @@ public class ScriptDocValuesLongsTests extends ESTestCase { values[d][i] = randomLong(); } } - Longs longs = wrap(values, deprecationMessage -> {fail("unexpected deprecation: " + deprecationMessage);}); + Longs longs = wrap(values); for (int round = 0; round < 10; round++) { int d = between(0, values.length - 1); @@ -66,69 +66,7 @@ public class ScriptDocValuesLongsTests extends ESTestCase { } } - public void testDates() throws IOException { - long[][] values = new long[between(3, 10)][]; - ReadableDateTime[][] dates = new ReadableDateTime[values.length][]; - for (int d = 0; d < values.length; d++) { - values[d] = new long[randomBoolean() ? 
randomBoolean() ? 0 : 1 : between(2, 100)]; - dates[d] = new ReadableDateTime[values[d].length]; - for (int i = 0; i < values[d].length; i++) { - dates[d][i] = new DateTime(randomNonNegativeLong(), DateTimeZone.UTC); - values[d][i] = dates[d][i].getMillis(); - } - } - Set warnings = new HashSet<>(); - Longs longs = wrap(values, deprecationMessage -> { - warnings.add(deprecationMessage); - /* Create a temporary directory to prove we are running with the - * server's permissions. */ - createTempDir(); - }); - - for (int round = 0; round < 10; round++) { - int d = between(0, values.length - 1); - longs.setNextDocId(d); - assertEquals(dates[d].length > 0 ? dates[d][0] : new DateTime(0, DateTimeZone.UTC), longs.getDate()); - - assertEquals(values[d].length, longs.getDates().size()); - for (int i = 0; i < values[d].length; i++) { - assertEquals(dates[d][i], longs.getDates().get(i)); - } - - Exception e = expectThrows(UnsupportedOperationException.class, () -> longs.getDates().add(new DateTime())); - assertEquals("doc values are unmodifiable", e.getMessage()); - } - - /* - * Invoke getDates without any privileges to verify that - * it still works without any. In particularly, this - * verifies that the callback that we've configured - * above works. That callback creates a temporary - * directory which is not possible with "noPermissions". - */ - PermissionCollection noPermissions = new Permissions(); - AccessControlContext noPermissionsAcc = new AccessControlContext( - new ProtectionDomain[] { - new ProtectionDomain(null, noPermissions) - } - ); - AccessController.doPrivileged(new PrivilegedAction() { - public Void run() { - try { - longs.getDates(); - } catch (IOException e) { - throw new RuntimeException("unexpected", e); - } - return null; - } - }, noPermissionsAcc); - - assertThat(warnings, containsInAnyOrder( - "getDate on numeric fields is deprecated. Use a date field to get dates.", - "getDates on numeric fields is deprecated. 
Use a date field to get dates.")); - } - - private Longs wrap(long[][] values, Consumer deprecationCallback) { + private Longs wrap(long[][] values) { return new Longs(new AbstractSortedNumericDocValues() { long[] current; int i; @@ -147,6 +85,6 @@ public class ScriptDocValuesLongsTests extends ESTestCase { public long nextValue() { return current[i++]; } - }, deprecationCallback); + }); } } diff --git a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 59156c91558..108b41d54a0 100644 --- a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -470,7 +470,8 @@ public class CorruptedFileIT extends ESIntegTestCase { * TODO once checksum verification on snapshotting is implemented this test needs to be fixed or split into several * parts... We should also corrupt files on the actual snapshot and check that we don't restore the corrupted shard. */ - @TestLogging("org.elasticsearch.repositories:TRACE,org.elasticsearch.snapshots:TRACE") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30577") + @TestLogging("org.elasticsearch.repositories:TRACE,org.elasticsearch.snapshots:TRACE,org.elasticsearch.index.engine:DEBUG") public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException { int numDocs = scaledRandomIntBetween(100, 1000); internalCluster().ensureAtLeastNumDataNodes(2); diff --git a/server/src/test/java/org/elasticsearch/plugins/MetaPluginInfoTests.java b/server/src/test/java/org/elasticsearch/plugins/MetaPluginInfoTests.java deleted file mode 100644 index c54a13bd302..00000000000 --- a/server/src/test/java/org/elasticsearch/plugins/MetaPluginInfoTests.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.plugins; - -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.Version; -import org.elasticsearch.test.ESTestCase; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.endsWith; - -@LuceneTestCase.SuppressFileSystems(value = "ExtrasFS") -public class MetaPluginInfoTests extends ESTestCase { - - public void testReadFromProperties() throws Exception { - Path pluginDir = createTempDir().resolve("fake-meta-plugin"); - PluginTestUtil.writeMetaPluginProperties(pluginDir, - "description", "fake desc", - "name", "my_meta_plugin"); - MetaPluginInfo info = MetaPluginInfo.readFromProperties(pluginDir); - assertEquals("my_meta_plugin", info.getName()); - assertEquals("fake desc", info.getDescription()); - } - - public void testReadFromPropertiesNameMissing() throws Exception { - Path pluginDir = createTempDir().resolve("fake-meta-plugin"); - PluginTestUtil.writeMetaPluginProperties(pluginDir); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> MetaPluginInfo.readFromProperties(pluginDir)); - 
assertThat(e.getMessage(), containsString("property [name] is missing for")); - - PluginTestUtil.writeMetaPluginProperties(pluginDir, "name", ""); - e = expectThrows(IllegalArgumentException.class, () -> MetaPluginInfo.readFromProperties(pluginDir)); - assertThat(e.getMessage(), containsString("property [name] is missing for")); - } - - public void testReadFromPropertiesDescriptionMissing() throws Exception { - Path pluginDir = createTempDir().resolve("fake-meta-plugin"); - PluginTestUtil.writeMetaPluginProperties(pluginDir, "name", "fake-meta-plugin"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> MetaPluginInfo.readFromProperties(pluginDir)); - assertThat(e.getMessage(), containsString("[description] is missing")); - } - - public void testUnknownProperties() throws Exception { - Path pluginDir = createTempDir().resolve("fake-meta-plugin"); - PluginTestUtil.writeMetaPluginProperties(pluginDir, - "extra", "property", - "unknown", "property", - "description", "fake desc", - "name", "my_meta_plugin"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> MetaPluginInfo.readFromProperties(pluginDir)); - assertThat(e.getMessage(), containsString("Unknown properties in meta plugin descriptor")); - } - - public void testExtractAllPluginsWithDuplicates() throws Exception { - Path pluginDir = createTempDir().resolve("plugins"); - // Simple plugin - Path plugin1 = pluginDir.resolve("plugin1"); - Files.createDirectories(plugin1); - PluginTestUtil.writePluginProperties(plugin1, - "description", "fake desc", - "name", "plugin1", - "version", "1.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "FakePlugin"); - - // Meta plugin - Path metaPlugin = pluginDir.resolve("meta_plugin"); - Files.createDirectory(metaPlugin); - PluginTestUtil.writeMetaPluginProperties(metaPlugin, - "description", "fake desc", - "name", 
"meta_plugin"); - Path plugin2 = metaPlugin.resolve("plugin1"); - Files.createDirectory(plugin2); - PluginTestUtil.writePluginProperties(plugin2, - "description", "fake desc", - "name", "plugin1", - "version", "1.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "FakePlugin"); - Path plugin3 = metaPlugin.resolve("plugin2"); - Files.createDirectory(plugin3); - PluginTestUtil.writePluginProperties(plugin3, - "description", "fake desc", - "name", "plugin2", - "version", "1.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "FakePlugin"); - - IllegalStateException exc = - expectThrows(IllegalStateException.class, () -> PluginsService.findPluginDirs(pluginDir)); - assertThat(exc.getMessage(), containsString("duplicate plugin")); - assertThat(exc.getMessage(), endsWith("plugin1")); - } -} diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 4d2eb6f2f36..ffecaca4525 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -620,34 +620,7 @@ public class PluginsServiceTests extends ESTestCase { Files.copy(jar, fake.resolve("plugin.jar")); } - final Path fakeMeta = plugins.resolve("fake-meta"); - - PluginTestUtil.writeMetaPluginProperties(fakeMeta, "description", "description", "name", "fake-meta"); - - final Path fakeMetaCore = fakeMeta.resolve("fake-meta-core"); - PluginTestUtil.writePluginProperties( - fakeMetaCore, - "description", "description", - "name", "fake-meta-core", - "version", "1.0.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "test.DummyPlugin"); - try (InputStream jar 
= PluginsServiceTests.class.getResourceAsStream("dummy-plugin.jar")) { - Files.copy(jar, fakeMetaCore.resolve("plugin.jar")); - } - - assertThat(PluginsService.findPluginDirs(plugins), containsInAnyOrder(fake, fakeMetaCore)); - } - - public void testMissingMandatoryPlugin() { - final Settings settings = - Settings.builder() - .put("path.home", createTempDir()) - .put("plugin.mandatory", "fake") - .build(); - final IllegalStateException e = expectThrows(IllegalStateException.class, () -> newPluginsService(settings)); - assertThat(e, hasToString(containsString("missing mandatory plugins [fake]"))); + assertThat(PluginsService.findPluginDirs(plugins), containsInAnyOrder(fake)); } public void testExistingMandatoryClasspathPlugin() { @@ -696,38 +669,4 @@ public class PluginsServiceTests extends ESTestCase { .build(); newPluginsService(settings); } - - public void testExistingMandatoryMetaPlugin() throws IOException { - // This test opens a child classloader, reading a jar under the test temp - // dir (a dummy plugin). Classloaders are closed by GC, so when test teardown - // occurs the jar is deleted while the classloader is still open. However, on - // windows, files cannot be deleted when they are still open by a process. 
- assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - final Path pathHome = createTempDir(); - final Path plugins = pathHome.resolve("plugins"); - final Path fakeMeta = plugins.resolve("fake-meta"); - - PluginTestUtil.writeMetaPluginProperties(fakeMeta, "description", "description", "name", "fake-meta"); - - final Path fake = fakeMeta.resolve("fake"); - PluginTestUtil.writePluginProperties( - fake, - "description", "description", - "name", "fake", - "version", "1.0.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "test.DummyPlugin"); - try (InputStream jar = PluginsServiceTests.class.getResourceAsStream("dummy-plugin.jar")) { - Files.copy(jar, fake.resolve("plugin.jar")); - } - - final Settings settings = - Settings.builder() - .put("path.home", pathHome) - .put("plugin.mandatory", "fake-meta") - .build(); - newPluginsService(settings); - } - } diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java index 2c30184ee4e..2eeb90d9ec9 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java @@ -70,10 +70,8 @@ public class RestResizeHandlerTests extends ESTestCase { assertThat(e, hasToString(containsString("parameter [copy_settings] can not be explicitly set to [false]"))); } else { handler.prepareRequest(request, mock(NodeClient.class)); - if (copySettings == null) { - assertWarnings( - "resize operations without copying settings is deprecated; " - + "set parameter [copy_settings] to [true] for future default behavior"); + if ("".equals(copySettings) || "true".equals(copySettings)) { + assertWarnings("parameter [copy_settings] is deprecated and will be removed in 
8.0.0"); } } } diff --git a/server/src/test/java/org/elasticsearch/routing/PartitionedRoutingIT.java b/server/src/test/java/org/elasticsearch/routing/PartitionedRoutingIT.java index 74fbcc20a02..37ea53332af 100644 --- a/server/src/test/java/org/elasticsearch/routing/PartitionedRoutingIT.java +++ b/server/src/test/java/org/elasticsearch/routing/PartitionedRoutingIT.java @@ -108,10 +108,11 @@ public class PartitionedRoutingIT extends ESIntegTestCase { logger.info("--> shrinking index [" + previousIndex + "] to [" + index + "]"); client().admin().indices().prepareResizeIndex(previousIndex, index) - .setSettings(Settings.builder() - .put("index.number_of_shards", currentShards) - .put("index.number_of_replicas", numberOfReplicas()) - .build()).get(); + .setSettings(Settings.builder() + .put("index.number_of_shards", currentShards) + .put("index.number_of_replicas", numberOfReplicas()) + .putNull("index.routing.allocation.require._name") + .build()).get(); ensureGreen(); } } diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 2bf691e6a36..842748107d1 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -76,7 +76,7 @@ public class ExplainableScriptIT extends ESIntegTestCase { @Override public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { assert scriptSource.equals("explainable_script"); - assert context == SearchScript.CONTEXT; + assert context == SearchScript.SCRIPT_SCORE_CONTEXT; SearchScript.Factory factory = (p, lookup) -> new SearchScript.LeafFactory() { @Override public SearchScript newInstance(LeafReaderContext context) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 8b31680370b..0396b8ac788 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -217,6 +217,9 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { // should we expose it, or maybe think about higher level integration of the // fake term frequency feature (LUCENE-7854) .put("delimitedtermfrequency", Void.class) + // LUCENE-8273: ConditionalTokenFilter allows analysis chains to skip + // particular token filters based on the attributes of the current token. + .put("termexclusion", Void.class) .immutableMap(); diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java b/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java index 5a92c99d618..ff996c800b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java @@ -27,9 +27,6 @@ import java.util.Properties; /** Utility methods for testing plugins */ public class PluginTestUtil { - public static void writeMetaPluginProperties(Path pluginDir, String... stringProps) throws IOException { - writeProperties(pluginDir.resolve(MetaPluginInfo.ES_META_PLUGIN_PROPERTIES), stringProps); - } public static void writePluginProperties(Path pluginDir, String... 
stringProps) throws IOException { writeProperties(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES), stringProps); diff --git a/x-pack/docs/en/index.asciidoc b/x-pack/docs/en/index.asciidoc deleted file mode 100644 index 3133053c5bd..00000000000 --- a/x-pack/docs/en/index.asciidoc +++ /dev/null @@ -1,22 +0,0 @@ - -include::{es-repo-dir}/index-shared1.asciidoc[] - -include::setup/setup-xes.asciidoc[] - -include::{es-repo-dir}/index-shared2.asciidoc[] - -include::{es-repo-dir}/index-shared3.asciidoc[] - -include::sql/index.asciidoc[] - -include::monitoring/index.asciidoc[] - -include::rollup/index.asciidoc[] - -include::rest-api/index.asciidoc[] - -include::commands/index.asciidoc[] - -include::{es-repo-dir}/index-shared4.asciidoc[] - -include::{es-repo-dir}/index-shared5.asciidoc[] diff --git a/x-pack/docs/en/ml/forecasting.asciidoc b/x-pack/docs/en/ml/forecasting.asciidoc index 95693a1677f..cd01aa0fb77 100644 --- a/x-pack/docs/en/ml/forecasting.asciidoc +++ b/x-pack/docs/en/ml/forecasting.asciidoc @@ -59,10 +59,7 @@ For more information about any of these functions, see <>. * Forecasts run concurrently with real-time {ml} analysis. That is to say, {ml} analysis does not stop while forecasts are generated. Forecasts can have an impact on {ml} jobs, however, especially in terms of memory usage. For this -reason, forecasts run only if the model memory status is acceptable and the -snapshot models for the forecast do not require more than 20 MB. If these memory -limits are reached, consider splitting the job into multiple smaller jobs and -creating forecasts for these. +reason, forecasts run only if the model memory status is acceptable. * The job must be open when you create a forecast. Otherwise, an error occurs. * If there is insufficient data to generate any meaningful predictions, an error occurs. 
In general, forecasts that are created early in the learning phase diff --git a/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc b/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc index 6ee9c29b44f..6cab0de510e 100644 --- a/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc +++ b/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc @@ -1,10 +1,6 @@ [role="xpack"] [[bootstrap-checks-xpack]] == Bootstrap Checks for {xpack} -++++ -Bootstrap Checks -++++ - In addition to the <>, there are checks that are specific to {xpack} features. diff --git a/x-pack/docs/en/setup/setup-xes.asciidoc b/x-pack/docs/en/setup/setup-xes.asciidoc index c14c7c585de..b0003b1e0b7 100644 --- a/x-pack/docs/en/setup/setup-xes.asciidoc +++ b/x-pack/docs/en/setup/setup-xes.asciidoc @@ -1,9 +1,7 @@ [role="xpack"] [[setup-xpack]] -= Set up {xpack} +== Set up {xpack} -[partintro] --- {xpack} is an Elastic Stack extension that provides security, alerting, monitoring, reporting, machine learning, and many other capabilities. By default, when you install {es}, {xpack} is installed. @@ -20,10 +18,3 @@ https://www.elastic.co/subscriptions. 
* <> * <> --- - -include::{xes-repo-dir}/monitoring/configuring-monitoring.asciidoc[] -include::{xes-repo-dir}/security/configuring-es.asciidoc[] -include::setup-xclient.asciidoc[] -include::{xes-repo-dir}/settings/configuring-xes.asciidoc[] -include::bootstrap-checks-xes.asciidoc[] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index c5402152fea..bdefabdb294 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -286,7 +286,8 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING, DataCountsReporter.ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING, AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE, - AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE)); + AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE, + AutodetectProcessManager.MIN_DISK_SPACE_OFF_HEAP)); } public Settings additionalSettings() { @@ -403,6 +404,9 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu // This object's constructor attaches to the license state, so there's no need to retain another reference to it new InvalidLicenseEnforcer(settings, getLicenseState(), threadPool, datafeedManager, autodetectProcessManager); + // run node startup tasks + autodetectProcessManager.onNodeStartup(); + return Arrays.asList( mlLifeCycleService, jobProvider, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java index 3b09377b477..aaa59e7e8ca 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -28,6 +30,7 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManage import org.elasticsearch.xpack.ml.job.process.autodetect.params.ForecastParams; import java.io.IOException; +import java.nio.file.Path; import java.util.List; import java.util.function.Consumer; @@ -36,6 +39,8 @@ import static org.elasticsearch.xpack.core.ml.action.ForecastJobAction.Request.D public class TransportForecastJobAction extends TransportJobTaskAction { + private static final ByteSizeValue FORECAST_LOCAL_STORAGE_LIMIT = new ByteSizeValue(500, ByteSizeUnit.MB); + private final JobProvider jobProvider; @Inject public TransportForecastJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, @@ -73,6 +78,13 @@ public class TransportForecastJobAction extends TransportJobTaskAction { if (e == null) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProvider.java new file mode 100644 index 00000000000..8a0268a8d07 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProvider.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.job.process; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.env.Environment; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +/** + * Provide storage for native components. + */ +public class NativeStorageProvider { + + private static final Logger LOGGER = Loggers.getLogger(NativeStorageProvider.class); + + + private static final String LOCAL_STORAGE_SUBFOLDER = "ml-local-data"; + private static final String LOCAL_STORAGE_TMP_FOLDER = "tmp"; + + private final Environment environment; + + // do not allow any usage below this threshold + private final ByteSizeValue minLocalStorageAvailable; + + public NativeStorageProvider(Environment environment, ByteSizeValue minDiskSpaceOffHeap) { + this.environment = environment; + this.minLocalStorageAvailable = minDiskSpaceOffHeap; + } + + /** + * Removes any temporary storage leftovers. + * + * Removes all temp files and folder which might be there as a result of an + * unclean node shutdown or broken clients. + * + * Do not call while there are running jobs. + * + * @throws IOException if cleanup fails + */ + public void cleanupLocalTmpStorageInCaseOfUncleanShutdown() throws IOException { + for (Path p : environment.dataFiles()) { + IOUtils.rm(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER)); + } + } + + /** + * Tries to find local storage for storing temporary data. 
+ * + * @param uniqueIdentifier An identifier to be used as sub folder + * @param requestedSize The maximum size required + * @return Path for temporary storage if available, null otherwise + */ + public Path tryGetLocalTmpStorage(String uniqueIdentifier, ByteSizeValue requestedSize) { + for (Path path : environment.dataFiles()) { + try { + if (getUsableSpace(path) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes()) { + Path tmpDirectory = path.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER).resolve(uniqueIdentifier); + Files.createDirectories(tmpDirectory); + return tmpDirectory; + } + } catch (IOException e) { + LOGGER.debug("Failed to obtain information about path [{}]: {}", path, e); + } + + } + LOGGER.debug("Failed to find native storage for [{}], returning null", uniqueIdentifier); + return null; + } + + public boolean localTmpStorageHasEnoughSpace(Path path, ByteSizeValue requestedSize) { + Path realPath = path.toAbsolutePath(); + for (Path p : environment.dataFiles()) { + try { + if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) { + return getUsableSpace(p) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes(); + } + } catch (IOException e) { + LOGGER.debug("Failed to obtain information about path [{}]: {}", path, e); + } + } + + LOGGER.debug("Not enough space left for path [{}]", path); + return false; + } + + /** + * Delete temporary storage, previously allocated + * + * @param path + * Path to temporary storage + * @throws IOException + * if path can not be cleaned up + */ + public void cleanupLocalTmpStorage(Path path) throws IOException { + // do not allow to breakout from the tmp storage provided + Path realPath = path.toAbsolutePath(); + for (Path p : environment.dataFiles()) { + if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) { + IOUtils.rm(path); + } + } + } + + long getUsableSpace(Path path) throws IOException { 
+ long freeSpaceInBytes = Environment.getFileStore(path).getUsableSpace(); + + /* See: https://bugs.openjdk.java.net/browse/JDK-8162520 */ + if (freeSpaceInBytes < 0) { + freeSpaceInBytes = Long.MAX_VALUE; + } + return freeSpaceInBytes; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index cca591682d8..d3a848ef382 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.job.process.autodetect; import org.elasticsearch.common.xcontent.XContentElasticsearchExtension; import org.elasticsearch.core.internal.io.IOUtils; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; @@ -15,11 +16,12 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.AnalysisRegistry; @@ -47,6 +49,7 @@ 
import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersiste import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; import org.elasticsearch.xpack.ml.job.process.DataCountsReporter; +import org.elasticsearch.xpack.ml.job.process.NativeStorageProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.output.AutoDetectResultProcessor; import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; @@ -59,6 +62,7 @@ import org.elasticsearch.xpack.ml.notifications.Auditor; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Path; import java.time.Duration; import java.time.ZonedDateTime; import java.util.Date; @@ -96,6 +100,10 @@ public class AutodetectProcessManager extends AbstractComponent { public static final Setting MAX_OPEN_JOBS_PER_NODE = Setting.intSetting("xpack.ml.max_open_jobs", MAX_RUNNING_JOBS_PER_NODE, 1, Property.NodeScope); + // Undocumented setting for integration test purposes + public static final Setting MIN_DISK_SPACE_OFF_HEAP = + Setting.byteSizeSetting("xpack.ml.min_disk_space_off_heap", new ByteSizeValue(5, ByteSizeUnit.GB), Property.NodeScope); + private final Client client; private final Environment environment; private final ThreadPool threadPool; @@ -107,8 +115,12 @@ public class AutodetectProcessManager extends AbstractComponent { private final JobResultsPersister jobResultsPersister; private final JobDataCountsPersister jobDataCountsPersister; + private NativeStorageProvider nativeStorageProvider; private final ConcurrentMap processByAllocation = new ConcurrentHashMap<>(); + // a map that manages the allocation of temporary space to jobs + private final ConcurrentMap nativeTmpStorage = new ConcurrentHashMap<>(); + private final int maxAllowedRunningJobs; private final NamedXContentRegistry xContentRegistry; @@ 
-133,6 +145,15 @@ public class AutodetectProcessManager extends AbstractComponent { this.jobResultsPersister = jobResultsPersister; this.jobDataCountsPersister = jobDataCountsPersister; this.auditor = auditor; + this.nativeStorageProvider = new NativeStorageProvider(environment, MIN_DISK_SPACE_OFF_HEAP.get(settings)); + } + + public void onNodeStartup() { + try { + nativeStorageProvider.cleanupLocalTmpStorageInCaseOfUncleanShutdown(); + } catch (Exception e) { + logger.warn("Failed to cleanup native storage from previous invocation", e); + } } public synchronized void closeAllJobsOnThisNode(String reason) throws IOException { @@ -251,6 +272,28 @@ public class AutodetectProcessManager extends AbstractComponent { }); } + /** + * Request temporary storage to be used for the job + * + * @param jobTask The job task + * @param requestedSize requested size + * @return a Path to local storage or null if storage is not available + */ + public Path tryGetTmpStorage(JobTask jobTask, ByteSizeValue requestedSize) { + String jobId = jobTask.getJobId(); + Path path = nativeTmpStorage.get(jobId); + if (path == null) { + path = nativeStorageProvider.tryGetLocalTmpStorage(jobId, requestedSize); + if (path != null) { + nativeTmpStorage.put(jobId, path); + } + } else if (!nativeStorageProvider.localTmpStorageHasEnoughSpace(path, requestedSize)) { + // the previous tmp location ran out of disk space, do not allow further usage + return null; + } + return path; + } + /** * Do a forecast for the running job. 
* @@ -258,10 +301,11 @@ public class AutodetectProcessManager extends AbstractComponent { * @param params Forecast parameters */ public void forecastJob(JobTask jobTask, ForecastParams params, Consumer handler) { - logger.debug("Forecasting job {}", jobTask.getJobId()); + String jobId = jobTask.getJobId(); + logger.debug("Forecasting job {}", jobId); AutodetectCommunicator communicator = getOpenAutodetectCommunicator(jobTask); if (communicator == null) { - String message = String.format(Locale.ROOT, "Cannot forecast because job [%s] is not open", jobTask.getJobId()); + String message = String.format(Locale.ROOT, "Cannot forecast because job [%s] is not open", jobId); logger.debug(message); handler.accept(ExceptionsHelper.conflictStatusException(message)); return; @@ -271,7 +315,7 @@ public class AutodetectProcessManager extends AbstractComponent { if (e == null) { handler.accept(null); } else { - String msg = String.format(Locale.ROOT, "[%s] exception while forecasting job", jobTask.getJobId()); + String msg = String.format(Locale.ROOT, "[%s] exception while forecasting job", jobId); logger.error(msg, e); handler.accept(ExceptionsHelper.serverError(msg, e)); } @@ -477,6 +521,11 @@ public class AutodetectProcessManager extends AbstractComponent { } } setJobState(jobTask, JobState.FAILED); + try { + removeTmpStorage(jobTask.getJobId()); + } catch (IOException e) { + logger.error(new ParameterizedMessage("[{}] Failed to delete temporary files", jobTask.getJobId()), e); + } }; } @@ -535,6 +584,12 @@ public class AutodetectProcessManager extends AbstractComponent { // thread that gets into this method blocks until the first thread has finished closing the job processContext.unlock(); } + // delete any tmp storage + try { + removeTmpStorage(jobId); + } catch (IOException e) { + logger.error(new ParameterizedMessage("[{}] Failed to delete temporary files", jobId), e); + } } int numberOfOpenJobs() { @@ -613,6 +668,13 @@ public class AutodetectProcessManager extends 
AbstractComponent { return Optional.of(new Tuple<>(communicator.getDataCounts(), communicator.getModelSizeStats())); } + private void removeTmpStorage(String jobId) throws IOException { + Path path = nativeTmpStorage.get(jobId); + if (path != null) { + nativeStorageProvider.cleanupLocalTmpStorage(path); + } + } + ExecutorService createAutodetectExecutorService(ExecutorService executorService) { AutodetectWorkerExecutorService autoDetectWorkerExecutor = new AutodetectWorkerExecutorService(threadPool.getThreadContext()); executorService.submit(autoDetectWorkerExecutor::start); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParams.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParams.java index 0afd3b8a473..f243195c3a7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParams.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParams.java @@ -16,12 +16,14 @@ public class ForecastParams { private final long createTime; private final long duration; private final long expiresIn; + private final String tmpStorage; - private ForecastParams(String forecastId, long createTime, long duration, long expiresIn) { + private ForecastParams(String forecastId, long createTime, long duration, long expiresIn, String tmpStorage) { this.forecastId = forecastId; this.createTime = createTime; this.duration = duration; this.expiresIn = expiresIn; + this.tmpStorage = tmpStorage; } public String getForecastId() { @@ -52,9 +54,18 @@ public class ForecastParams { return expiresIn; } + /** + * Temporary storage forecast is allowed to use for persisting models. 
+ * + * @return path to tmp storage + */ + public String getTmpStorage() { + return tmpStorage; + } + @Override public int hashCode() { - return Objects.hash(forecastId, createTime, duration, expiresIn); + return Objects.hash(forecastId, createTime, duration, expiresIn, tmpStorage); } @Override @@ -69,7 +80,8 @@ public class ForecastParams { return Objects.equals(forecastId, other.forecastId) && Objects.equals(createTime, other.createTime) && Objects.equals(duration, other.duration) - && Objects.equals(expiresIn, other.expiresIn); + && Objects.equals(expiresIn, other.expiresIn) + && Objects.equals(tmpStorage, other.tmpStorage); } public static Builder builder() { @@ -81,6 +93,7 @@ public class ForecastParams { private final long createTimeEpochSecs; private long durationSecs; private long expiresInSecs; + private String tmpStorage; private Builder() { forecastId = UUIDs.base64UUID(); @@ -101,8 +114,13 @@ public class ForecastParams { return this; } + public Builder tmpStorage(String tmpStorage) { + this.tmpStorage = tmpStorage; + return this; + } + public ForecastParams build() { - return new ForecastParams(forecastId, createTimeEpochSecs, durationSecs, expiresInSecs); + return new ForecastParams(forecastId, createTimeEpochSecs, durationSecs, expiresInSecs, tmpStorage); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/ControlMsgToProcessWriter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/ControlMsgToProcessWriter.java index 2a91797d28d..2c026ec1550 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/ControlMsgToProcessWriter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/ControlMsgToProcessWriter.java @@ -164,6 +164,9 @@ public class ControlMsgToProcessWriter { if (params.getExpiresIn() != -1) { builder.field("expires_in", params.getExpiresIn()); } + if 
(params.getTmpStorage() != null) { + builder.field("tmp_storage", params.getTmpStorage()); + } builder.endObject(); writeMessage(FORECAST_MESSAGE_CODE + Strings.toString(builder)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProviderTests.java new file mode 100644 index 00000000000..3103e76c82b --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProviderTests.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.job.process; + +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESTestCase; +import org.junit.Assert; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.any; + +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.doAnswer; + +public class NativeStorageProviderTests extends ESTestCase { + + public void testTmpStorage() throws IOException { + Map storage = new HashMap<>(); + Path tmpDir = createTempDir(); + + storage.put(tmpDir, new ByteSizeValue(6, ByteSizeUnit.GB).getBytes()); + NativeStorageProvider storageProvider = createNativeStorageProvider(storage); + + Assert.assertNotNull( + 
storageProvider.tryGetLocalTmpStorage(randomAlphaOfLengthBetween(4, 10), new ByteSizeValue(100, ByteSizeUnit.BYTES))); + Assert.assertNull(storageProvider.tryGetLocalTmpStorage(randomAlphaOfLengthBetween(4, 10), + new ByteSizeValue(1024 * 1024 * 1024 + 1, ByteSizeUnit.BYTES))); + + String id = randomAlphaOfLengthBetween(4, 10); + Path path = storageProvider.tryGetLocalTmpStorage(id, new ByteSizeValue(1, ByteSizeUnit.GB)); + Assert.assertNotNull(path); + + Assert.assertEquals(tmpDir.resolve("ml-local-data").resolve("tmp").resolve(id).toString(), path.toString()); + } + + public void testTmpStorageChooseDisk() throws IOException { + Map storage = new HashMap<>(); + Path tmpDir = createTempDir(); + + // low disk space + Path disk1 = tmpDir.resolve(randomAlphaOfLengthBetween(4, 10)); + storage.put(disk1, new ByteSizeValue(1, ByteSizeUnit.GB).getBytes()); + + // sufficient disk space + Path disk2 = tmpDir.resolve(randomAlphaOfLengthBetween(4, 10)); + storage.put(disk2, new ByteSizeValue(20, ByteSizeUnit.GB).getBytes()); + + NativeStorageProvider storageProvider = createNativeStorageProvider(storage); + + String id = randomAlphaOfLengthBetween(4, 10); + Path path = storageProvider.tryGetLocalTmpStorage(id, new ByteSizeValue(1, ByteSizeUnit.GB)); + Assert.assertNotNull(path); + + // should resolve to disk2 as disk1 is low on space + Assert.assertEquals(disk2.resolve("ml-local-data").resolve("tmp").resolve(id).toString(), path.toString()); + } + + public void testTmpStorageCleanup() throws IOException { + Map storage = new HashMap<>(); + Path tmpDir = createTempDir(); + storage.put(tmpDir, new ByteSizeValue(6, ByteSizeUnit.GB).getBytes()); + NativeStorageProvider storageProvider = createNativeStorageProvider(storage); + String id = randomAlphaOfLengthBetween(4, 10); + + Path path = storageProvider.tryGetLocalTmpStorage(id, new ByteSizeValue(1, ByteSizeUnit.KB)); + + Assert.assertTrue(Files.exists(path)); + Path testFile = PathUtils.get(path.toString(), "testFile"); + 
BufferedWriter writer = Files.newBufferedWriter(testFile, StandardCharsets.UTF_8); + writer.write("created by NativeStorageProviderTests::testTmpStorageDelete"); + + writer.close(); + Assert.assertTrue(Files.exists(testFile)); + Assert.assertTrue(Files.isRegularFile(testFile)); + + // the native component should cleanup itself, but assume it has crashed + storageProvider.cleanupLocalTmpStorage(path); + Assert.assertFalse(Files.exists(testFile)); + Assert.assertFalse(Files.exists(path)); + } + + public void testTmpStorageCleanupOnStart() throws IOException { + Map storage = new HashMap<>(); + Path tmpDir = createTempDir(); + storage.put(tmpDir, new ByteSizeValue(6, ByteSizeUnit.GB).getBytes()); + NativeStorageProvider storageProvider = createNativeStorageProvider(storage); + String id = randomAlphaOfLengthBetween(4, 10); + + Path path = storageProvider.tryGetLocalTmpStorage(id, new ByteSizeValue(1, ByteSizeUnit.KB)); + + Assert.assertTrue(Files.exists(path)); + Path testFile = PathUtils.get(path.toString(), "testFile"); + + BufferedWriter writer = Files.newBufferedWriter(testFile, StandardCharsets.UTF_8); + writer.write("created by NativeStorageProviderTests::testTmpStorageWipe"); + + writer.close(); + Assert.assertTrue(Files.exists(testFile)); + Assert.assertTrue(Files.isRegularFile(testFile)); + + // create a new storage provider to test the case of a crashed node + storageProvider = createNativeStorageProvider(storage); + storageProvider.cleanupLocalTmpStorageInCaseOfUncleanShutdown(); + Assert.assertFalse(Files.exists(testFile)); + Assert.assertFalse(Files.exists(path)); + } + + private NativeStorageProvider createNativeStorageProvider(Map paths) throws IOException { + Environment environment = mock(Environment.class); + + when(environment.dataFiles()).thenReturn(paths.keySet().toArray(new Path[paths.size()])); + NativeStorageProvider storageProvider = spy(new NativeStorageProvider(environment, new ByteSizeValue(5, ByteSizeUnit.GB))); + + doAnswer(invocation -> 
{ + return paths.getOrDefault(invocation.getArguments()[0], Long.valueOf(0)).longValue(); + } + + ).when(storageProvider).getUsableSpace(any(Path.class)); + + return storageProvider; + } + +} diff --git a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..50392f59374 --- /dev/null +++ b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +bf8f9e8284a54af18545574cb4a530da0deb968a \ No newline at end of file diff --git a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c25718d0a9e..00000000000 --- a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5167fb0a14434cb10ec3224e9e32ca668e9f9ad4 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 00000000000..50392f59374 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +bf8f9e8284a54af18545574cb4a530da0deb968a \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c25718d0a9e..00000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5167fb0a14434cb10ec3224e9e32ca668e9f9ad4 \ No newline at end of file diff --git a/x-pack/qa/ml-native-tests/build.gradle b/x-pack/qa/ml-native-tests/build.gradle index 01079ed967e..ab2ac3a3b89 100644 --- 
a/x-pack/qa/ml-native-tests/build.gradle +++ b/x-pack/qa/ml-native-tests/build.gradle @@ -62,6 +62,7 @@ integTestCluster { setting 'xpack.security.transport.ssl.verification_mode', 'certificate' setting 'xpack.security.audit.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.ml.min_disk_space_off_heap', '200mb' keystoreSetting 'bootstrap.password', 'x-pack-test-password' keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass' diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java index f54f1bf54e9..4e0aa9c7e06 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.lessThan; public class AutodetectMemoryLimitIT extends MlNativeAutodetectIntegTestCase { @After - public void cleanUpTest() throws Exception { + public void cleanUpTest() { cleanUp(); } @@ -75,19 +75,10 @@ public class AutodetectMemoryLimitIT extends MlNativeAutodetectIntegTestCase { closeJob(job.getId()); // Assert we haven't violated the limit too much - // and a balance of partitions/by fields were created GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0); ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); assertThat(modelSizeStats.getModelBytes(), lessThan(35000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(30000000L)); - - // it is important to check that while we rejected partitions, we still managed - // to create some by fields; it shows we utilize memory in a meaningful way - // rather than creating empty partitions - assertThat(modelSizeStats.getTotalPartitionFieldCount(), 
lessThan(900L)); - assertThat(modelSizeStats.getTotalPartitionFieldCount(), greaterThan(650L)); - assertThat(modelSizeStats.getTotalByFieldCount(), lessThan(900L)); - assertThat(modelSizeStats.getTotalByFieldCount(), greaterThan(650L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } @@ -133,8 +124,6 @@ public class AutodetectMemoryLimitIT extends MlNativeAutodetectIntegTestCase { ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); assertThat(modelSizeStats.getModelBytes(), lessThan(36000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(30000000L)); - assertThat(modelSizeStats.getTotalByFieldCount(), lessThan(1900L)); - assertThat(modelSizeStats.getTotalByFieldCount(), greaterThan(1500L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } @@ -184,9 +173,6 @@ public class AutodetectMemoryLimitIT extends MlNativeAutodetectIntegTestCase { ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); assertThat(modelSizeStats.getModelBytes(), lessThan(36000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(24000000L)); - assertThat(modelSizeStats.getTotalByFieldCount(), equalTo(7L)); - assertThat(modelSizeStats.getTotalOverFieldCount(), greaterThan(40000L)); - assertThat(modelSizeStats.getTotalOverFieldCount(), lessThan(50000L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } @@ -237,7 +223,6 @@ public class AutodetectMemoryLimitIT extends MlNativeAutodetectIntegTestCase { ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); assertThat(modelSizeStats.getModelBytes(), lessThan(90000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(75000000L)); - assertThat(modelSizeStats.getTotalOverFieldCount(), greaterThan(140000L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.OK)); } diff --git 
a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index 14bdd533c6b..81c54353a2d 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; @@ -206,8 +207,7 @@ public class ForecastIT extends MlNativeAutodetectIntegTestCase { assertThat(e.getMessage(), equalTo("Cannot run forecast: Forecast cannot be executed as model memory status is not OK")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/30399") - public void testMemoryLimit() throws Exception { + public void testOverflowToDisk() throws Exception { Detector.Builder detector = new Detector.Builder("mean", "value"); detector.setByFieldName("clientIP"); @@ -216,7 +216,9 @@ public class ForecastIT extends MlNativeAutodetectIntegTestCase { analysisConfig.setBucketSpan(bucketSpan); DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setTimeFormat("epoch"); - Job.Builder job = new Job.Builder("forecast-it-test-memory-limit"); + Job.Builder job = new Job.Builder("forecast-it-test-overflow-to-disk"); + AnalysisLimits limits = new AnalysisLimits(1200L, null); + job.setAnalysisLimits(limits); job.setAnalysisConfig(analysisConfig); job.setDataDescription(dataDescription); @@ -224,28 +226,47 @@ public class ForecastIT extends MlNativeAutodetectIntegTestCase { putJob(job); openJob(job.getId()); 
createDataWithLotsOfClientIps(bucketSpan, job); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> forecast(job.getId(), TimeValue.timeValueMinutes(120), null)); - assertThat(e.getMessage(), - equalTo("Cannot run forecast: Forecast cannot be executed as forecast memory usage is predicted to exceed 20MB")); + + try { + String forecastId = forecast(job.getId(), TimeValue.timeValueHours(1), null); + + waitForecastToFinish(job.getId(), forecastId); + } catch (ElasticsearchStatusException e) { + if (e.getMessage().contains("disk space")) { + throw new ElasticsearchStatusException( + "Test likely fails due to insufficient disk space on test machine, please free up space.", e.status(), e); + } + throw e; + } + + closeJob(job.getId()); + + List forecastStats = getForecastStats(); + assertThat(forecastStats.size(), equalTo(1)); + ForecastRequestStats forecastRequestStats = forecastStats.get(0); + List forecasts = getForecasts(job.getId(), forecastRequestStats); + + assertThat(forecastRequestStats.getRecordCount(), equalTo(8000L)); + assertThat(forecasts.size(), equalTo(8000)); } private void createDataWithLotsOfClientIps(TimeValue bucketSpan, Job.Builder job) throws IOException { long now = Instant.now().getEpochSecond(); - long timestamp = now - 50 * bucketSpan.seconds(); - while (timestamp < now) { - for (int i = 1; i < 256; i++) { + long timestamp = now - 15 * bucketSpan.seconds(); + + for (int h = 0; h < 15; h++) { + for (int i = 1; i < 101; i++) { List data = new ArrayList<>(); - for (int j = 1; j < 100; j++) { + for (int j = 1; j < 81; j++) { Map record = new HashMap<>(); record.put("time", timestamp); - record.put("value", 10.0); + record.put("value", 10.0 + h); record.put("clientIP", String.format(Locale.ROOT, "192.168.%d.%d", i, j)); data.add(createJsonRecord(record)); } postData(job.getId(), data.stream().collect(Collectors.joining())); - timestamp += bucketSpan.seconds(); } + timestamp += bucketSpan.seconds(); } flushJob(job.getId(), 
false); } diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java index eb0c125a13c..81a44cd1336 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java @@ -30,7 +30,9 @@ import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ModelPlotsIT extends MlNativeAutodetectIntegTestCase { @@ -83,7 +85,11 @@ public class ModelPlotsIT extends MlNativeAutodetectIntegTestCase { startDatafeed(datafeedId, 0, System.currentTimeMillis()); waitUntilJobIsClosed(job.getId()); - assertThat(getBuckets(job.getId()).size(), equalTo(23)); + // As the initial time is random, there's a chance the first record is + // aligned on a bucket start. Thus we check the buckets are in [23, 24] + assertThat(getBuckets(job.getId()).size(), greaterThanOrEqualTo(23)); + assertThat(getBuckets(job.getId()).size(), lessThanOrEqualTo(24)); + Set modelPlotTerms = modelPlotTerms(job.getId(), "partition_field_value"); assertThat(modelPlotTerms, containsInAnyOrder("user_1", "user_2", "user_3")); } @@ -101,7 +107,11 @@ public class ModelPlotsIT extends MlNativeAutodetectIntegTestCase { startDatafeed(datafeedId, 0, System.currentTimeMillis()); waitUntilJobIsClosed(job.getId()); - assertThat(getBuckets(job.getId()).size(), equalTo(23)); + // As the initial time is random, there's a chance the first record is + // aligned on a bucket start. 
Thus we check the buckets are in [23, 24] + assertThat(getBuckets(job.getId()).size(), greaterThanOrEqualTo(23)); + assertThat(getBuckets(job.getId()).size(), lessThanOrEqualTo(24)); + Set modelPlotTerms = modelPlotTerms(job.getId(), "partition_field_value"); assertThat(modelPlotTerms, containsInAnyOrder("user_2", "user_3")); } diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index e65e9fd6176..ad7051ce425 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -1,6 +1,5 @@ import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.MavenFilteringHack -import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.elasticsearch.gradle.test.NodeInfo diff --git a/x-pack/qa/vagrant/build.gradle b/x-pack/qa/vagrant/build.gradle index c69214578fd..411b8d90c6d 100644 --- a/x-pack/qa/vagrant/build.gradle +++ b/x-pack/qa/vagrant/build.gradle @@ -1,8 +1,3 @@ -import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin -import org.elasticsearch.gradle.plugin.MetaPluginPropertiesExtension -import org.elasticsearch.gradle.plugin.PluginBuildPlugin -import org.elasticsearch.gradle.plugin.PluginPropertiesExtension - apply plugin: 'elasticsearch.vagrantsupport' apply plugin: 'elasticsearch.vagrant'