Merge branch 'master' into index-lifecycle

This commit is contained in:
Colin Goodheart-Smithe 2018-05-21 12:05:24 +01:00
commit 46a3aba798
No known key found for this signature in database
GPG Key ID: F975E7BDD739B3C7
145 changed files with 795 additions and 2095 deletions

View File

@ -25,6 +25,46 @@ run it using Gradle:
./gradlew run
-------------------------------------
==== Launching and debugging from an IDE
If you want to run Elasticsearch from your IDE, the `./gradlew run` task
supports a remote debugging option:
---------------------------------------------------------------------------
./gradlew run --debug-jvm
---------------------------------------------------------------------------
==== Distribution
By default a node is started with the zip distribution.
In order to start with a different distribution use the `-Drun.distribution` argument.
For example, to start the open source distribution:
-------------------------------------
./gradlew run -Drun.distribution=oss-zip
-------------------------------------
==== License type
By default a node is started with the `basic` license type.
In order to start with a different license type use the `-Drun.license_type` argument.
In order to start a node with a trial license execute the following command:
-------------------------------------
./gradlew run -Drun.license_type=trial
-------------------------------------
This enables security and other paid features and adds a superuser with the username: `elastic-admin` and
password: `elastic-password`.
==== Other useful arguments
In order to start a node with a different max heap space add: `-Dtests.heap.size=4G`
In order to disable assertions add: `-Dtests.asserts=false`
In order to set an Elasticsearch setting, provide a setting with the following prefix: `-Dtests.es.`
=== Test case filtering.
- `tests.class` is a class-filtering shell-like glob pattern,
@ -572,15 +612,6 @@ as its build system. Since the switch to Gradle though, this is no longer possib
the code currently used to build Elasticsearch does not allow JaCoCo to recognize its tests.
For more information on this, see the discussion in https://github.com/elastic/elasticsearch/issues/28867[issue #28867].
== Launching and debugging from an IDE
If you want to run Elasticsearch from your IDE, the `./gradlew run` task
supports a remote debugging option:
---------------------------------------------------------------------------
./gradlew run --debug-jvm
---------------------------------------------------------------------------
== Debugging remotely from an IDE
If you want to run Elasticsearch and be able to remotely attach the process

View File

@ -1,106 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.plugin
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.test.RestTestPlugin
import org.elasticsearch.gradle.test.RunTask
import org.elasticsearch.gradle.test.StandaloneRestTestPlugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.file.FileCopyDetails
import org.gradle.api.file.RelativePath
import org.gradle.api.tasks.bundling.Zip
/**
 * Gradle plugin that builds an Elasticsearch "meta" plugin: a single zip that
 * bundles several regular plugins under one meta-plugin descriptor.
 *
 * Applying this plugin wires up REST integration testing for the bundle, a
 * {@code bundlePlugin} zip task that assembles the descriptor plus every
 * bundled plugin's zip contents, and a {@code run} task for manual testing.
 */
class MetaPluginBuildPlugin implements Plugin<Project> {

    @Override
    void apply(Project project) {
        // reuse the standalone REST test infrastructure so the assembled bundle
        // can be exercised against a real test cluster
        project.plugins.apply(StandaloneRestTestPlugin)
        project.plugins.apply(RestTestPlugin)

        createBundleTask(project)
        // modules (core and x-pack) are wired into the test cluster differently
        // from externally installed plugins, so detect them by project path
        boolean isModule = project.path.startsWith(':modules:') || project.path.startsWith(':x-pack:plugin')

        project.integTestCluster {
            dependsOn(project.bundlePlugin)
            distribution = 'integ-test-zip'
        }
        BuildPlugin.configurePomGeneration(project)
        // distribution/module decisions must wait until the project is fully
        // configured, hence afterEvaluate
        project.afterEvaluate {
            PluginBuildPlugin.addZipPomGeneration(project)
            if (isModule) {
                // only the integ-test-zip distribution needs the module added
                // explicitly; full distributions already contain it
                if (project.integTestCluster.distribution == 'integ-test-zip') {
                    project.integTestCluster.module(project)
                }
            } else {
                project.integTestCluster.plugin(project.path)
            }
        }

        // 'run' starts a node with this meta plugin installed, for manual testing
        RunTask run = project.tasks.create('run', RunTask)
        run.dependsOn(project.bundlePlugin)
        if (isModule == false) {
            run.clusterConfig.plugin(project.path)
        }
    }

    /**
     * Creates the {@code bundlePlugin} zip task that packages the generated
     * meta-plugin descriptor together with each bundled plugin's files, and
     * exposes the zip through a 'zip' configuration for inter-project use.
     */
    private static void createBundleTask(Project project) {

        MetaPluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', MetaPluginPropertiesTask.class)

        // create the actual bundle task, which zips up all the files for the plugin
        Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [buildProperties]) {
            from(buildProperties.descriptorOutput.parentFile) {
                // plugin properties file
                include(buildProperties.descriptorOutput.name)
            }
            // due to how the renames work for each bundled plugin, we must exclude empty dirs or every subdir
            // within bundled plugin zips will show up at the root as an empty dir
            includeEmptyDirs = false

        }
        project.assemble.dependsOn(bundle)

        // also make the zip available as a configuration (used when depending on this project)
        project.configurations.create('zip')
        project.artifacts.add('zip', bundle)

        // a super hacky way to inject code to run at the end of each of the bundled plugin's configuration
        // to add itself back to this meta plugin zip
        project.afterEvaluate {
            buildProperties.extension.plugins.each { String bundledPluginProjectName ->
                Project bundledPluginProject = project.project(bundledPluginProjectName)
                bundledPluginProject.afterEvaluate {
                    String bundledPluginName = bundledPluginProject.esplugin.name
                    bundle.configure {
                        dependsOn bundledPluginProject.bundlePlugin
                        from(project.zipTree(bundledPluginProject.bundlePlugin.outputs.files.singleFile)) {
                            eachFile { FileCopyDetails details ->
                                // we want each path to have the plugin name interjected
                                details.relativePath = new RelativePath(true, bundledPluginName, details.relativePath.toString())
                            }
                        }
                    }
                }
            }
        }
    }
}

View File

@ -1,46 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.plugin
import org.gradle.api.Project
import org.gradle.api.tasks.Input
/**
* A container for meta plugin properties that will be written to the meta plugin descriptor, for easy
* manipulation in the gradle DSL.
*/
/**
 * Holds the properties written to the meta plugin descriptor, exposed so they
 * can be set conveniently from the Gradle DSL ({@code es_meta_plugin} block).
 */
class MetaPluginPropertiesExtension {

    // meta plugin name; defaults to the project name
    @Input
    String name

    // human-readable summary written to the descriptor
    @Input
    String description

    /**
     * The plugins this meta plugin wraps.
     * Note this is not written to the plugin descriptor, but used to setup the final zip file task.
     */
    @Input
    List<String> plugins

    MetaPluginPropertiesExtension(Project project) {
        this.name = project.name
    }
}

View File

@ -1,68 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.plugin
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Task
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.OutputFile
/**
 * Task that generates the {@code meta-plugin-descriptor.properties} file for a
 * meta plugin by copying the bundled template resource and expanding the
 * {@code name}/{@code description} placeholders from the
 * {@code es_meta_plugin} extension.
 */
class MetaPluginPropertiesTask extends Copy {

    // extension this task reads the descriptor values from; registered in the constructor
    MetaPluginPropertiesExtension extension

    @OutputFile
    File descriptorOutput = new File(project.buildDir, 'generated-resources/meta-plugin-descriptor.properties')

    MetaPluginPropertiesTask() {
        File templateFile = new File(project.buildDir, "templates/${descriptorOutput.name}")
        // extract the descriptor template from this jar's resources into the build dir
        // so the Copy task below has a real file to copy/expand
        Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') {
            doLast {
                InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream("/${descriptorOutput.name}")
                templateFile.parentFile.mkdirs()
                templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8')
            }
        }
        dependsOn(copyPluginPropertiesTemplate)
        extension = project.extensions.create('es_meta_plugin', MetaPluginPropertiesExtension, project)
        // the extension is only populated once the build script has run, so the
        // copy spec must be configured in afterEvaluate
        project.afterEvaluate {
            // check required properties are set
            if (extension.name == null) {
                throw new InvalidUserDataException('name is a required setting for es_meta_plugin')
            }
            if (extension.description == null) {
                throw new InvalidUserDataException('description is a required setting for es_meta_plugin')
            }
            // configure property substitution
            from(templateFile.parentFile).include(descriptorOutput.name)
            into(descriptorOutput.parentFile)
            Map<String, String> properties = generateSubstitutions()
            expand(properties)
            inputs.properties(properties)
        }
    }

    /** Returns the placeholder-to-value map used to expand the descriptor template. */
    Map<String, String> generateSubstitutions() {
        return ['name': extension.name,
                'description': extension.description
        ]
    }
}

View File

@ -24,7 +24,7 @@ import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.elasticsearch.gradle.plugin.PluginPropertiesExtension
import org.gradle.api.AntBuilder
@ -842,19 +842,15 @@ class ClusterFormationTasks {
}
static void verifyProjectHasBuildPlugin(String name, Version version, Project project, Project pluginProject) {
if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false && pluginProject.plugins.hasPlugin(MetaPluginBuildPlugin) == false) {
if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false) {
throw new GradleException("Task [${name}] cannot add plugin [${pluginProject.path}] with version [${version}] to project's " +
"[${project.path}] dependencies: the plugin is not an esplugin or es_meta_plugin")
"[${project.path}] dependencies: the plugin is not an esplugin")
}
}
/** Find the plugin name in the given project, whether a regular plugin or meta plugin. */
/** Find the plugin name in the given project. */
static String findPluginName(Project pluginProject) {
PluginPropertiesExtension extension = pluginProject.extensions.findByName('esplugin')
if (extension != null) {
return extension.name
} else {
return pluginProject.extensions.findByName('es_meta_plugin').name
}
}
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.gradle.BuildPlugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.compile.JavaCompile
/**
* Configures the build to compile against Elasticsearch's test framework and
@ -49,5 +50,12 @@ public class StandaloneTestPlugin implements Plugin<Project> {
test.testClassesDir project.sourceSets.test.output.classesDir
test.mustRunAfter(project.precommit)
project.check.dependsOn(test)
project.tasks.withType(JavaCompile) {
// This will be the default in Gradle 5.0
if (options.compilerArgs.contains("-processor") == false) {
options.compilerArgs << '-proc:none'
}
}
}
}

View File

@ -1,20 +0,0 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
implementation-class=org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin

View File

@ -1,20 +0,0 @@
# Elasticsearch meta plugin descriptor file
# This file must exist as 'meta-plugin-descriptor.properties' inside a meta plugin.
#
### example meta plugin for "meta-foo"
#
# meta-foo.zip <-- zip file for the meta plugin, with this structure:
# |____ <bundled_plugin_1> <-- The plugin files for bundled_plugin_1
# |____ <bundled_plugin_2> <-- The plugin files for bundled_plugin_2
# |____ meta-plugin-descriptor.properties <-- example contents below:
#
# description=My meta plugin
# name=meta-foo
#
### mandatory elements for all meta plugins:
#
# 'description': simple summary of the meta plugin
description=${description}
#
# 'name': the meta plugin name
name=${name}

View File

@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
lucene = 7.4.0-snapshot-6705632810
lucene = 7.4.0-snapshot-59f2b7aec2
# optional dependencies
spatial4j = 0.7

View File

@ -642,7 +642,12 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
ResizeRequest resizeRequest = new ResizeRequest("target", "source");
resizeRequest.setResizeType(ResizeType.SHRINK);
Settings targetSettings = Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build();
Settings targetSettings =
Settings.builder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 0)
.putNull("index.routing.allocation.require._name")
.build();
resizeRequest.setTargetIndex(new CreateIndexRequest("target").settings(targetSettings).alias(new Alias("alias")));
ResizeResponse resizeResponse = highLevelClient().indices().shrink(resizeRequest);
assertTrue(resizeResponse.isAcknowledged());

View File

@ -1305,7 +1305,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::shrink-index-request-waitForActiveShards
// tag::shrink-index-request-settings
request.getTargetIndexRequest().settings(Settings.builder()
.put("index.number_of_shards", 2)); // <1>
.put("index.number_of_shards", 2) // <1>
.putNull("index.routing.allocation.require._name")); // <2>
// end::shrink-index-request-settings
// tag::shrink-index-request-aliases
request.getTargetIndexRequest().alias(new Alias("target_alias")); // <1>

View File

@ -310,12 +310,14 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
task run(type: RunTask) {
distribution = System.getProperty('run.distribution', 'zip')
if (distribution == 'zip') {
String licenseType = System.getProperty("license_type", "basic")
String licenseType = System.getProperty("run.license_type", "basic")
if (licenseType == 'trial') {
setting 'xpack.ml.enabled', 'true'
setting 'xpack.graph.enabled', 'true'
setting 'xpack.watcher.enabled', 'true'
setting 'xpack.license.self_generated.type', 'trial'
setupCommand 'setupTestAdmin',
'bin/elasticsearch-users', 'useradd', 'elastic-admin', '-p', 'elastic-password', '-r', 'superuser'
} else if (licenseType != 'basic') {
throw new IllegalArgumentException("Unsupported self-generated license type: [" + licenseType + "[basic] or [trial].")
}

View File

@ -87,8 +87,8 @@ import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE;
* <li>A URL to a plugin zip</li>
* </ul>
*
* Plugins are packaged as zip files. Each packaged plugin must contain a plugin properties file
* or a meta plugin properties file. See {@link PluginInfo} and {@link MetaPluginInfo}, respectively.
* Plugins are packaged as zip files. Each packaged plugin must contain a plugin properties file.
* See {@link PluginInfo}.
* <p>
* The installation process first extracts the plugin files into a temporary
* directory in order to verify the plugin satisfies the following requirements:
@ -106,11 +106,6 @@ import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE;
files specific to the plugin. The config files will be installed into a subdirectory of the
elasticsearch config directory, using the name of the plugin. If any files to be installed
* already exist, they will be skipped.
* <p>
* If the plugin is a meta plugin, the installation process installs each plugin separately
* inside the meta plugin directory. The {@code bin} and {@code config} directory are also moved
* inside the meta plugin directory.
* </p>
*/
class InstallPluginCommand extends EnvironmentAwareCommand {
@ -550,7 +545,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
}
// checking for existing version of the plugin
private void verifyPluginName(Path pluginPath, String pluginName, Path candidateDir) throws UserException, IOException {
private void verifyPluginName(Path pluginPath, String pluginName) throws UserException, IOException {
// don't let user install plugin conflicting with module...
// they might be unavoidably in maven central and are packaged up the same way)
if (MODULES.contains(pluginName)) {
@ -567,28 +562,10 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
pluginName);
throw new UserException(PLUGIN_EXISTS, message);
}
// checks meta plugins too
try (DirectoryStream<Path> stream = Files.newDirectoryStream(pluginPath)) {
for (Path plugin : stream) {
if (candidateDir.equals(plugin.resolve(pluginName))) {
continue;
}
if (MetaPluginInfo.isMetaPlugin(plugin) && Files.exists(plugin.resolve(pluginName))) {
final MetaPluginInfo info = MetaPluginInfo.readFromProperties(plugin);
final String message = String.format(
Locale.ROOT,
"plugin name [%s] already exists in a meta plugin; if you need to update the meta plugin, " +
"uninstall it first using command 'remove %s'",
plugin.resolve(pluginName).toAbsolutePath(),
info.getName());
throw new UserException(PLUGIN_EXISTS, message);
}
}
}
}
/** Load information about the plugin, and verify it can be installed with no errors. */
private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, boolean isBatch, Environment env) throws Exception {
private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, Environment env) throws Exception {
final PluginInfo info = PluginInfo.readFromProperties(pluginRoot);
if (info.hasNativeController()) {
throw new IllegalStateException("plugins can not have native controllers");
@ -596,7 +573,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
PluginsService.verifyCompatibility(info);
// checking for existing version of the plugin
verifyPluginName(env.pluginsFile(), info.getName(), pluginRoot);
verifyPluginName(env.pluginsFile(), info.getName());
PluginsService.checkForFailedPluginRemovals(env.pluginsFile());
@ -635,11 +612,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
List<Path> deleteOnFailure = new ArrayList<>();
deleteOnFailure.add(tmpRoot);
try {
if (MetaPluginInfo.isMetaPlugin(tmpRoot)) {
installMetaPlugin(terminal, isBatch, tmpRoot, env, deleteOnFailure);
} else {
installPlugin(terminal, isBatch, tmpRoot, env, deleteOnFailure);
}
} catch (Exception installProblem) {
try {
IOUtils.rm(deleteOnFailure.toArray(new Path[0]));
@ -650,71 +623,13 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
}
}
/**
* Installs the meta plugin and all the bundled plugins from {@code tmpRoot} into the plugins dir.
* If a bundled plugin has a bin dir and/or a config dir, those are copied.
*/
private void installMetaPlugin(Terminal terminal, boolean isBatch, Path tmpRoot,
Environment env, List<Path> deleteOnFailure) throws Exception {
final MetaPluginInfo metaInfo = MetaPluginInfo.readFromProperties(tmpRoot);
verifyPluginName(env.pluginsFile(), metaInfo.getName(), tmpRoot);
final Path destination = env.pluginsFile().resolve(metaInfo.getName());
deleteOnFailure.add(destination);
terminal.println(VERBOSE, metaInfo.toString());
final List<Path> pluginPaths = new ArrayList<>();
try (DirectoryStream<Path> paths = Files.newDirectoryStream(tmpRoot)) {
// Extract bundled plugins path and validate plugin names
for (Path plugin : paths) {
if (MetaPluginInfo.isPropertiesFile(plugin)) {
continue;
}
final PluginInfo info = PluginInfo.readFromProperties(plugin);
PluginsService.verifyCompatibility(info);
verifyPluginName(env.pluginsFile(), info.getName(), plugin);
pluginPaths.add(plugin);
}
}
// read optional security policy from each bundled plugin, and confirm all exceptions one time with user
Set<String> permissions = new HashSet<>();
final List<PluginInfo> pluginInfos = new ArrayList<>();
for (Path plugin : pluginPaths) {
final PluginInfo info = loadPluginInfo(terminal, plugin, isBatch, env);
pluginInfos.add(info);
Path policy = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
if (Files.exists(policy)) {
permissions.addAll(PluginSecurity.parsePermissions(policy, env.tmpFile()));
}
}
PluginSecurity.confirmPolicyExceptions(terminal, permissions, isBatch);
// move support files and rename as needed to prepare the exploded plugin for its final location
for (int i = 0; i < pluginPaths.size(); ++i) {
Path pluginPath = pluginPaths.get(i);
PluginInfo info = pluginInfos.get(i);
installPluginSupportFiles(info, pluginPath, env.binFile().resolve(metaInfo.getName()),
env.configFile().resolve(metaInfo.getName()), deleteOnFailure);
// ensure the plugin dir within the tmpRoot has the correct name
if (pluginPath.getFileName().toString().equals(info.getName()) == false) {
Files.move(pluginPath, pluginPath.getParent().resolve(info.getName()), StandardCopyOption.ATOMIC_MOVE);
}
}
movePlugin(tmpRoot, destination);
String[] plugins = pluginInfos.stream().map(PluginInfo::getName).toArray(String[]::new);
terminal.println("-> Installed " + metaInfo.getName() + " with: " + Strings.arrayToCommaDelimitedString(plugins));
}
/**
* Installs the plugin from {@code tmpRoot} into the plugins dir.
* If the plugin has a bin dir and/or a config dir, those are moved.
*/
private void installPlugin(Terminal terminal, boolean isBatch, Path tmpRoot,
Environment env, List<Path> deleteOnFailure) throws Exception {
final PluginInfo info = loadPluginInfo(terminal, tmpRoot, isBatch, env);
final PluginInfo info = loadPluginInfo(terminal, tmpRoot, env);
// read optional security policy (extra permissions), if it exists, confirm or warn the user
Path policy = tmpRoot.resolve(PluginInfo.ES_PLUGIN_POLICY);
final Set<String> permissions;

View File

@ -61,27 +61,9 @@ class ListPluginsCommand extends EnvironmentAwareCommand {
}
Collections.sort(plugins);
for (final Path plugin : plugins) {
if (MetaPluginInfo.isMetaPlugin(plugin)) {
MetaPluginInfo metaInfo = MetaPluginInfo.readFromProperties(plugin);
List<Path> subPluginPaths = new ArrayList<>();
try (DirectoryStream<Path> subPaths = Files.newDirectoryStream(plugin)) {
for (Path subPlugin : subPaths) {
if (MetaPluginInfo.isPropertiesFile(subPlugin)) {
continue;
}
subPluginPaths.add(subPlugin);
}
}
Collections.sort(subPluginPaths);
terminal.println(Terminal.Verbosity.SILENT, metaInfo.getName());
for (Path subPlugin : subPluginPaths) {
printPlugin(env, terminal, subPlugin, "\t");
}
} else {
printPlugin(env, terminal, plugin, "");
}
}
}
private void printPlugin(Environment env, Terminal terminal, Path plugin, String prefix) throws IOException {
terminal.println(Terminal.Verbosity.SILENT, prefix + plugin.getFileName().toString());

View File

@ -219,18 +219,6 @@ public class InstallPluginCommandTests extends ESTestCase {
return createPlugin(name, structure, additionalProps).toUri().toURL().toString();
}
/** creates an meta plugin .zip and returns the url for testing */
static String createMetaPluginUrl(String name, Path structure) throws IOException {
return createMetaPlugin(name, structure).toUri().toURL().toString();
}
static void writeMetaPlugin(String name, Path structure) throws IOException {
PluginTestUtil.writeMetaPluginProperties(structure,
"description", "fake desc",
"name", name
);
}
static void writePlugin(String name, Path structure, String... additionalProps) throws IOException {
String[] properties = Stream.concat(Stream.of(
"description", "fake desc",
@ -261,11 +249,6 @@ public class InstallPluginCommandTests extends ESTestCase {
return writeZip(structure, null);
}
static Path createMetaPlugin(String name, Path structure) throws IOException {
writeMetaPlugin(name, structure);
return writeZip(structure, null);
}
void installPlugin(String pluginUrl, Path home) throws Exception {
installPlugin(pluginUrl, home, skipJarHellCommand);
}
@ -275,11 +258,6 @@ public class InstallPluginCommandTests extends ESTestCase {
command.execute(terminal, pluginUrl, false, env);
}
void assertMetaPlugin(String metaPlugin, String name, Path original, Environment env) throws IOException {
assertPluginInternal(name, env.pluginsFile().resolve(metaPlugin));
assertConfigAndBin(metaPlugin, original, env);
}
void assertPlugin(String name, Path original, Environment env) throws IOException {
assertPluginInternal(name, env.pluginsFile());
assertConfigAndBin(name, original, env);
@ -388,23 +366,9 @@ public class InstallPluginCommandTests extends ESTestCase {
assertPlugin("fake", pluginDir, env.v2());
}
public void testWithMetaPlugin() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
Files.createDirectory(pluginDir.resolve("fake1"));
writePlugin("fake1", pluginDir.resolve("fake1"));
Files.createDirectory(pluginDir.resolve("fake2"));
writePlugin("fake2", pluginDir.resolve("fake2"));
String pluginZip = createMetaPluginUrl("my_plugins", pluginDir);
installPlugin(pluginZip, env.v1());
assertMetaPlugin("my_plugins", "fake1", pluginDir, env.v2());
assertMetaPlugin("my_plugins", "fake2", pluginDir, env.v2());
}
public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("fake", pluginDir);
final Path removing = env.v2().pluginsFile().resolve(".removing-failed");
Files.createDirectory(removing);
@ -414,11 +378,6 @@ public class InstallPluginCommandTests extends ESTestCase {
"found file [%s] from a failed attempt to remove the plugin [failed]; execute [elasticsearch-plugin remove failed]",
removing);
assertThat(e, hasToString(containsString(expected)));
// test with meta plugin
String metaZip = createMetaPluginUrl("my_plugins", metaDir);
final IllegalStateException e1 = expectThrows(IllegalStateException.class, () -> installPlugin(metaZip, env.v1()));
assertThat(e1, hasToString(containsString(expected)));
}
public void testSpaceInUrl() throws Exception {
@ -500,23 +459,6 @@ public class InstallPluginCommandTests extends ESTestCase {
assertInstallCleaned(environment.v2());
}
public void testJarHellInMetaPlugin() throws Exception {
// jar hell test needs a real filesystem
assumeTrue("real filesystem", isReal);
Tuple<Path, Environment> environment = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
Files.createDirectory(pluginDir.resolve("fake1"));
writePlugin("fake1", pluginDir.resolve("fake1"));
Files.createDirectory(pluginDir.resolve("fake2"));
writePlugin("fake2", pluginDir.resolve("fake2")); // adds plugin.jar with Fake2Plugin
writeJar(pluginDir.resolve("fake2").resolve("other.jar"), "Fake2Plugin");
String pluginZip = createMetaPluginUrl("my_plugins", pluginDir);
IllegalStateException e = expectThrows(IllegalStateException.class,
() -> installPlugin(pluginZip, environment.v1(), defaultCommand));
assertTrue(e.getMessage(), e.getMessage().contains("jar hell"));
assertInstallCleaned(environment.v2());
}
public void testIsolatedPlugins() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
// these both share the same FakePlugin class
@ -540,23 +482,6 @@ public class InstallPluginCommandTests extends ESTestCase {
assertInstallCleaned(env.v2());
}
public void testExistingMetaPlugin() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaZip = createPluginDir(temp);
Path pluginDir = metaZip.resolve("fake");
Files.createDirectory(pluginDir);
String pluginZip = createPluginUrl("fake", pluginDir);
installPlugin(pluginZip, env.v1());
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("already exists"));
assertInstallCleaned(env.v2());
String anotherZip = createMetaPluginUrl("another_plugins", metaZip);
e = expectThrows(UserException.class, () -> installPlugin(anotherZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("already exists"));
assertInstallCleaned(env.v2());
}
public void testBin() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
@ -568,43 +493,20 @@ public class InstallPluginCommandTests extends ESTestCase {
assertPlugin("fake", pluginDir, env.v2());
}
public void testMetaBin() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
writePlugin("fake", pluginDir);
Path binDir = pluginDir.resolve("bin");
Files.createDirectory(binDir);
Files.createFile(binDir.resolve("somescript"));
String pluginZip = createMetaPluginUrl("my_plugins", metaDir);
installPlugin(pluginZip, env.v1());
assertMetaPlugin("my_plugins","fake", pluginDir, env.v2());
}
public void testBinNotDir() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
Path pluginDir = createPluginDir(temp);
Path binDir = pluginDir.resolve("bin");
Files.createFile(binDir);
String pluginZip = createPluginUrl("fake", pluginDir);
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
assertInstallCleaned(env.v2());
String metaZip = createMetaPluginUrl("my_plugins", metaDir);
e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
assertInstallCleaned(env.v2());
}
public void testBinContainsDir() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
Path pluginDir = createPluginDir(temp);
Path dirInBinDir = pluginDir.resolve("bin").resolve("foo");
Files.createDirectories(dirInBinDir);
Files.createFile(dirInBinDir.resolve("somescript"));
@ -612,11 +514,6 @@ public class InstallPluginCommandTests extends ESTestCase {
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin"));
assertInstallCleaned(env.v2());
String metaZip = createMetaPluginUrl("my_plugins", metaDir);
e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin"));
assertInstallCleaned(env.v2());
}
public void testBinConflict() throws Exception {
@ -649,27 +546,6 @@ public class InstallPluginCommandTests extends ESTestCase {
}
}
public void testMetaBinPermissions() throws Exception {
assumeTrue("posix filesystem", isPosix);
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
writePlugin("fake", pluginDir);
Path binDir = pluginDir.resolve("bin");
Files.createDirectory(binDir);
Files.createFile(binDir.resolve("somescript"));
String pluginZip = createMetaPluginUrl("my_plugins", metaDir);
try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) {
Set<PosixFilePermission> perms = binAttrs.getCopyPermissions();
// make sure at least one execute perm is missing, so we know we forced it during installation
perms.remove(PosixFilePermission.GROUP_EXECUTE);
binAttrs.setPermissions(perms);
installPlugin(pluginZip, env.v1());
assertMetaPlugin("my_plugins", "fake", pluginDir, env.v2());
}
}
public void testPluginPermissions() throws Exception {
assumeTrue("posix filesystem", isPosix);
@ -761,32 +637,9 @@ public class InstallPluginCommandTests extends ESTestCase {
assertTrue(Files.exists(envConfigDir.resolve("other.yml")));
}
public void testExistingMetaConfig() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path envConfigDir = env.v2().configFile().resolve("my_plugins");
Files.createDirectories(envConfigDir);
Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8));
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
writePlugin("fake", pluginDir);
Path configDir = pluginDir.resolve("config");
Files.createDirectory(configDir);
Files.write(configDir.resolve("custom.yml"), "new config".getBytes(StandardCharsets.UTF_8));
Files.createFile(configDir.resolve("other.yml"));
String pluginZip = createMetaPluginUrl("my_plugins", metaDir);
installPlugin(pluginZip, env.v1());
assertMetaPlugin("my_plugins", "fake", pluginDir, env.v2());
List<String> configLines = Files.readAllLines(envConfigDir.resolve("custom.yml"), StandardCharsets.UTF_8);
assertEquals(1, configLines.size());
assertEquals("existing config", configLines.get(0));
assertTrue(Files.exists(envConfigDir.resolve("other.yml")));
}
public void testConfigNotDir() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Path pluginDir = createPluginDir(temp);
Files.createDirectories(pluginDir);
Path configDir = pluginDir.resolve("config");
Files.createFile(configDir);
@ -794,11 +647,6 @@ public class InstallPluginCommandTests extends ESTestCase {
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
assertInstallCleaned(env.v2());
String metaZip = createMetaPluginUrl("my_plugins", metaDir);
e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
assertInstallCleaned(env.v2());
}
public void testConfigContainsDir() throws Exception {
@ -815,19 +663,12 @@ public class InstallPluginCommandTests extends ESTestCase {
public void testMissingDescriptor() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path pluginDir = metaDir.resolve("fake");
Files.createDirectory(pluginDir);
Path pluginDir = createPluginDir(temp);
Files.createFile(pluginDir.resolve("fake.yml"));
String pluginZip = writeZip(pluginDir, null).toUri().toURL().toString();
NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties"));
assertInstallCleaned(env.v2());
String metaZip = createMetaPluginUrl("my_plugins", metaDir);
e = expectThrows(NoSuchFileException.class, () -> installPlugin(metaZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties"));
assertInstallCleaned(env.v2());
}
public void testContainsIntermediateDirectory() throws Exception {
@ -840,16 +681,6 @@ public class InstallPluginCommandTests extends ESTestCase {
assertInstallCleaned(env.v2());
}
public void testContainsIntermediateDirectoryMeta() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
Files.createFile(pluginDir.resolve(MetaPluginInfo.ES_META_PLUGIN_PROPERTIES));
String pluginZip = writeZip(pluginDir, "elasticsearch").toUri().toURL().toString();
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertThat(e.getMessage(), containsString("This plugin was built with an older plugin structure"));
assertInstallCleaned(env.v2());
}
public void testZipRelativeOutsideEntryName() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path zip = createTempDir().resolve("broken.zip");
@ -958,29 +789,6 @@ public class InstallPluginCommandTests extends ESTestCase {
"if you need to update the plugin, uninstall it first using command 'remove fake'"));
}
public void testMetaPluginAlreadyInstalled() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
{
// install fake plugin
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("fake", pluginDir);
installPlugin(pluginZip, env.v1());
}
Path pluginDir = createPluginDir(temp);
Files.createDirectory(pluginDir.resolve("fake"));
writePlugin("fake", pluginDir.resolve("fake"));
Files.createDirectory(pluginDir.resolve("other"));
writePlugin("other", pluginDir.resolve("other"));
String metaZip = createMetaPluginUrl("meta", pluginDir);
final UserException e = expectThrows(UserException.class,
() -> installPlugin(metaZip, env.v1(), randomFrom(skipJarHellCommand, defaultCommand)));
assertThat(
e.getMessage(),
equalTo("plugin directory [" + env.v2().pluginsFile().resolve("fake") + "] already exists; " +
"if you need to update the plugin, uninstall it first using command 'remove fake'"));
}
private void installPlugin(MockTerminal terminal, boolean isBatch) throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
@ -1224,24 +1032,6 @@ public class InstallPluginCommandTests extends ESTestCase {
assertPlugin("fake", pluginDir, env.v2());
}
public void testMetaPluginPolicyConfirmation() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path fake1Dir = metaDir.resolve("fake1");
Files.createDirectory(fake1Dir);
writePluginSecurityPolicy(fake1Dir, "setAccessible", "setFactory");
writePlugin("fake1", fake1Dir);
Path fake2Dir = metaDir.resolve("fake2");
Files.createDirectory(fake2Dir);
writePluginSecurityPolicy(fake2Dir, "setAccessible", "accessDeclaredMembers");
writePlugin("fake2", fake2Dir);
String pluginZip = createMetaPluginUrl("meta-plugin", metaDir);
assertPolicyConfirmation(env, pluginZip, "plugin requires additional permissions");
assertMetaPlugin("meta-plugin", "fake1", metaDir, env.v2());
assertMetaPlugin("meta-plugin", "fake2", metaDir, env.v2());
}
public void testPluginWithNativeController() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
@ -1250,21 +1040,4 @@ public class InstallPluginCommandTests extends ESTestCase {
final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1()));
assertThat(e, hasToString(containsString("plugins can not have native controllers")));
}
public void testMetaPluginWithNativeController() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path metaDir = createPluginDir(temp);
Path fake1Dir = metaDir.resolve("fake1");
Files.createDirectory(fake1Dir);
writePluginSecurityPolicy(fake1Dir, "setAccessible", "setFactory");
writePlugin("fake1", fake1Dir);
Path fake2Dir = metaDir.resolve("fake2");
Files.createDirectory(fake2Dir);
writePlugin("fake2", fake2Dir, "has.native.controller", "true");
String pluginZip = createMetaPluginUrl("meta-plugin", metaDir);
final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1()));
assertThat(e, hasToString(containsString("plugins can not have native controllers")));
}
}

View File

@ -92,16 +92,7 @@ public class ListPluginsCommandTests extends ESTestCase {
final String description,
final String name,
final String classname) throws IOException {
buildFakePlugin(env, null, description, name, classname, false);
}
private static void buildFakePlugin(
final Environment env,
final String metaPlugin,
final String description,
final String name,
final String classname) throws IOException {
buildFakePlugin(env, metaPlugin, description, name, classname, false);
buildFakePlugin(env, description, name, classname, false);
}
private static void buildFakePlugin(
@ -110,19 +101,8 @@ public class ListPluginsCommandTests extends ESTestCase {
final String name,
final String classname,
final boolean hasNativeController) throws IOException {
buildFakePlugin(env, null, description, name, classname, hasNativeController);
}
private static void buildFakePlugin(
final Environment env,
final String metaPlugin,
final String description,
final String name,
final String classname,
final boolean hasNativeController) throws IOException {
Path dest = metaPlugin != null ? env.pluginsFile().resolve(metaPlugin) : env.pluginsFile();
PluginTestUtil.writePluginProperties(
dest.resolve(name),
env.pluginsFile().resolve(name),
"description", description,
"name", name,
"version", "1.0",
@ -132,16 +112,6 @@ public class ListPluginsCommandTests extends ESTestCase {
"has.native.controller", Boolean.toString(hasNativeController));
}
private static void buildFakeMetaPlugin(
final Environment env,
final String description,
final String name) throws IOException {
PluginTestUtil.writeMetaPluginProperties(
env.pluginsFile().resolve(name),
"description", description,
"name", name);
}
public void testPluginsDirMissing() throws Exception {
Files.delete(env.pluginsFile());
IOException e = expectThrows(IOException.class, () -> listPlugins(home));
@ -166,16 +136,6 @@ public class ListPluginsCommandTests extends ESTestCase {
assertEquals(buildMultiline("fake1", "fake2"), terminal.getOutput());
}
public void testMetaPlugin() throws Exception {
buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin");
buildFakePlugin(env, "meta_plugin", "fake desc", "fake1", "org.fake1");
buildFakePlugin(env, "meta_plugin", "fake desc 2", "fake2", "org.fake2");
buildFakePlugin(env, "fake desc 3", "fake3", "org.fake3");
buildFakePlugin(env, "fake desc 4", "fake4", "org.fake4");
MockTerminal terminal = listPlugins(home);
assertEquals(buildMultiline("fake3", "fake4", "meta_plugin", "\tfake1", "\tfake2"), terminal.getOutput());
}
public void testPluginWithVerbose() throws Exception {
buildFakePlugin(env, "fake desc", "fake_plugin", "org.fake");
String[] params = { "-v" };
@ -247,39 +207,6 @@ public class ListPluginsCommandTests extends ESTestCase {
terminal.getOutput());
}
public void testPluginWithVerboseMetaPlugins() throws Exception {
buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin");
buildFakePlugin(env, "meta_plugin", "fake desc 1", "fake_plugin1", "org.fake");
buildFakePlugin(env, "meta_plugin", "fake desc 2", "fake_plugin2", "org.fake2");
String[] params = { "-v" };
MockTerminal terminal = listPlugins(home, params);
assertEquals(
buildMultiline(
"Plugins directory: " + env.pluginsFile(),
"meta_plugin",
"\tfake_plugin1",
"\t- Plugin information:",
"\tName: fake_plugin1",
"\tDescription: fake desc 1",
"\tVersion: 1.0",
"\tElasticsearch Version: " + Version.CURRENT.toString(),
"\tJava Version: 1.8",
"\tNative Controller: false",
"\tExtended Plugins: []",
"\t * Classname: org.fake",
"\tfake_plugin2",
"\t- Plugin information:",
"\tName: fake_plugin2",
"\tDescription: fake desc 2",
"\tVersion: 1.0",
"\tElasticsearch Version: " + Version.CURRENT.toString(),
"\tJava Version: 1.8",
"\tNative Controller: false",
"\tExtended Plugins: []",
"\t * Classname: org.fake2"),
terminal.getOutput());
}
public void testPluginWithoutVerboseMultiplePlugins() throws Exception {
buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake");
buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2");
@ -307,19 +234,6 @@ public class ListPluginsCommandTests extends ESTestCase {
e.getMessage());
}
public void testMetaPluginWithWrongDescriptorFile() throws Exception{
buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin");
final Path pluginDir = env.pluginsFile().resolve("meta_plugin").resolve("fake_plugin1");
PluginTestUtil.writePluginProperties(pluginDir, "description", "fake desc");
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> listPlugins(home));
final Path descriptorPath = pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES);
assertEquals(
"property [name] is missing in [" + descriptorPath.toString() + "]",
e.getMessage());
}
public void testExistingIncompatiblePlugin() throws Exception {
PluginTestUtil.writePluginProperties(env.pluginsFile().resolve("fake_plugin1"),
"description", "fake desc 1",
@ -340,27 +254,4 @@ public class ListPluginsCommandTests extends ESTestCase {
terminal = listPlugins(home, params);
assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput());
}
public void testExistingIncompatibleMetaPlugin() throws Exception {
buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin");
PluginTestUtil.writePluginProperties(env.pluginsFile().resolve("meta_plugin").resolve("fake_plugin1"),
"description", "fake desc 1",
"name", "fake_plugin1",
"version", "1.0",
"elasticsearch.version", Version.fromString("1.0.0").toString(),
"java.version", System.getProperty("java.specification.version"),
"classname", "org.fake1");
buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2");
MockTerminal terminal = listPlugins(home);
String message = "plugin [fake_plugin1] was built for Elasticsearch version 1.0 but version " + Version.CURRENT + " is required";
assertEquals(
"fake_plugin2\nmeta_plugin\n\tfake_plugin1\n" + "WARNING: " + message + "\n",
terminal.getOutput());
String[] params = {"-s"};
terminal = listPlugins(home, params);
assertEquals("fake_plugin2\nmeta_plugin\n\tfake_plugin1\n", terminal.getOutput());
}
}

View File

@ -103,16 +103,6 @@ public class RemovePluginCommandTests extends ESTestCase {
"classname", "SomeClass");
}
void createMetaPlugin(String name, String... plugins) throws Exception {
PluginTestUtil.writeMetaPluginProperties(
env.pluginsFile().resolve(name),
"description", "dummy",
"name", name);
for (String plugin : plugins) {
createPlugin(env.pluginsFile().resolve(name), plugin);
}
}
static MockTerminal removePlugin(String name, Path home, boolean purge) throws Exception {
Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build());
MockTerminal terminal = new MockTerminal();
@ -159,19 +149,6 @@ public class RemovePluginCommandTests extends ESTestCase {
assertRemoveCleaned(env);
}
public void testBasicMeta() throws Exception {
createMetaPlugin("meta", "fake1");
createPlugin("other");
removePlugin("meta", home, randomBoolean());
assertFalse(Files.exists(env.pluginsFile().resolve("meta")));
assertTrue(Files.exists(env.pluginsFile().resolve("other")));
assertRemoveCleaned(env);
UserException exc =
expectThrows(UserException.class, () -> removePlugin("fake1", home, randomBoolean()));
assertThat(exc.getMessage(), containsString("plugin [fake1] not found"));
}
public void testBin() throws Exception {
createPlugin("fake");
Path binDir = env.binFile().resolve("fake");

View File

@ -6,7 +6,7 @@ See: https://github.com/elastic/docs
Snippets marked with `// CONSOLE` are automatically annotated with "VIEW IN
CONSOLE" and "COPY AS CURL" in the documentation and are automatically tested
by the command `gradle :docs:check`. To test just the docs from a single page,
use e.g. `gradle :docs:check -Dtests.method="\*rollover*"`.
use e.g. `gradle :docs:check -Dtests.method="*rollover*"`.
By default each `// CONSOLE` snippet runs as its own isolated test. You can
manipulate the test execution in the following ways:

View File

@ -209,6 +209,9 @@ The following projects appear to be abandoned:
* https://github.com/ropensci/elasticdsl[elasticdsl]:
A high-level R DSL for Elasticsearch, wrapping the elastic R client.
* https://github.com/UptakeOpenSource/uptasticsearch[uptasticsearch]:
An R client tailored to data science workflows.
The following projects appear to be abandoned:
* https://github.com/Tomesch/elasticsearch[elasticsearch]

View File

@ -45,8 +45,8 @@ returns a response, as an `ActiveShardCount`
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[shrink-index-request-settings]
--------------------------------------------------
<1> The settings to apply to the target index, which include the number of
shards to create for it
<1> The number of shards on the target of the shrink index request
<2> Remove the allocation requirement copied from the source index
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------

View File

@ -1,6 +1,8 @@
[[painless-execute-api]]
=== Painless execute API
experimental[The painless execute api is new and the request / response format may change in a breaking way in the future]
The Painless execute API allows an arbitrary script to be executed and a result to be returned.
[[painless-execute-api-parameters]]

View File

@ -13,8 +13,6 @@ The Elasticsearch repository contains examples of:
which contains a rescore plugin.
* a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/script-expert-scoring[Java plugin]
which contains a script plugin.
* a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/meta-plugin[Java plugin]
which contains a meta plugin.
These examples provide the bare bones needed to get started. For more
information about how to write a plugin, we recommend looking at the plugins
@ -120,19 +118,3 @@ AccessController.doPrivileged(
See http://www.oracle.com/technetwork/java/seccodeguide-139067.html[Secure Coding Guidelines for Java SE]
for more information.
[float]
=== Meta Plugin
It is also possible to bundle multiple plugins into a meta plugin.
A directory for each sub-plugin must be contained in a directory called `elasticsearch.
The meta plugin must also contain a file called `meta-plugin-descriptor.properties` in the directory named
`elasticsearch`.
The format for this file is described in detail in this example:
["source","properties",subs="attributes"]
--------------------------------------------------
include::{plugin-properties-files}/meta-plugin-descriptor.properties[]
--------------------------------------------------
A meta plugin can be installed/removed like a normal plugin with the `bin/elasticsearch-plugin` command.

View File

@ -1,6 +0,0 @@
include::getting-started.asciidoc[]
include::setup.asciidoc[]
include::upgrade.asciidoc[]

View File

@ -1,2 +0,0 @@
include::migration/index.asciidoc[]

View File

@ -1,26 +0,0 @@
include::api-conventions.asciidoc[]
include::docs.asciidoc[]
include::search.asciidoc[]
include::aggregations.asciidoc[]
include::indices.asciidoc[]
include::cat.asciidoc[]
include::cluster.asciidoc[]
include::query-dsl.asciidoc[]
include::mapping.asciidoc[]
include::analysis.asciidoc[]
include::modules.asciidoc[]
include::index-modules.asciidoc[]
include::ingest.asciidoc[]

View File

@ -1,10 +0,0 @@
include::how-to.asciidoc[]
include::testing.asciidoc[]
include::glossary.asciidoc[]
include::release-notes/highlights.asciidoc[]
include::release-notes.asciidoc[]

View File

@ -1,2 +0,0 @@
include::redirects.asciidoc[]

View File

@ -1,12 +1,79 @@
[[elasticsearch-reference]]
= Elasticsearch Reference
:include-xpack: true
:es-test-dir: {docdir}/../src/test
:plugins-examples-dir: {docdir}/../../plugins/examples
:xes-repo-dir: {docdir}/../../x-pack/docs/{lang}
:es-repo-dir: {docdir}
include::../Versions.asciidoc[]
include::index-shared1.asciidoc[]
include::index-shared2.asciidoc[]
include::index-shared3.asciidoc[]
include::index-shared4.asciidoc[]
include::index-shared5.asciidoc[]
include::getting-started.asciidoc[]
include::setup.asciidoc[]
include::{xes-repo-dir}/setup/setup-xes.asciidoc[]
include::{xes-repo-dir}/monitoring/configuring-monitoring.asciidoc[]
include::{xes-repo-dir}/security/configuring-es.asciidoc[]
include::{xes-repo-dir}/setup/setup-xclient.asciidoc[]
include::{xes-repo-dir}/settings/configuring-xes.asciidoc[]
include::{xes-repo-dir}/setup/bootstrap-checks-xes.asciidoc[]
include::upgrade.asciidoc[]
include::migration/index.asciidoc[]
include::api-conventions.asciidoc[]
include::docs.asciidoc[]
include::search.asciidoc[]
include::aggregations.asciidoc[]
include::indices.asciidoc[]
include::cat.asciidoc[]
include::cluster.asciidoc[]
include::query-dsl.asciidoc[]
include::mapping.asciidoc[]
include::analysis.asciidoc[]
include::modules.asciidoc[]
include::index-modules.asciidoc[]
include::ingest.asciidoc[]
include::{xes-repo-dir}/sql/index.asciidoc[]
include::{xes-repo-dir}/monitoring/index.asciidoc[]
include::{xes-repo-dir}/rollup/index.asciidoc[]
include::{xes-repo-dir}/rest-api/index.asciidoc[]
include::{xes-repo-dir}/commands/index.asciidoc[]
include::how-to.asciidoc[]
include::testing.asciidoc[]
include::glossary.asciidoc[]
include::release-notes/highlights.asciidoc[]
include::release-notes.asciidoc[]
include::redirects.asciidoc[]

View File

@ -1,12 +1 @@
[[elasticsearch-reference]]
= Elasticsearch Reference
:include-xpack: true
:es-test-dir: {docdir}/../src/test
:plugins-examples-dir: {docdir}/../../plugins/examples
:xes-repo-dir: {docdir}/../../x-pack/docs/{lang}
:es-repo-dir: {docdir}
include::../Versions.asciidoc[]
include::{xes-repo-dir}/index.asciidoc[]
include::index.asciidoc[]

View File

@ -25,7 +25,7 @@ PUT twitter
}
--------------------------------------------------
// CONSOLE
<1> Default for `number_of_shards` is 5
<1> Default for `number_of_shards` is 1
<2> Default for `number_of_replicas` is 1 (ie one replica for each primary shard)
The above second curl example shows how an index called `twitter` can be

View File

@ -62,7 +62,7 @@ the following request:
[source,js]
--------------------------------------------------
POST my_source_index/_shrink/my_target_index?copy_settings=true
POST my_source_index/_shrink/my_target_index
{
"settings": {
"index.routing.allocation.require._name": null, <1>
@ -106,7 +106,7 @@ and accepts `settings` and `aliases` parameters for the target index:
[source,js]
--------------------------------------------------
POST my_source_index/_shrink/my_target_index?copy_settings=true
POST my_source_index/_shrink/my_target_index
{
"settings": {
"index.number_of_replicas": 1,
@ -130,16 +130,6 @@ POST my_source_index/_shrink/my_target_index?copy_settings=true
NOTE: Mappings may not be specified in the `_shrink` request.
NOTE: By default, with the exception of `index.analysis`, `index.similarity`,
and `index.sort` settings, index settings on the source index are not copied
during a shrink operation. With the exception of non-copyable settings, settings
from the source index can be copied to the target index by adding the URL
parameter `copy_settings=true` to the request. Note that `copy_settings` can not
be set to `false`. The parameter `copy_settings` will be removed in 8.0.0
deprecated[6.4.0, not copying settings is deprecated, copying settings will be
the default behavior in 7.x]
[float]
=== Monitoring the shrink process

View File

@ -123,7 +123,7 @@ the following request:
[source,js]
--------------------------------------------------
POST my_source_index/_split/my_target_index?copy_settings=true
POST my_source_index/_split/my_target_index
{
"settings": {
"index.number_of_shards": 2
@ -158,7 +158,7 @@ and accepts `settings` and `aliases` parameters for the target index:
[source,js]
--------------------------------------------------
POST my_source_index/_split/my_target_index?copy_settings=true
POST my_source_index/_split/my_target_index
{
"settings": {
"index.number_of_shards": 5 <1>
@ -177,16 +177,6 @@ POST my_source_index/_split/my_target_index?copy_settings=true
NOTE: Mappings may not be specified in the `_split` request.
NOTE: By default, with the exception of `index.analysis`, `index.similarity`,
and `index.sort` settings, index settings on the source index are not copied
during a split operation. With the exception of non-copyable settings, settings
from the source index can be copied to the target index by adding the URL
parameter `copy_settings=true` to the request. Note that `copy_settings` can not
be set to `false`. The parameter `copy_settings` will be removed in 8.0.0
deprecated[6.4.0, not copying settings is deprecated, copying settings will be
the default behavior in 7.x]
[float]
=== Monitoring the split process

View File

@ -34,6 +34,7 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x.
* <<breaking_70_api_changes>>
* <<breaking_70_java_changes>>
* <<breaking_70_settings_changes>>
* <<breaking_70_scripting_changes>>
include::migrate_7_0/aggregations.asciidoc[]
@ -47,3 +48,4 @@ include::migrate_7_0/plugins.asciidoc[]
include::migrate_7_0/api.asciidoc[]
include::migrate_7_0/java.asciidoc[]
include::migrate_7_0/settings.asciidoc[]
include::migrate_7_0/scripting.asciidoc[]

View File

@ -65,3 +65,13 @@ deprecated in 6.3.0 and now removed in 7.0.0.
In the past, `fields` could be provided either as a parameter, or as part of the request
body. Specifying `fields` in the request body as opposed to a parameter was deprecated
in 6.4.0, and is now unsupported in 7.0.0.
==== `copy_settings` is deprecated on shrink and split APIs
Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink
and split operations. Starting with Elasticsearch 7.0.0, the default behavior
will be for such settings to be copied on such operations. To enable users in
6.4.0 to transition in 6.4.0 to the default behavior in 7.0.0, the
`copy_settings` parameter was added on the REST layer. As this behavior will be
the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in
8.0.0.

View File

@ -0,0 +1,13 @@
[[breaking_70_scripting_changes]]
=== Scripting changes
==== getDate() and getDates() removed
Fields of type `long` and `date` had `getDate()` and `getDates()` methods
(for multi valued fields) to get an object with date specific helper methods
for the current doc value. In 5.3.0, `date` fields were changed to expose
this same date object directly when calling `doc["myfield"].value`, and
the getter methods for date objects were deprecated. These methods have
now been removed. Instead, use `.value` on `date` fields, or explicitly
parse `long` fields into a date object using
`Instance.ofEpochMillis(doc["myfield"].value)`.

View File

@ -0,0 +1 @@
a3dba337d06e1f5930cb7ae638c1655b99ce0cb7

View File

@ -1 +0,0 @@
f72ad4b6474c2d59b0eed0ca84eddd1f99d29129

View File

@ -74,16 +74,12 @@ class org.elasticsearch.index.fielddata.ScriptDocValues$Longs {
Long get(int)
long getValue()
List getValues()
org.joda.time.ReadableDateTime getDate()
List getDates()
}
class org.elasticsearch.index.fielddata.ScriptDocValues$Dates {
org.joda.time.ReadableDateTime get(int)
org.joda.time.ReadableDateTime getValue()
List getValues()
org.joda.time.ReadableDateTime getDate()
List getDates()
}
class org.elasticsearch.index.fielddata.ScriptDocValues$Doubles {

View File

@ -106,28 +106,6 @@ setup:
source: "doc.date.value"
- match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' }
- do:
warnings:
- getDate is no longer necessary on date fields as the value is now a date.
search:
body:
script_fields:
field:
script:
source: "doc['date'].date"
- match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' }
- do:
warnings:
- getDates is no longer necessary on date fields as the values are now dates.
search:
body:
script_fields:
field:
script:
source: "doc['date'].dates.get(0)"
- match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' }
---
"geo_point":
- do:
@ -213,28 +191,6 @@ setup:
source: "doc['long'].value"
- match: { hits.hits.0.fields.field.0: 12348732141234 }
- do:
warnings:
- getDate on numeric fields is deprecated. Use a date field to get dates.
search:
body:
script_fields:
field:
script:
source: "doc['long'].date"
- match: { hits.hits.0.fields.field.0: '2361-04-26T03:22:21.234Z' }
- do:
warnings:
- getDates on numeric fields is deprecated. Use a date field to get dates.
search:
body:
script_fields:
field:
script:
source: "doc['long'].dates.get(0)"
- match: { hits.hits.0.fields.field.0: '2361-04-26T03:22:21.234Z' }
---
"integer":
- do:

View File

@ -0,0 +1 @@
473a7f4d955f132bb498482648266653f8da85bd

View File

@ -1 +0,0 @@
b4e19c53f29fa9b40bd7ad12ff598e3f08d507a3

View File

@ -0,0 +1 @@
c5a72b9a790e2552248c8bbb36af47c4c399ba27

View File

@ -1 +0,0 @@
23dd8cb3834f3641d9b3e8bc3d38281389a597bc

View File

@ -0,0 +1 @@
14f680ab9b886c7c5224ff682a7fa70b6df44a05

View File

@ -1 +0,0 @@
e8119a17448a6f5512ded0bd2a6faa7fc8e70890

View File

@ -0,0 +1 @@
e033c68c9ec1ba9cd8439758adf7eb5fee22acef

View File

@ -1 +0,0 @@
336d9ac698066b8cf8a448f193e4a29ef163baa8

View File

@ -0,0 +1 @@
08df0a5029f11c109b22064dec78c05dfa25f9e3

View File

@ -1 +0,0 @@
e1e77951a83fc6a9deab884773314992fefa14f3

View File

@ -0,0 +1 @@
a9d1819b2b13f134f6a605ab5a59ce3c602c0460

View File

@ -1 +0,0 @@
d4da149a16673c6326f4898ad877756259f676f8

View File

@ -0,0 +1 @@
47bc91ccb0cdf0c1c404646ffe0d5fd6b020a4ab

View File

@ -1 +0,0 @@
ab4141b43cc6c2680d5f5a0b5086299f38ebec4d

View File

@ -1,28 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// A meta plugin packaging example that bundles multiple plugins in a single zip.
apply plugin: 'elasticsearch.es-meta-plugin'
es_meta_plugin {
name 'meta-plugin'
description 'example meta plugin'
plugins = ['dummy-plugin1', 'dummy-plugin2']
}

View File

@ -1,29 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'dummy-plugin1'
description 'A dummy plugin'
classname 'org.elasticsearch.example.DummyPlugin1'
}
test.enabled = false
integTestRunner.enabled = false

View File

@ -1,29 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.example;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import java.util.List;
import static java.util.Collections.singletonList;
public class DummyPlugin1 extends Plugin {}

View File

@ -1,29 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'dummy-plugin2'
description 'Another dummy plugin'
classname 'org.elasticsearch.example.DummyPlugin2'
}
test.enabled = false
integTestRunner.enabled = false

View File

@ -1,29 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.example;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import java.util.List;
import static java.util.Collections.singletonList;
public class DummyPlugin2 extends Plugin {}

View File

@ -1,4 +0,0 @@
# The name of the meta plugin
name=my_meta_plugin
# The description of the meta plugin
description=A meta plugin example

View File

@ -1,39 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
public class SmokeTestPluginsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public SmokeTestPluginsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
}

View File

@ -1,14 +0,0 @@
# Integration tests for testing meta plugins
#
"Check meta plugin install":
- do:
cluster.state: {}
# Get master node id
- set: { master_node: master }
- do:
nodes.info: {}
- match: { nodes.$master.plugins.0.name: dummy-plugin1 }
- match: { nodes.$master.plugins.1.name: dummy-plugin2 }

View File

@ -54,7 +54,7 @@ public class ExpertScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public <T> T compile(String scriptName, String scriptSource, ScriptContext<T> context, Map<String, String> params) {
if (context.equals(SearchScript.CONTEXT) == false) {
if (context.equals(SearchScript.SCRIPT_SCORE_CONTEXT) == false) {
throw new IllegalArgumentException(getType() + " scripts cannot be used for context [" + context.name + "]");
}
// we use the script "source" as the script identifier

View File

@ -170,91 +170,6 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase {
}
}
/**
* Two plugins in a meta module - one with a controller daemon and one without.
*/
public void testControllerSpawnMeta() throws Exception {
runTestControllerSpawnMeta(Environment::pluginsFile, false);
runTestControllerSpawnMeta(Environment::modulesFile, true);
}
private void runTestControllerSpawnMeta(
final Function<Environment, Path> pluginsDirFinder, final boolean expectSpawn) throws Exception {
/*
* On Windows you can not directly run a batch file - you have to run cmd.exe with the batch
* file as an argument and that's out of the remit of the controller daemon process spawner.
*/
assumeFalse("This test does not work on Windows", Constants.WINDOWS);
Path esHome = createTempDir().resolve("esHome");
Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString());
Settings settings = settingsBuilder.build();
Environment environment = TestEnvironment.newEnvironment(settings);
Path metaModule = pluginsDirFinder.apply(environment).resolve("meta_module");
Files.createDirectories(environment.modulesFile());
Files.createDirectories(metaModule);
PluginTestUtil.writeMetaPluginProperties(
metaModule,
"description", "test_plugin",
"name", "meta_plugin",
"plugins", "test_plugin,other_plugin");
// this plugin will have a controller daemon
Path plugin = metaModule.resolve("test_plugin");
Files.createDirectories(plugin);
PluginTestUtil.writePluginProperties(
plugin,
"description", "test_plugin",
"version", Version.CURRENT.toString(),
"elasticsearch.version", Version.CURRENT.toString(),
"name", "test_plugin",
"java.version", "1.8",
"classname", "TestPlugin",
"has.native.controller", "true");
Path controllerProgram = Platforms.nativeControllerPath(plugin);
createControllerProgram(controllerProgram);
// this plugin will not have a controller daemon
Path otherPlugin = metaModule.resolve("other_plugin");
Files.createDirectories(otherPlugin);
PluginTestUtil.writePluginProperties(
otherPlugin,
"description", "other_plugin",
"version", Version.CURRENT.toString(),
"elasticsearch.version", Version.CURRENT.toString(),
"name", "other_plugin",
"java.version", "1.8",
"classname", "OtherPlugin",
"has.native.controller", "false");
Spawner spawner = new Spawner();
spawner.spawnNativeControllers(environment);
List<Process> processes = spawner.getProcesses();
if (expectSpawn) {
// as there should only be a reference in the list for the plugin that had the controller daemon, we expect one here
assertThat(processes, hasSize(1));
Process process = processes.get(0);
final InputStreamReader in =
new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8);
try (BufferedReader stdoutReader = new BufferedReader(in)) {
String line = stdoutReader.readLine();
assertEquals("I am alive", line);
spawner.close();
// fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling
assertTrue(process.waitFor(1, TimeUnit.SECONDS));
}
} else {
assertThat(processes, hasSize(0));
}
}
public void testControllerSpawnWithIncorrectDescriptor() throws IOException {
// this plugin will have a controller daemon
Path esHome = createTempDir().resolve("esHome");

View File

@ -1,8 +1,8 @@
---
"Shrink index via API":
- skip:
version: " - 6.3.99"
reason: expects warnings that pre-6.4.0 will not send
version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send
features: "warnings"
# creates an index with one document solely allocated on the master node
# and shrinks it into a new index with a single shard
@ -67,8 +67,6 @@
body:
settings:
index.number_of_replicas: 0
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"
- do:
cluster.health:

View File

@ -1,8 +1,8 @@
---
"Shrink index ignores target template mapping":
- skip:
version: " - 6.3.99"
reason: expects warnings that pre-6.4.0 will not send
version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send
features: "warnings"
- do:
@ -71,8 +71,6 @@
body:
settings:
index.number_of_replicas: 0
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"
- do:
cluster.health:

View File

@ -1,8 +1,8 @@
---
"Copy settings during shrink index":
- skip:
version: " - 6.3.99"
reason: expects warnings that pre-6.4.0 will not send
version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send
features: "warnings"
- do:
@ -48,6 +48,8 @@
settings:
index.number_of_replicas: 0
index.merge.scheduler.max_thread_count: 2
warnings:
- "parameter [copy_settings] is deprecated and will be removed in 8.0.0"
- do:
cluster.health:
@ -63,19 +65,17 @@
- match: { copy-settings-target.settings.index.blocks.write: "true" }
- match: { copy-settings-target.settings.index.routing.allocation.include._id: $master }
# now we do a actual shrink and do not copy settings (by default)
# now we do a actual shrink and copy settings (by default)
- do:
indices.shrink:
index: "source"
target: "no-copy-settings-target"
target: "default-copy-settings-target"
wait_for_active_shards: 1
master_timeout: 10s
body:
settings:
index.number_of_replicas: 0
index.merge.scheduler.max_thread_count: 2
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"
- do:
cluster.health:
@ -83,13 +83,13 @@
- do:
indices.get_settings:
index: "no-copy-settings-target"
index: "default-copy-settings-target"
# only the request setting should be copied
- is_false: no-copy-settings-target.settings.index.merge.scheduler.max_merge_count
- match: { no-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" }
- is_false: no-copy-settings-target.settings.index.blocks.write
- is_false: no-copy-settings-target.settings.index.routing.allocation.include._id
# settings should be copied
- match: { default-copy-settings-target.settings.index.merge.scheduler.max_merge_count: "4" }
- match: { default-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" }
- match: { default-copy-settings-target.settings.index.blocks.write: "true" }
- match: { default-copy-settings-target.settings.index.routing.allocation.include._id: $master }
# now we do a actual shrink and try to set no copy settings
- do:

View File

@ -33,8 +33,8 @@ setup:
---
"Split index via API":
- skip:
version: " - 6.3.99"
reason: expects warnings that pre-6.4.0 will not send
version: " - 6.9.99"
reason: pre-7.0.0 will send warnings
features: "warnings"
# make it read-only
@ -61,8 +61,6 @@ setup:
settings:
index.number_of_replicas: 0
index.number_of_shards: 4
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"
- do:
cluster.health:
@ -108,8 +106,7 @@ setup:
"Split from 1 to N":
- skip:
version: " - 6.99.99"
reason: Automatic preparation for splitting was added in 7.0.0
features: "warnings"
reason: automatic preparation for splitting was added in 7.0.0
- do:
indices.create:
index: source_one_shard
@ -163,8 +160,6 @@ setup:
settings:
index.number_of_replicas: 0
index.number_of_shards: 5
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"
- do:
cluster.health:
@ -205,13 +200,11 @@ setup:
- match: { _id: "3" }
- match: { _source: { foo: "hello world 3" } }
---
"Create illegal split indices":
- skip:
version: " - 6.3.99"
reason: expects warnings that pre-6.4.0 will not send
version: " - 6.9.99"
reason: pre-7.0.0 will send warnings
features: "warnings"
# try to do an illegal split with number_of_routing_shards set
@ -227,8 +220,6 @@ setup:
index.number_of_replicas: 0
index.number_of_shards: 4
index.number_of_routing_shards: 8
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"
# try to do an illegal split with illegal number_of_shards
- do:
@ -242,5 +233,3 @@ setup:
settings:
index.number_of_replicas: 0
index.number_of_shards: 6
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"

View File

@ -1,8 +1,8 @@
---
"Split index ignores target template mapping":
- skip:
version: " - 6.3.99"
reason: expects warnings that pre-6.4.0 will not send
version: " - 6.9.99"
reason: pre-7.0.0 will send warnings
features: "warnings"
# create index
@ -65,8 +65,6 @@
settings:
index.number_of_shards: 2
index.number_of_replicas: 0
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"
- do:
cluster.health:

View File

@ -1,8 +1,8 @@
---
"Copy settings during split index":
- skip:
version: " - 6.3.99"
reason: expects warnings that pre-6.4.0 will not send
version: " - 6.9.99"
reason: expects warnings that pre-7.0.0 will not send
features: "warnings"
- do:
@ -50,6 +50,9 @@
index.number_of_replicas: 0
index.number_of_shards: 2
index.merge.scheduler.max_thread_count: 2
warnings:
- "parameter [copy_settings] is deprecated and will be removed in 8.0.0"
- do:
cluster.health:
@ -65,11 +68,11 @@
- match: { copy-settings-target.settings.index.blocks.write: "true" }
- match: { copy-settings-target.settings.index.routing.allocation.include._id: $master }
# now we do a actual shrink and do not copy settings (by default)
# now we do a actual shrink and copy settings (by default)
- do:
indices.split:
index: "source"
target: "no-copy-settings-target"
target: "default-copy-settings-target"
wait_for_active_shards: 1
master_timeout: 10s
body:
@ -77,8 +80,6 @@
index.number_of_replicas: 0
index.number_of_shards: 2
index.merge.scheduler.max_thread_count: 2
warnings:
- "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior"
- do:
cluster.health:
@ -86,13 +87,13 @@
- do:
indices.get_settings:
index: "no-copy-settings-target"
index: "default-copy-settings-target"
# only the request setting should be copied
- is_false: no-copy-settings-target.settings.index.merge.scheduler.max_merge_count
- match: { no-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" }
- is_false: no-copy-settings-target.settings.index.blocks.write
- is_false: no-copy-settings-target.settings.index.routing.allocation.include._id
# settings should be copied
- match: { default-copy-settings-target.settings.index.merge.scheduler.max_merge_count: "4" }
- match: { default-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" }
- match: { default-copy-settings-target.settings.index.blocks.write: "true" }
- match: { default-copy-settings-target.settings.index.routing.allocation.include._id: $master }
- do:
catch: /illegal_argument_exception/

View File

@ -0,0 +1 @@
b70d03784d06a643e096fae4d959200aa246ba16

View File

@ -1 +0,0 @@
f465718b3db829e7660009aac2c1211fd5d74ca0

View File

@ -0,0 +1 @@
d660a63ac0f7ab2772a45ae518518472bf620620

View File

@ -1 +0,0 @@
d502441e830e1a9d30270442f8e3fd8317fe7bba

View File

@ -0,0 +1 @@
bf8f9e8284a54af18545574cb4a530da0deb968a

View File

@ -1 +0,0 @@
5167fb0a14434cb10ec3224e9e32ca668e9f9ad4

View File

@ -0,0 +1 @@
9eaae9dcd4ec88227475cb81d3be9afa767f1b22

View File

@ -1 +0,0 @@
488aeecf49413b63a404989ae00b07b20951e76e

View File

@ -0,0 +1 @@
cd15f0008742c84899d678cb0cecda06d0a6d63e

View File

@ -1 +0,0 @@
107755edd67cddb3fb9817de50c0bed3a10da19c

View File

@ -0,0 +1 @@
5ce38b8610a7f402f2da3b0e408e508151d979c5

View File

@ -1 +0,0 @@
9226fab3b9c6250af52b87061f637c0f8e3114b6

View File

@ -0,0 +1 @@
53819f03a07050a4af28361d64395c86f2cea008

View File

@ -1 +0,0 @@
2b7bf384c1933225972f04224d867ec800f5e3a7

View File

@ -0,0 +1 @@
8cdc0e2b65d146ed11f4d2507109e530d59ff33d

View File

@ -1 +0,0 @@
18b770c35db8757dc036b1506870a4ddaad7b1ab

View File

@ -0,0 +1 @@
e56090463703112ad64ad457d18bae9a5b2966b8

View File

@ -1 +0,0 @@
683f6436938c67709d0c665c9e1fdef7bd893e4a

View File

@ -0,0 +1 @@
9faf974b77058e44a6d35e956db4f5fb67389dfa

View File

@ -1 +0,0 @@
1df20ba64b9aa68f1fa9a15c9ff75f87f94dec47

View File

@ -0,0 +1 @@
b852b1fe70ef70736b2b1a9ad57eb93cbaed0423

View File

@ -1 +0,0 @@
895ca714fc62b66ba63d43931730cdc4ef56d35f

View File

@ -0,0 +1 @@
d2fa99ec7140fcf35db16ac1feb78ef142750d39

View File

@ -1 +0,0 @@
95ab7e9421bbeb8229d83ac72700b37a521fdf4f

View File

@ -0,0 +1 @@
c9963f60d3a0924b877a6f910650c5f2384822a0

View File

@ -1 +0,0 @@
773ff8c8425d32609ccec6956759ad377dfb8f6b

View File

@ -0,0 +1 @@
3f33ba54da5e0e125f4c5ef7dd800dd6185e4f61

View File

@ -1 +0,0 @@
ea711541e243ee768f950041e6e2843d0cc5e695

View File

@ -0,0 +1 @@
bb3c18c987395dae6fe63744f5a50fd367ea5a74

View File

@ -1 +0,0 @@
2ca005cf25722ba3777ed93f720f40c937081fa6

View File

@ -169,7 +169,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final int V_6_2_5_ID = 6020599;
public static final Version V_6_2_5 = new Version(V_6_2_5_ID, LUCENE_7_2_1);
public static final int V_6_3_0_ID = 6030099;
public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_0);
public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final int V_6_4_0_ID = 6040099;
public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final int V_7_0_0_alpha1_ID = 7000001;

Some files were not shown because too many files have changed in this diff Show More