Merge branch 'master' into getClassLoader
commit 46377778a9

@@ -109,6 +109,7 @@ subprojects {
    "org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
    "org.elasticsearch:elasticsearch:${version}": ':core',
    "org.elasticsearch:test-framework:${version}": ':test-framework',
    "org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip',
    "org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
    "org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar',
    "org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm',

@@ -23,40 +23,41 @@ import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.test.RunTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.bundling.Zip

/**
 * Encapsulates build configuration for an Elasticsearch plugin.
 */
class PluginBuildPlugin extends BuildPlugin {
public class PluginBuildPlugin extends BuildPlugin {

    @Override
    void apply(Project project) {
    public void apply(Project project) {
        super.apply(project)
        configureDependencies(project)
        // this afterEvaluate must happen before the afterEvaluate added by integTest configure,
        // this afterEvaluate must happen before the afterEvaluate added by integTest creation,
        // so that the file name resolution for installing the plugin will be set up
        project.afterEvaluate {
            String name = project.pluginProperties.extension.name
            project.jar.baseName = name
            project.bundlePlugin.baseName = name

            project.integTest.dependsOn(project.bundlePlugin)
            project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
            project.tasks.run.dependsOn(project.bundlePlugin)
            project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
        }
        RestIntegTestTask.configure(project)
        RunTask.configure(project)
        Task bundle = configureBundleTask(project)
        project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar }
        project.configurations.getByName('default').extendsFrom = []
        project.artifacts {
            archives bundle
            'default' bundle
            if (project.path.startsWith(':modules:')) {
                project.integTest.clusterConfig.module(project)
                project.tasks.run.clusterConfig.module(project)
            } else {
                project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
                project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
            }
        }
        createIntegTestTask(project)
        createBundleTask(project)
        project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build
    }

    static void configureDependencies(Project project) {
    private static void configureDependencies(Project project) {
        project.dependencies {
            provided "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}"
            testCompile "org.elasticsearch:test-framework:${project.versions.elasticsearch}"

@@ -72,21 +73,36 @@ class PluginBuildPlugin extends BuildPlugin {
        }
    }

    static Task configureBundleTask(Project project) {
        PluginPropertiesTask buildProperties = project.tasks.create(name: 'pluginProperties', type: PluginPropertiesTask)
        File pluginMetadata = project.file("src/main/plugin-metadata")
        project.sourceSets.test {
            output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
            resources {
                srcDir pluginMetadata
            }
        }
        Task bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties])
        bundle.configure {
            from buildProperties
            from pluginMetadata
            from project.jar
            from bundle.project.configurations.runtime - bundle.project.configurations.provided
    /** Adds an integTest task which runs rest tests */
    private static void createIntegTestTask(Project project) {
        RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
        integTest.mustRunAfter(project.precommit, project.test)
        project.check.dependsOn(integTest)
    }

    /**
     * Adds a bundlePlugin task which builds the zip containing the plugin jars,
     * metadata, properties, and packaging files
     */
    private static void createBundleTask(Project project) {
        File pluginMetadata = project.file('src/main/plugin-metadata')

        // create a task to build the properties file for this plugin
        PluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', PluginPropertiesTask.class)

        // add the plugin properties and metadata to test resources, so unit tests can
        // know about the plugin (used by test security code to statically initialize the plugin in unit tests)
        SourceSet testSourceSet = project.sourceSets.test
        testSourceSet.output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
        testSourceSet.resources.srcDir(pluginMetadata)

        // create the actual bundle task, which zips up all the files for the plugin
        Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) {
            from buildProperties // plugin properties file
            from pluginMetadata // metadata (eg custom security policy)
            from project.jar // this plugin's jar
            from project.configurations.runtime - project.configurations.provided // the dep jars
            // extra files for the plugin to go into the zip
            from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging
            from('src/main') {
                include 'config/**'

@@ -97,6 +113,13 @@ class PluginBuildPlugin extends BuildPlugin {
            }
        }
        project.assemble.dependsOn(bundle)
        return bundle

        // remove jar from the archives (things that will be published), and set it to the zip
        project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar }
        project.artifacts.add('archives', bundle)

        // also make the zip the default artifact (used when depending on this project)
        project.configurations.getByName('default').extendsFrom = []
        project.artifacts.add('default', bundle)
    }
}

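For orientation, a plugin's own build.gradle mostly just applies this plugin and fills in the properties extension; everything above (bundlePlugin, integTest, run) is wired automatically. A hypothetical minimal example, assuming the conventional 'elasticsearch.esplugin' plugin id and 'esplugin' extension name (neither appears in this diff):

    // hypothetical consuming build.gradle
    apply plugin: 'elasticsearch.esplugin'

    esplugin {
        name 'example-plugin'        // becomes the jar and bundlePlugin baseName
        description 'Example plugin' // written into the generated plugin properties
    }
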
@@ -27,7 +27,7 @@ import org.gradle.api.tasks.Input
class ClusterConfiguration {

    @Input
    String distribution = 'zip'
    String distribution = 'integ-test-zip'

    @Input
    int numNodes = 1

@@ -71,6 +71,8 @@ class ClusterConfiguration {

    LinkedHashMap<String, Object> plugins = new LinkedHashMap<>()

    List<Project> modules = new ArrayList<>()

    LinkedHashMap<String, Object[]> setupCommands = new LinkedHashMap<>()

    @Input

@@ -93,6 +95,12 @@ class ClusterConfiguration {
        plugins.put(name, pluginProject)
    }

    /** Add a module to the cluster. The project must be an esplugin and have a single zip default artifact. */
    @Input
    void module(Project moduleProject) {
        modules.add(moduleProject)
    }

    @Input
    void setupCommand(String name, Object... args) {
        setupCommands.put(name, args)

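To make the new knobs concrete, this is how a consuming build might exercise them through the integTest cluster block; a sketch, where the module project path and the setup command are illustrative, not taken from this diff:

    integTest {
        cluster {
            distribution = 'zip'                        // override the new 'integ-test-zip' default
            module project(':modules:lang-expression')  // installed from the module's bundlePlugin zip
            setupCommand 'installDummy', 'bin/plugin', 'install', 'dummy'
        }
    }
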
@@ -60,7 +60,12 @@ class ClusterFormationTasks {
    /** Adds a dependency on the given distribution */
    static void configureDistributionDependency(Project project, String distro) {
        String elasticsearchVersion = VersionProperties.elasticsearch
        String packaging = distro == 'tar' ? 'tar.gz' : distro
        String packaging = distro
        if (distro == 'tar') {
            packaging = 'tar.gz'
        } else if (distro == 'integ-test-zip') {
            packaging = 'zip'
        }
        project.configurations {
            elasticsearchDistro
        }

@@ -103,6 +108,12 @@ class ClusterFormationTasks {
        setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
        setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)

        // install modules
        for (Project module : node.config.modules) {
            String actionName = pluginTaskName('install', module.name, 'Module')
            setup = configureInstallModuleTask(taskName(task, node, actionName), project, setup, node, module)
        }

        // install plugins
        for (Map.Entry<String, Object> plugin : node.config.plugins.entrySet()) {
            String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin')

@@ -138,6 +149,7 @@ class ClusterFormationTasks {
       by the source tree. If it isn't then Bad Things(TM) will happen. */
    Task extract
    switch (node.config.distribution) {
        case 'integ-test-zip':
        case 'zip':
            extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
                from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }

@@ -286,6 +298,20 @@ class ClusterFormationTasks {
        return copyPlugins
    }

    static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) {
        if (node.config.distribution != 'integ-test-zip') {
            throw new GradleException("Module ${module.path} is not allowed to be installed in distributions other than integ-test-zip because they should already have all modules bundled!")
        }
        if (module.plugins.hasPlugin(PluginBuildPlugin) == false) {
            throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin")
        }
        Copy installModule = project.tasks.create(name, Copy.class)
        installModule.dependsOn(setup)
        installModule.into(new File(node.homeDir, "modules/${module.name}"))
        installModule.from({ project.zipTree(module.tasks.bundlePlugin.outputs.files.singleFile) })
        return installModule
    }

    static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Object plugin) {
        FileCollection pluginZip
        if (plugin instanceof Project) {

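The distro-to-packaging mapping above is easy to sanity-check in isolation; a runnable sketch of the same logic (the closure is illustrative, not part of the build code):

    def mapPackaging = { String distro ->
        if (distro == 'tar') return 'tar.gz'          // tar distributions publish as tar.gz
        if (distro == 'integ-test-zip') return 'zip'  // integ-test-zip is still a zip artifact
        return distro                                 // zip, rpm, etc. pass through unchanged
    }
    assert mapPackaging('tar') == 'tar.gz'
    assert mapPackaging('integ-test-zip') == 'zip'
    assert mapPackaging('rpm') == 'rpm'
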
@@ -173,6 +173,7 @@ class NodeInfo {
    static File homeDir(File baseDir, String distro) {
        String path
        switch (distro) {
            case 'integ-test-zip':
            case 'zip':
            case 'tar':
                path = "elasticsearch-${VersionProperties.elasticsearch}"

@@ -188,8 +189,8 @@ class NodeInfo {
    }

    static File confDir(File baseDir, String distro) {
        String path
        switch (distro) {
            case 'integ-test-zip':
            case 'zip':
            case 'tar':
                return new File(homeDir(baseDir, distro), 'config')

@@ -31,55 +31,38 @@ import org.gradle.util.ConfigureUtil
 * Runs integration tests, but first starts an ES cluster,
 * and passes the ES cluster info as parameters to the tests.
 */
class RestIntegTestTask extends RandomizedTestingTask {
public class RestIntegTestTask extends RandomizedTestingTask {

    ClusterConfiguration clusterConfig = new ClusterConfiguration()

    /** Flag indicating whether the rest tests in the rest spec should be run. */
    @Input
    boolean includePackaged = false

    static RestIntegTestTask configure(Project project) {
        Map integTestOptions = [
            name: 'integTest',
            type: RestIntegTestTask,
            dependsOn: 'testClasses',
            group: JavaBasePlugin.VERIFICATION_GROUP,
            description: 'Runs rest tests against an elasticsearch cluster.'
        ]
        RestIntegTestTask integTest = project.tasks.create(integTestOptions)
        integTest.configure(BuildPlugin.commonTestConfig(project))
        integTest.configure {
            include '**/*IT.class'
            systemProperty 'tests.rest.load_packaged', 'false'
        }
        RandomizedTestingTask test = project.tasks.findByName('test')
        if (test != null) {
            integTest.classpath = test.classpath
            integTest.testClassesDir = test.testClassesDir
            integTest.mustRunAfter(test)
        }
        integTest.mustRunAfter(project.precommit)
        project.check.dependsOn(integTest)
    public RestIntegTestTask() {
        description = 'Runs rest tests against an elasticsearch cluster.'
        group = JavaBasePlugin.VERIFICATION_GROUP
        dependsOn(project.testClasses)
        classpath = project.sourceSets.test.runtimeClasspath
        testClassesDir = project.sourceSets.test.output.classesDir

        // start with the common test configuration
        configure(BuildPlugin.commonTestConfig(project))
        // override/add more for rest tests
        parallelism = '1'
        include('**/*IT.class')
        systemProperty('tests.rest.load_packaged', 'false')

        // copy the rest spec/tests into the test resources
        RestSpecHack.configureDependencies(project)
        project.afterEvaluate {
            integTest.dependsOn(RestSpecHack.configureTask(project, integTest.includePackaged))
            dependsOn(RestSpecHack.configureTask(project, includePackaged))
            systemProperty('tests.cluster', "localhost:${clusterConfig.baseTransportPort}")
        }
        return integTest
    }

    RestIntegTestTask() {
        // this must run after all projects have been configured, so we know any project
        // references can be accessed as a fully configured project
        project.gradle.projectsEvaluated {
            Task test = project.tasks.findByName('test')
            if (test != null) {
                mustRunAfter(test)
            }
            ClusterFormationTasks.setup(project, this, clusterConfig)
            configure {
                parallelism '1'
                systemProperty 'tests.cluster', "localhost:${clusterConfig.baseTransportPort}"
            }
        }
    }

@@ -92,11 +75,11 @@ class RestIntegTestTask extends RandomizedTestingTask {
    }

    @Input
    void cluster(Closure closure) {
    public void cluster(Closure closure) {
        ConfigureUtil.configure(closure, clusterConfig)
    }

    ClusterConfiguration getCluster() {
    public ClusterConfiguration getCluster() {
        return clusterConfig
    }
}

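With the constructor-based wiring, a build script no longer calls a static configure() helper; it just declares the task and tunes the cluster. A hypothetical usage sketch (the two-node setting is illustrative):

    task integTest(type: RestIntegTestTask) {
        includePackaged = true   // also run the rest tests packaged in the rest spec
        cluster {
            distribution = 'zip'
            numNodes = 2
        }
    }
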
@@ -28,12 +28,12 @@ import org.gradle.api.tasks.Copy
 * currently must be available on the local filesystem. This class encapsulates
 * setting up tasks to copy the rest spec api to test resources.
 */
class RestSpecHack {
public class RestSpecHack {
    /**
     * Sets dependencies needed to copy the rest spec.
     * @param project The project to add rest spec dependency to
     */
    static void configureDependencies(Project project) {
    public static void configureDependencies(Project project) {
        project.configurations {
            restSpec
        }

@@ -48,7 +48,7 @@ class RestSpecHack {
     * @param project The project to add the copy task to
     * @param includePackagedTests true if the packaged tests should be copied, false otherwise
     */
    static Task configureTask(Project project, boolean includePackagedTests) {
    public static Task configureTask(Project project, boolean includePackagedTests) {
        Map copyRestSpecProps = [
            name : 'copyRestSpec',
            type : Copy,

@@ -65,7 +65,6 @@ class RestSpecHack {
        project.idea {
            module {
                if (scopes.TEST != null) {
                    // TODO: need to add the TEST scope somehow for rest test plugin...
                    scopes.TEST.plus.add(project.configurations.restSpec)
                }
            }

@@ -18,22 +18,19 @@
 */
package org.elasticsearch.gradle.test

import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.gradle.api.Plugin
import org.gradle.api.Project

/** Configures the build to have a rest integration test. */
class RestTestPlugin implements Plugin<Project> {
/** A plugin to add rest integration tests. Used for qa projects. */
public class RestTestPlugin implements Plugin<Project> {

    @Override
    void apply(Project project) {
    public void apply(Project project) {
        project.pluginManager.apply(StandaloneTestBasePlugin)

        RandomizedTestingTask integTest = RestIntegTestTask.configure(project)
        RestSpecHack.configureDependencies(project)
        integTest.configure {
            classpath = project.sourceSets.test.runtimeClasspath
            testClassesDir project.sourceSets.test.output.classesDir
        }
        RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
        integTest.cluster.distribution = 'zip' // rest tests should run with the real zip
        integTest.mustRunAfter(project.precommit)
        project.check.dependsOn(integTest)
    }
}

@@ -2,13 +2,17 @@ package org.elasticsearch.gradle.test

import org.gradle.api.DefaultTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.internal.tasks.options.Option
import org.gradle.util.ConfigureUtil

class RunTask extends DefaultTask {
public class RunTask extends DefaultTask {

    ClusterConfiguration clusterConfig = new ClusterConfiguration(baseHttpPort: 9200, baseTransportPort: 9300, daemonize: false)

    RunTask() {
    public RunTask() {
        description = "Runs elasticsearch with '${project.path}'"
        group = 'Verification'
        project.afterEvaluate {
            ClusterFormationTasks.setup(project, this, clusterConfig)
        }

@@ -22,11 +26,10 @@ class RunTask extends DefaultTask {
        clusterConfig.debug = enabled;
    }

    static void configure(Project project) {
        RunTask task = project.tasks.create(
            name: 'run',
            type: RunTask,
            description: "Runs elasticsearch with '${project.path}'",
            group: 'Verification')
    /** Configure the cluster that will be run. */
    @Override
    public Task configure(Closure closure) {
        ConfigureUtil.configure(closure, clusterConfig)
        return this
    }
}

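Because configure(Closure) is overridden to delegate to the cluster configuration, a build script can tune the foreground node directly on the task. A sketch (the port override is illustrative):

    run {
        // this closure is routed to ClusterConfiguration via ConfigureUtil
        baseHttpPort = 9201
    }
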
@@ -27,35 +27,26 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.plugins.ide.eclipse.model.EclipseClasspath

/** Configures the build to have a rest integration test. */
class StandaloneTestBasePlugin implements Plugin<Project> {
public class StandaloneTestBasePlugin implements Plugin<Project> {

    @Override
    void apply(Project project) {
    public void apply(Project project) {
        project.pluginManager.apply(JavaBasePlugin)
        project.pluginManager.apply(RandomizedTestingPlugin)

        BuildPlugin.globalBuildInfo(project)
        BuildPlugin.configureRepositories(project)

        // remove some unnecessary tasks for a qa test
        project.tasks.removeAll { it.name in ['assemble', 'buildDependents'] }

        // only set up tests to build
        project.sourceSets {
            test
        }
        project.dependencies {
            testCompile "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}"
        }
        project.sourceSets.create('test')
        project.dependencies.add('testCompile', "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}")

        project.eclipse.classpath.sourceSets = [project.sourceSets.test]
        project.eclipse.classpath.plusConfigurations = [project.configurations.testRuntime]

        project.eclipse {
            classpath {
                sourceSets = [project.sourceSets.test]
                plusConfigurations = [project.configurations.testRuntime]
            }
        }
        PrecommitTasks.create(project, false)
        project.check.dependsOn(project.precommit)
    }

@@ -25,11 +25,11 @@ import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin

/** Configures the build to have only unit tests. */
class StandaloneTestPlugin implements Plugin<Project> {
/** A plugin to add tests only. Used for QA tests that run arbitrary unit tests. */
public class StandaloneTestPlugin implements Plugin<Project> {

    @Override
    void apply(Project project) {
    public void apply(Project project) {
        project.pluginManager.apply(StandaloneTestBasePlugin)

        Map testOptions = [

@@ -41,10 +41,8 @@ class StandaloneTestPlugin implements Plugin<Project> {
        ]
        RandomizedTestingTask test = project.tasks.create(testOptions)
        test.configure(BuildPlugin.commonTestConfig(project))
        test.configure {
            classpath = project.sourceSets.test.runtimeClasspath
            testClassesDir project.sourceSets.test.output.classesDir
        }
        test.classpath = project.sourceSets.test.runtimeClasspath
        test.testClassesDir project.sourceSets.test.output.classesDir
        test.mustRunAfter(project.precommit)
        project.check.dependsOn(test)
    }

@@ -72,14 +72,14 @@ public class NodeInfo extends BaseNodeResponse {
    private HttpInfo http;

    @Nullable
    private PluginsInfo plugins;
    private PluginsAndModules plugins;

    NodeInfo() {
    }

    public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings,
                    @Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
                    @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsInfo plugins) {
                    @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) {
        super(node);
        this.version = version;
        this.build = build;

@@ -172,7 +172,7 @@ public class NodeInfo extends BaseNodeResponse {
    }

    @Nullable
    public PluginsInfo getPlugins() {
    public PluginsAndModules getPlugins() {
        return this.plugins;
    }

@@ -217,7 +217,8 @@ public class NodeInfo extends BaseNodeResponse {
            http = HttpInfo.readHttpInfo(in);
        }
        if (in.readBoolean()) {
            plugins = PluginsInfo.readPluginsInfo(in);
            plugins = new PluginsAndModules();
            plugins.readFrom(in);
        }
    }

@@ -0,0 +1,115 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.cluster.node.info;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.PluginInfo;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Information about plugins and modules
 */
public class PluginsAndModules implements Streamable, ToXContent {
    private List<PluginInfo> plugins;
    private List<PluginInfo> modules;

    public PluginsAndModules() {
        plugins = new ArrayList<>();
        modules = new ArrayList<>();
    }

    /**
     * Returns a list ordered by plugin name
     */
    public List<PluginInfo> getPluginInfos() {
        List<PluginInfo> plugins = new ArrayList<>(this.plugins);
        Collections.sort(plugins, (p1, p2) -> p1.getName().compareTo(p2.getName()));
        return plugins;
    }

    /**
     * Returns a list ordered by module name
     */
    public List<PluginInfo> getModuleInfos() {
        List<PluginInfo> modules = new ArrayList<>(this.modules);
        Collections.sort(modules, (p1, p2) -> p1.getName().compareTo(p2.getName()));
        return modules;
    }

    public void addPlugin(PluginInfo info) {
        plugins.add(info);
    }

    public void addModule(PluginInfo info) {
        modules.add(info);
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        if (plugins.isEmpty() == false || modules.isEmpty() == false) {
            throw new IllegalStateException("instance is already populated");
        }
        int plugins_size = in.readInt();
        for (int i = 0; i < plugins_size; i++) {
            plugins.add(PluginInfo.readFromStream(in));
        }
        int modules_size = in.readInt();
        for (int i = 0; i < modules_size; i++) {
            modules.add(PluginInfo.readFromStream(in));
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeInt(plugins.size());
        for (PluginInfo plugin : getPluginInfos()) {
            plugin.writeTo(out);
        }
        out.writeInt(modules.size());
        for (PluginInfo module : getModuleInfos()) {
            module.writeTo(out);
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startArray("plugins");
        for (PluginInfo pluginInfo : getPluginInfos()) {
            pluginInfo.toXContent(builder, params);
        }
        builder.endArray();
        // TODO: not ideal, make a better api for this (e.g. with jar metadata, and so on)
        builder.startArray("modules");
        for (PluginInfo moduleInfo : getModuleInfos()) {
            moduleInfo.toXContent(builder, params);
        }
        builder.endArray();

        return builder;
    }
}

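A quick way to see the Streamable contract in action is a write/read round trip; the sketch below assumes the common BytesStreamOutput/StreamInput.wrap stream helpers and a pre-built PluginInfo named pluginInfo, none of which appear in this diff:

    // illustrative round trip, not from this commit
    def out = new BytesStreamOutput()
    def written = new PluginsAndModules()
    written.addPlugin(pluginInfo)
    written.writeTo(out)

    def read = new PluginsAndModules()
    read.readFrom(StreamInput.wrap(out.bytes()))
    // calling read.readFrom(...) a second time would throw
    // IllegalStateException("instance is already populated")
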
@@ -1,101 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.cluster.node.info;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.plugins.PluginInfo;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class PluginsInfo implements Streamable, ToXContent {
    static final class Fields {
        static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins");
    }

    private List<PluginInfo> infos;

    public PluginsInfo() {
        infos = new ArrayList<>();
    }

    public PluginsInfo(int size) {
        infos = new ArrayList<>(size);
    }

    /**
     * @return an ordered list based on plugins name
     */
    public List<PluginInfo> getInfos() {
        Collections.sort(infos, new Comparator<PluginInfo>() {
            @Override
            public int compare(final PluginInfo o1, final PluginInfo o2) {
                return o1.getName().compareTo(o2.getName());
            }
        });

        return infos;
    }

    public void add(PluginInfo info) {
        infos.add(info);
    }

    public static PluginsInfo readPluginsInfo(StreamInput in) throws IOException {
        PluginsInfo infos = new PluginsInfo();
        infos.readFrom(in);
        return infos;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        int plugins_size = in.readInt();
        for (int i = 0; i < plugins_size; i++) {
            infos.add(PluginInfo.readFromStream(in));
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeInt(infos.size());
        for (PluginInfo plugin : getInfos()) {
            plugin.writeTo(out);
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startArray(Fields.PLUGINS);
        for (PluginInfo pluginInfo : getInfos()) {
            pluginInfo.toXContent(builder, params);
        }
        builder.endArray();

        return builder;
    }
}

@@ -74,7 +74,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
            versions.add(nodeResponse.nodeInfo().getVersion());
            process.addNodeStats(nodeResponse.nodeStats());
            jvm.addNodeInfoStats(nodeResponse.nodeInfo(), nodeResponse.nodeStats());
            plugins.addAll(nodeResponse.nodeInfo().getPlugins().getInfos());
            plugins.addAll(nodeResponse.nodeInfo().getPlugins().getPluginInfos());

            // now do the stats that should be deduped by hardware (implemented by ip deduping)
            TransportAddress publishAddress = nodeResponse.nodeInfo().getTransport().address().publishAddress();

@@ -131,34 +131,48 @@ final class Security {
    @SuppressForbidden(reason = "proper use of URL")
    static Map<String,Policy> getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException {
        Map<String,Policy> map = new HashMap<>();
        // collect up lists of plugins and modules
        List<Path> pluginsAndModules = new ArrayList<>();
        if (Files.exists(environment.pluginsFile())) {
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
                for (Path plugin : stream) {
                    Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
                    if (Files.exists(policyFile)) {
                        // first get a list of URLs for the plugins' jars:
                        // we resolve symlinks so map is keyed on the normalized codebase name
                        List<URL> codebases = new ArrayList<>();
                        try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
                            for (Path jar : jarStream) {
                                codebases.add(jar.toRealPath().toUri().toURL());
                            }
                        }

                        // parse the plugin's policy file into a set of permissions
                        Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));

                        // consult this policy for each of the plugin's jars:
                        for (URL url : codebases) {
                            if (map.put(url.getFile(), policy) != null) {
                                // just be paranoid ok?
                                throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
                            }
                        }
                    pluginsAndModules.add(plugin);
                }
            }
        }
        if (Files.exists(environment.modulesFile())) {
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.modulesFile())) {
                for (Path plugin : stream) {
                    pluginsAndModules.add(plugin);
                }
            }
        }
        // now process each one
        for (Path plugin : pluginsAndModules) {
            Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
            if (Files.exists(policyFile)) {
                // first get a list of URLs for the plugins' jars:
                // we resolve symlinks so map is keyed on the normalized codebase name
                List<URL> codebases = new ArrayList<>();
                try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
                    for (Path jar : jarStream) {
                        codebases.add(jar.toRealPath().toUri().toURL());
                    }
                }

                // parse the plugin's policy file into a set of permissions
                Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));

                // consult this policy for each of the plugin's jars:
                for (URL url : codebases) {
                    if (map.put(url.getFile(), policy) != null) {
                        // just be paranoid ok?
                        throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
                    }
                }
            }
        }

        return Collections.unmodifiableMap(map);
    }

@@ -228,6 +242,7 @@ final class Security {
        // read-only dirs
        addPath(policy, "path.home", environment.binFile(), "read,readlink");
        addPath(policy, "path.home", environment.libFile(), "read,readlink");
        addPath(policy, "path.home", environment.modulesFile(), "read,readlink");
        addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink");
        addPath(policy, "path.conf", environment.configFile(), "read,readlink");
        addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink");

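The policy files consulted here ship inside each plugin's zip under plugin-metadata (see the bundlePlugin task above). As a sketch, a plugin needing the permission this branch is named after might bundle a src/main/plugin-metadata/plugin-security.policy containing something like the following (illustrative, not from this commit):

    grant {
      permission java.lang.RuntimePermission "getClassLoader";
    };
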
@@ -125,7 +125,7 @@ public class TransportClient extends AbstractClient {
                .put(CLIENT_TYPE_SETTING, CLIENT_TYPE)
                .build();

        PluginsService pluginsService = new PluginsService(settings, null, pluginClasses);
        PluginsService pluginsService = new PluginsService(settings, null, null, pluginClasses);
        this.settings = pluginsService.updatedSettings();

        Version version = Version.CURRENT;

@@ -58,6 +58,8 @@ public class Environment {

    private final Path pluginsFile;

    private final Path modulesFile;

    private final Path sharedDataFile;

    /** location of bin/, used by plugin manager */

@@ -157,6 +159,7 @@ public class Environment {

        binFile = homeFile.resolve("bin");
        libFile = homeFile.resolve("lib");
        modulesFile = homeFile.resolve("modules");
    }

    /**

@@ -275,6 +278,10 @@ public class Environment {
        return libFile;
    }

    public Path modulesFile() {
        return modulesFile;
    }

    public Path logsFile() {
        return logsFile;
    }

@@ -33,7 +33,6 @@ import org.elasticsearch.index.engine.FlushNotAllowedEngineException;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.threadpool.ThreadPool;

@@ -200,23 +199,17 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
        return translogBuffer;
    }


    protected List<ShardId> availableShards() {
        ArrayList<ShardId> list = new ArrayList<>();
    protected List<IndexShard> availableShards() {
        List<IndexShard> activeShards = new ArrayList<>();

        for (IndexService indexService : indicesService) {
            for (IndexShard indexShard : indexService) {
                if (shardAvailable(indexShard)) {
                    list.add(indexShard.shardId());
            for (IndexShard shard : indexService) {
                if (shardAvailable(shard)) {
                    activeShards.add(shard);
                }
            }
        }
        return list;
    }

    /** returns true if shard exists and is available for updates */
    protected boolean shardAvailable(ShardId shardId) {
        return shardAvailable(getShard(shardId));
        return activeShards;
    }

    /** returns true if shard exists and is available for updates */

@@ -225,19 +218,8 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
        return shard != null && shard.canIndex() && CAN_UPDATE_INDEX_BUFFER_STATES.contains(shard.state());
    }

    /** gets an {@link IndexShard} instance for the given shard. returns null if the shard doesn't exist */
    protected IndexShard getShard(ShardId shardId) {
        IndexService indexService = indicesService.indexService(shardId.index().name());
        if (indexService != null) {
            IndexShard indexShard = indexService.getShardOrNull(shardId.id());
            return indexShard;
        }
        return null;
    }

    /** set new indexing and translog buffers on this shard. this may cause the shard to refresh to free up heap. */
    protected void updateShardBuffers(ShardId shardId, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
        final IndexShard shard = getShard(shardId);
    protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
        if (shard != null) {
            try {
                shard.updateBufferSize(shardIndexingBufferSize, shardTranslogBufferSize);

@@ -246,113 +228,33 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
            } catch (FlushNotAllowedEngineException e) {
                // ignore
            } catch (Exception e) {
                logger.warn("failed to set shard {} index buffer to [{}]", e, shardId, shardIndexingBufferSize);
                logger.warn("failed to set shard {} index buffer to [{}]", e, shard.shardId(), shardIndexingBufferSize);
            }
        }
    }

    /** returns {@link IndexShard#getActive} if the shard exists, else null */
    protected Boolean getShardActive(ShardId shardId) {
        final IndexShard indexShard = getShard(shardId);
        if (indexShard == null) {
            return null;
        }
        return indexShard.getActive();
    }

    /** check if any shards active status changed, now. */
    public void forceCheck() {
        statusChecker.run();
    }

    class ShardsIndicesStatusChecker implements Runnable {

        // True if the shard was active last time we checked
        private final Map<ShardId,Boolean> shardWasActive = new HashMap<>();

        @Override
        public synchronized void run() {
            EnumSet<ShardStatusChangeType> changes = purgeDeletedAndClosedShards();

            updateShardStatuses(changes);

            if (changes.isEmpty() == false) {
                // Something changed: recompute indexing buffers:
                calcAndSetShardBuffers("[" + changes + "]");
            }
        }

        /**
         * goes through all existing shards and check whether there are changes in their active status
         */
        private void updateShardStatuses(EnumSet<ShardStatusChangeType> changes) {
            for (ShardId shardId : availableShards()) {

                // Is the shard active now?
                Boolean isActive = getShardActive(shardId);

                if (isActive == null) {
                    // shard was closed..
                    continue;
                }

                // Was the shard active last time we checked?
                Boolean wasActive = shardWasActive.get(shardId);
                if (wasActive == null) {
                    // First time we are seeing this shard
                    shardWasActive.put(shardId, isActive);
                    changes.add(ShardStatusChangeType.ADDED);
                } else if (isActive) {
                    // Shard is active now
                    if (wasActive == false) {
                        // Shard became active itself, since we last checked (due to new indexing op arriving)
                        changes.add(ShardStatusChangeType.BECAME_ACTIVE);
                        logger.debug("marking shard {} as active indexing wise", shardId);
                        shardWasActive.put(shardId, true);
                    } else if (checkIdle(shardId) == Boolean.TRUE) {
                        // Make shard inactive now
                        changes.add(ShardStatusChangeType.BECAME_INACTIVE);

                        shardWasActive.put(shardId, false);
                    }
                }
            }
        }

        /**
         * purge any existing statuses that are no longer updated
         *
         * @return the changes applied
         */
        private EnumSet<ShardStatusChangeType> purgeDeletedAndClosedShards() {
            EnumSet<ShardStatusChangeType> changes = EnumSet.noneOf(ShardStatusChangeType.class);

            Iterator<ShardId> statusShardIdIterator = shardWasActive.keySet().iterator();
            while (statusShardIdIterator.hasNext()) {
                ShardId shardId = statusShardIdIterator.next();
                if (shardAvailable(shardId) == false) {
                    changes.add(ShardStatusChangeType.DELETED);
                    statusShardIdIterator.remove();
                }
            }
            return changes;
        }

        private void calcAndSetShardBuffers(String reason) {

            // Count how many shards are now active:
            int activeShardCount = 0;
            for (Map.Entry<ShardId,Boolean> ent : shardWasActive.entrySet()) {
                if (ent.getValue()) {
                    activeShardCount++;
        List<IndexShard> availableShards = availableShards();
        List<IndexShard> activeShards = new ArrayList<>();
        for (IndexShard shard : availableShards) {
            if (!checkIdle(shard)) {
                activeShards.add(shard);
            }
        }
        int activeShardCount = activeShards.size();

        // TODO: we could be smarter here by taking into account how much RAM the IndexWriter on each shard
        // is actually using (using IW.ramBytesUsed), so that small indices (e.g. Marvel) would not
        // get the same indexing buffer as large indices. But it quickly gets tricky...
        if (activeShardCount == 0) {
            logger.debug("no active shards (reason={})", reason);
            logger.debug("no active shards");
            return;
        }

@@ -372,13 +274,10 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
            shardTranslogBufferSize = maxShardTranslogBufferSize;
        }

        logger.debug("recalculating shard indexing buffer (reason={}), total is [{}] with [{}] active shards, each shard set to indexing=[{}], translog=[{}]", reason, indexingBuffer, activeShardCount, shardIndexingBufferSize, shardTranslogBufferSize);
        logger.debug("recalculating shard indexing buffer, total is [{}] with [{}] active shards, each shard set to indexing=[{}], translog=[{}]", indexingBuffer, activeShardCount, shardIndexingBufferSize, shardTranslogBufferSize);

        for (Map.Entry<ShardId,Boolean> ent : shardWasActive.entrySet()) {
            if (ent.getValue()) {
                // This shard is active
                updateShardBuffers(ent.getKey(), shardIndexingBufferSize, shardTranslogBufferSize);
            }
        for (IndexShard shard : activeShards) {
            updateShardBuffers(shard, shardIndexingBufferSize, shardTranslogBufferSize);
        }
    }
}

@@ -389,14 +288,13 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin

    /** ask this shard to check now whether it is inactive, and reduces its indexing and translog buffers if so. returns Boolean.TRUE if
     * it did deactivate, Boolean.FALSE if it did not, and null if the shard is unknown */
    protected Boolean checkIdle(ShardId shardId) {
        String ignoreReason; // eclipse compiler does not know it is really final
        final IndexShard shard = getShard(shardId);
    protected Boolean checkIdle(IndexShard shard) {
        String ignoreReason = null; // eclipse compiler does not know it is really final
        if (shard != null) {
            try {
                if (shard.checkIdle()) {
                    logger.debug("marking shard {} as inactive (inactive_time[{}]) indexing wise",
                            shardId,
                            shard.shardId(),
                            shard.getInactiveTime());
                    return Boolean.TRUE;
                }

@@ -412,15 +310,11 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
            ignoreReason = "shard not found";
        }
        if (ignoreReason != null) {
            logger.trace("ignore [{}] while marking shard {} as inactive", ignoreReason, shardId);
            logger.trace("ignore [{}] while marking shard {} as inactive", ignoreReason, shard.shardId());
        }
        return null;
    }

    private static enum ShardStatusChangeType {
        ADDED, DELETED, BECAME_ACTIVE, BECAME_INACTIVE
    }

    @Override
    public void onShardActive(IndexShard indexShard) {
        // At least one shard used to be inactive ie. a new write operation just showed up.

@@ -147,7 +147,7 @@ public class Node implements Releasable {
                tmpEnv.configFile(), Arrays.toString(tmpEnv.dataFiles()), tmpEnv.logsFile(), tmpEnv.pluginsFile());
        }

        this.pluginsService = new PluginsService(tmpSettings, tmpEnv.pluginsFile(), classpathPlugins);
        this.pluginsService = new PluginsService(tmpSettings, tmpEnv.modulesFile(), tmpEnv.pluginsFile(), classpathPlugins);
        this.settings = pluginsService.updatedSettings();
        // create the environment based on the finalized (processed) view of the settings
        this.environment = new Environment(this.settings());

@@ -71,7 +71,7 @@ public abstract class Plugin {
    }

    /**
     * Called before a new index is created on a node. The given module can be used to regsiter index-leve
     * Called before a new index is created on a node. The given module can be used to register index-level
     * extensions.
     */
    public void onIndexModule(IndexModule indexModule) {}

@@ -66,6 +66,10 @@ public class PluginManager {
            "plugin",
            "plugin.bat",
            "service.bat"));

    static final Set<String> MODULES = unmodifiableSet(newHashSet(
            "lang-expression",
            "lang-groovy"));

    static final Set<String> OFFICIAL_PLUGINS = unmodifiableSet(newHashSet(
            "analysis-icu",

@@ -78,8 +82,6 @@ public class PluginManager {
            "discovery-ec2",
            "discovery-gce",
            "discovery-multicast",
            "lang-expression",
            "lang-groovy",
            "lang-javascript",
            "lang-python",
            "mapper-attachments",

@@ -221,6 +223,12 @@ public class PluginManager {
        PluginInfo info = PluginInfo.readFromProperties(root);
        terminal.println(VERBOSE, "%s", info);

        // don't let the user install a plugin as a module...
        // (they might unavoidably be in maven central and are packaged up the same way)
        if (MODULES.contains(info.getName())) {
            throw new IOException("plugin '" + info.getName() + "' cannot be installed like this, it is a system module");
        }

        // update name in handle based on 'name' property found in descriptor file
        pluginHandle = new PluginHandle(info.getName(), pluginHandle.version, pluginHandle.user);
        final Path extractLocation = pluginHandle.extractedDir(environment);

@ -25,9 +25,8 @@ import org.apache.lucene.analysis.util.TokenizerFactory;
|
|||
import org.apache.lucene.codecs.Codec;
|
||||
import org.apache.lucene.codecs.DocValuesFormat;
|
||||
import org.apache.lucene.codecs.PostingsFormat;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
|
||||
import org.elasticsearch.bootstrap.JarHell;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
|
@ -39,10 +38,7 @@ import org.elasticsearch.common.logging.ESLogger;
|
|||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexModule;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
|
@ -69,10 +65,10 @@ import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
|
|||
public class PluginsService extends AbstractComponent {
|
||||
|
||||
/**
|
||||
* We keep around a list of plugins
|
||||
* We keep around a list of plugins and modules
|
||||
*/
|
||||
private final List<Tuple<PluginInfo, Plugin>> plugins;
|
||||
private final PluginsInfo info;
|
||||
private final PluginsAndModules info;
|
||||
|
||||
private final Map<Plugin, List<OnModuleReference>> onModuleReferences;
|
||||
|
||||
|
@ -89,13 +85,15 @@ public class PluginsService extends AbstractComponent {
|
|||
/**
|
||||
* Constructs a new PluginService
|
||||
* @param settings The settings of the system
|
||||
* @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem
|
||||
* @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem
|
||||
* @param classpathPlugins Plugins that exist in the classpath which should be loaded
|
||||
*/
|
||||
public PluginsService(Settings settings, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
|
||||
public PluginsService(Settings settings, Path modulesDirectory, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
|
||||
super(settings);
|
||||
info = new PluginsAndModules();
|
||||
|
||||
List<Tuple<PluginInfo, Plugin>> tupleBuilder = new ArrayList<>();
|
||||
List<Tuple<PluginInfo, Plugin>> pluginsLoaded = new ArrayList<>();
|
||||
|
||||
// first we load plugins that are on the classpath. this is for tests and transport clients
|
||||
for (Class<? extends Plugin> pluginClass : classpathPlugins) {
|
||||
|
@ -104,24 +102,39 @@ public class PluginsService extends AbstractComponent {
|
|||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("plugin loaded from classpath [{}]", pluginInfo);
|
||||
}
|
||||
tupleBuilder.add(new Tuple<>(pluginInfo, plugin));
|
||||
pluginsLoaded.add(new Tuple<>(pluginInfo, plugin));
|
||||
info.addPlugin(pluginInfo);
|
||||
}
|
||||
|
||||
// load modules
|
||||
if (modulesDirectory != null) {
|
||||
try {
|
||||
List<Bundle> bundles = getModuleBundles(modulesDirectory);
|
||||
List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
|
||||
pluginsLoaded.addAll(loaded);
|
||||
for (Tuple<PluginInfo, Plugin> module : loaded) {
|
||||
info.addModule(module.v1());
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
throw new IllegalStateException("Unable to initialize modules", ex);
|
||||
}
|
||||
}
|
||||
|
||||
// now, find all the ones that are in plugins/
|
||||
if (pluginsDirectory != null) {
|
||||
try {
|
||||
List<Bundle> bundles = getPluginBundles(pluginsDirectory);
|
||||
tupleBuilder.addAll(loadBundles(bundles));
|
||||
List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
|
||||
pluginsLoaded.addAll(loaded);
|
||||
for (Tuple<PluginInfo, Plugin> plugin : loaded) {
|
||||
info.addPlugin(plugin.v1());
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
throw new IllegalStateException("Unable to initialize plugins", ex);
|
||||
}
|
||||
}
|
||||
|
||||
plugins = Collections.unmodifiableList(tupleBuilder);
|
||||
info = new PluginsInfo();
|
||||
for (Tuple<PluginInfo, Plugin> tuple : plugins) {
|
||||
info.add(tuple.v1());
|
||||
}
|
||||
plugins = Collections.unmodifiableList(pluginsLoaded);
|
||||
|
||||
// We need to build a List of jvm and site plugins for checking mandatory plugins
|
||||
Map<String, Plugin> jvmPlugins = new HashMap<>();
|
||||
|
@ -151,7 +164,18 @@ public class PluginsService extends AbstractComponent {
|
|||
}
|
||||
}
|
||||
|
||||
logger.info("loaded {}, sites {}", jvmPlugins.keySet(), sitePlugins);
|
||||
// we don't log jars in lib/ we really shouldnt log modules,
|
||||
// but for now: just be transparent so we can debug any potential issues
|
||||
Set<String> moduleNames = new HashSet<>();
|
||||
Set<String> jvmPluginNames = new HashSet<>();
|
||||
for (PluginInfo moduleInfo : info.getModuleInfos()) {
|
||||
moduleNames.add(moduleInfo.getName());
|
||||
}
|
||||
for (PluginInfo pluginInfo : info.getPluginInfos()) {
|
||||
jvmPluginNames.add(pluginInfo.getName());
|
||||
}
|
||||
|
||||
logger.info("modules {}, plugins {}, sites {}", moduleNames, jvmPluginNames, sitePlugins);
|
||||
|
||||
Map<Plugin, List<OnModuleReference>> onModuleReferences = new HashMap<>();
|
||||
for (Plugin plugin : jvmPlugins.values()) {
|
||||
|
@ -160,6 +184,10 @@ public class PluginsService extends AbstractComponent {
|
|||
if (!method.getName().equals("onModule")) {
|
||||
continue;
|
||||
}
|
||||
// this is a deprecated final method, so all Plugin subclasses have it
|
||||
if (method.getParameterTypes().length == 1 && method.getParameterTypes()[0].equals(IndexModule.class)) {
|
||||
continue;
|
||||
}
|
||||
if (method.getParameterTypes().length == 0 || method.getParameterTypes().length > 1) {
|
||||
logger.warn("Plugin: {} implementing onModule with no parameters or more than one parameter", plugin.name());
|
||||
continue;

@@ -178,7 +206,7 @@ public class PluginsService extends AbstractComponent {
this.onModuleReferences = Collections.unmodifiableMap(onModuleReferences);
}

public List<Tuple<PluginInfo, Plugin>> plugins() {
private List<Tuple<PluginInfo, Plugin>> plugins() {
return plugins;
}

@@ -249,9 +277,9 @@ public class PluginsService extends AbstractComponent {
}
}
/**
* Get information about plugins (jvm and site plugins).
* Get information about plugins and modules.
*/
public PluginsInfo info() {
public PluginsAndModules info() {
return info;
}

@@ -262,6 +290,40 @@ public class PluginsService extends AbstractComponent {
List<URL> urls = new ArrayList<>();
}

// similar in impl to getPluginBundles, but DO NOT try to make them share code.
// we don't need to inherit all the leniency, and things are different enough.
static List<Bundle> getModuleBundles(Path modulesDirectory) throws IOException {
// damn leniency
if (Files.notExists(modulesDirectory)) {
return Collections.emptyList();
}
List<Bundle> bundles = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(modulesDirectory)) {
for (Path module : stream) {
if (FileSystemUtils.isHidden(module)) {
continue; // skip over .DS_Store etc
}
PluginInfo info = PluginInfo.readFromProperties(module);
if (!info.isJvm()) {
throw new IllegalStateException("modules must be jvm plugins: " + info);
}
if (!info.isIsolated()) {
throw new IllegalStateException("modules must be isolated: " + info);
}
Bundle bundle = new Bundle();
bundle.plugins.add(info);
// gather urls for jar files
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(module, "*.jar")) {
for (Path jar : jarStream) {
bundle.urls.add(jar.toUri().toURL());
}
}
bundles.add(bundle);
}
}
return bundles;
}
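
Unlike plugin loading, module loading fails hard: every module directory must carry a descriptor that declares a jvm, isolated plugin. A hedged sketch of the same validation applied to a single module directory (the path is illustrative, and the import locations are assumed; readFromProperties reads the module's plugin descriptor as shown above):

import java.nio.file.Path;
import java.nio.file.Paths;

import org.elasticsearch.plugins.PluginInfo;

public class ModuleDescriptorCheck {
    public static void main(String[] args) throws Exception {
        // illustrative module directory; a real distribution ships these under modules/
        Path module = Paths.get("/usr/share/elasticsearch/modules/lang-expression");
        PluginInfo info = PluginInfo.readFromProperties(module);
        if (info.isJvm() == false) {
            throw new IllegalStateException("modules must be jvm plugins: " + info);
        }
        if (info.isIsolated() == false) {
            throw new IllegalStateException("modules must be isolated: " + info);
        }
        System.out.println("module ok: " + info.getName());
    }
}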

static List<Bundle> getPluginBundles(Path pluginsDirectory) throws IOException {
ESLogger logger = Loggers.getLogger(PluginsService.class);

@@ -45,9 +45,6 @@ public class RestForceMergeAction extends BaseRestHandler {
super(settings, controller, client);
controller.registerHandler(POST, "/_forcemerge", this);
controller.registerHandler(POST, "/{index}/_forcemerge", this);

controller.registerHandler(GET, "/_forcemerge", this);
controller.registerHandler(GET, "/{index}/_forcemerge", this);
}

@Override

@@ -95,7 +95,7 @@ public class RestPluginsAction extends AbstractCatAction {
for (DiscoveryNode node : nodes) {
NodeInfo info = nodesInfo.getNodesMap().get(node.id());

for (PluginInfo pluginInfo : info.getPlugins().getInfos()) {
for (PluginInfo pluginInfo : info.getPlugins().getPluginInfos()) {
table.startRow();
table.addCell(node.id());
table.addCell(node.name());

@@ -43,8 +43,6 @@ OFFICIAL PLUGINS
- discovery-ec2
- discovery-gce
- discovery-multicast
- lang-expression
- lang-groovy
- lang-javascript
- lang-python
- mapper-attachments

@@ -22,54 +22,51 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public class IndexingMemoryControllerTests extends ESTestCase {
public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {

static class MockController extends IndexingMemoryController {

final static ByteSizeValue INACTIVE = new ByteSizeValue(-1);

final Map<ShardId, ByteSizeValue> indexingBuffers = new HashMap<>();
final Map<ShardId, ByteSizeValue> translogBuffers = new HashMap<>();
final Map<IndexShard, ByteSizeValue> indexingBuffers = new HashMap<>();
final Map<IndexShard, ByteSizeValue> translogBuffers = new HashMap<>();

final Map<ShardId, Long> lastIndexTimeNanos = new HashMap<>();
final Set<ShardId> activeShards = new HashSet<>();
final Map<IndexShard, Long> lastIndexTimeNanos = new HashMap<>();
final Set<IndexShard> activeShards = new HashSet<>();

long currentTimeSec = TimeValue.timeValueNanos(System.nanoTime()).seconds();

public MockController(Settings settings) {
super(Settings.builder()
.put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
.put(settings)
.build(),
null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb
.put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
.put(settings)
.build(),
null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb
}

public void deleteShard(ShardId id) {
public void deleteShard(IndexShard id) {
indexingBuffers.remove(id);
translogBuffers.remove(id);
}

public void assertBuffers(ShardId id, ByteSizeValue indexing, ByteSizeValue translog) {
public void assertBuffers(IndexShard id, ByteSizeValue indexing, ByteSizeValue translog) {
assertThat(indexingBuffers.get(id), equalTo(indexing));
assertThat(translogBuffers.get(id), equalTo(translog));
}

public void assertInActive(ShardId id) {
public void assertInactive(IndexShard id) {
assertThat(indexingBuffers.get(id), equalTo(INACTIVE));
assertThat(translogBuffers.get(id), equalTo(INACTIVE));
}

@@ -80,36 +77,31 @@ public class IndexingMemoryControllerTests extends ESTestCase {
}

@Override
protected List<ShardId> availableShards() {
protected List<IndexShard> availableShards() {
return new ArrayList<>(indexingBuffers.keySet());
}

@Override
protected boolean shardAvailable(ShardId shardId) {
return indexingBuffers.containsKey(shardId);
protected boolean shardAvailable(IndexShard shard) {
return indexingBuffers.containsKey(shard);
}

@Override
protected Boolean getShardActive(ShardId shardId) {
return activeShards.contains(shardId);
protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
indexingBuffers.put(shard, shardIndexingBufferSize);
translogBuffers.put(shard, shardTranslogBufferSize);
}

@Override
protected void updateShardBuffers(ShardId shardId, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
indexingBuffers.put(shardId, shardIndexingBufferSize);
translogBuffers.put(shardId, shardTranslogBufferSize);
}

@Override
protected Boolean checkIdle(ShardId shardId) {
protected Boolean checkIdle(IndexShard shard) {
final TimeValue inactiveTime = settings.getAsTime(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5));
Long ns = lastIndexTimeNanos.get(shardId);
Long ns = lastIndexTimeNanos.get(shard);
if (ns == null) {
return null;
} else if (currentTimeInNanos() - ns >= inactiveTime.nanos()) {
indexingBuffers.put(shardId, INACTIVE);
translogBuffers.put(shardId, INACTIVE);
activeShards.remove(shardId);
indexingBuffers.put(shard, INACTIVE);
translogBuffers.put(shard, INACTIVE);
activeShards.remove(shard);
return true;
} else {
return false;

@@ -120,118 +112,126 @@ public class IndexingMemoryControllerTests extends ESTestCase {
currentTimeSec += sec;
}

public void simulateIndexing(ShardId shardId) {
lastIndexTimeNanos.put(shardId, currentTimeInNanos());
if (indexingBuffers.containsKey(shardId) == false) {
public void simulateIndexing(IndexShard shard) {
lastIndexTimeNanos.put(shard, currentTimeInNanos());
if (indexingBuffers.containsKey(shard) == false) {
// First time we are seeing this shard; start it off with inactive buffers as IndexShard does:
indexingBuffers.put(shardId, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
translogBuffers.put(shardId, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
indexingBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
translogBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
}
activeShards.add(shardId);
activeShards.add(shard);
forceCheck();
}
}

public void testShardAdditionAndRemoval() {
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 3).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");

MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build());
final ShardId shard1 = new ShardId("test", 1);
controller.simulateIndexing(shard1);
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build());
IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K

// add another shard
final ShardId shard2 = new ShardId("test", 2);
controller.simulateIndexing(shard2);
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard2, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));

// remove first shard
controller.deleteShard(shard1);
controller.deleteShard(shard0);
controller.forceCheck();
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K

// remove second shard
controller.deleteShard(shard2);
controller.deleteShard(shard1);
controller.forceCheck();

// add a new one
final ShardId shard3 = new ShardId("test", 3);
controller.simulateIndexing(shard3);
controller.assertBuffers(shard3, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
IndexShard shard2 = test.getShard(2);
controller.simulateIndexing(shard2);
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
}

public void testActiveInactive() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb")
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s")
.build());
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");

final ShardId shard1 = new ShardId("test", 1);
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb")
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s")
.build());

IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
final ShardId shard2 = new ShardId("test", 2);
controller.simulateIndexing(shard2);
controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard2, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));

// index into both shards, move the clock and see that they are still active
controller.simulateIndexing(shard0);
controller.simulateIndexing(shard1);
controller.simulateIndexing(shard2);

controller.incrementTimeSec(10);
controller.forceCheck();

// both shards now inactive
controller.assertInActive(shard1);
controller.assertInActive(shard2);
controller.assertInactive(shard0);
controller.assertInactive(shard1);

// index into one shard only, see it becomes active
controller.simulateIndexing(shard1);
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.assertInActive(shard2);
controller.simulateIndexing(shard0);
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.assertInactive(shard1);

controller.incrementTimeSec(3); // increment but not enough to become inactive
controller.forceCheck();
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.assertInActive(shard2);
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.assertInactive(shard1);

controller.incrementTimeSec(3); // increment some more
controller.forceCheck();
controller.assertInActive(shard1);
controller.assertInActive(shard2);
controller.assertInactive(shard0);
controller.assertInactive(shard1);

// index some and shard becomes immediately active
controller.simulateIndexing(shard2);
controller.assertInActive(shard1);
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.simulateIndexing(shard1);
controller.assertInactive(shard0);
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
}

public void testMinShardBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
.put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
.put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build());

assertTwoActiveShards(controller, new ByteSizeValue(6, ByteSizeUnit.MB), new ByteSizeValue(40, ByteSizeUnit.KB));
}

public void testMaxShardBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
.put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb")
.put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
.put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb")
.put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build());

assertTwoActiveShards(controller, new ByteSizeValue(3, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.KB));
}

public void testRelativeBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%")
.build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%")
.build());

assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(50, ByteSizeUnit.MB)));
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));

@@ -240,10 +240,10 @@ public class IndexingMemoryControllerTests extends ESTestCase {

public void testMinBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());

assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));

@@ -251,23 +251,24 @@

public void testMaxBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%")
.put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%")
.put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());

assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
}

protected void assertTwoActiveShards(MockController controller, ByteSizeValue indexBufferSize, ByteSizeValue translogBufferSize) {
final ShardId shard1 = new ShardId("test", 1);
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");
IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
final ShardId shard2 = new ShardId("test", 2);
controller.simulateIndexing(shard2);
controller.assertBuffers(shard0, indexBufferSize, translogBufferSize);
controller.assertBuffers(shard1, indexBufferSize, translogBufferSize);
controller.assertBuffers(shard2, indexBufferSize, translogBufferSize);

}

}
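
The assertions above encode how the controller splits its global budgets: each budget is divided evenly across the currently active shards and then clamped by the per-shard floors and caps. A worked sketch of that arithmetic using this test's settings (the even split is inferred from the assertions, not spelled out here):

// constants from the test settings above
long indexBudget = 10L * 1024 * 1024; // index buffer = 10mb
long translogBudget = 100L * 1024;    // translog buffer = 100kb
long translogCap = 64L * 1024;        // per-shard translog buffer is maxed at 64kb

// two active shards: 10mb/2 = 5mb indexing, 100kb/2 = 50kb translog each
int active = 2;
long perShardIndex = indexBudget / active;
long perShardTranslog = Math.min(translogBudget / active, translogCap);

// one active shard: the full 10mb, but the translog share clamps at the 64kb cap
active = 1;
perShardIndex = indexBudget / active;
perShardTranslog = Math.min(translogBudget / active, translogCap);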

@@ -20,7 +20,7 @@
package org.elasticsearch.plugins;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

@@ -259,14 +259,14 @@ public class PluginInfoTests extends ESTestCase {
}

public void testPluginListSorted() {
PluginsInfo pluginsInfo = new PluginsInfo(5);
pluginsInfo.add(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));
PluginsAndModules pluginsInfo = new PluginsAndModules();
pluginsInfo.addPlugin(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));

final List<PluginInfo> infos = pluginsInfo.getInfos();
final List<PluginInfo> infos = pluginsInfo.getPluginInfos();
List<String> names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList());
assertThat(names, contains("a", "b", "c", "d", "e"));
}

@@ -81,7 +81,7 @@ public class PluginsServiceTests extends ESTestCase {
}

static PluginsService newPluginsService(Settings settings, Class<? extends Plugin>... classpathPlugins) {
return new PluginsService(settings, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
return new PluginsService(settings, null, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
}

public void testAdditionalSettings() {

@@ -39,20 +39,51 @@ buildscript {
}
}

allprojects {
project.ext {
// this is common configuration for distributions, but we also add it here for the license check to use
dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
// this is common configuration for distributions, but we also add it here for the license check to use
ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')

/*****************************************************************************
* Modules *
*****************************************************************************/

task buildModules(type: Copy) {
into 'build/modules'
}

// we create the buildModules task above so the distribution subprojects can
// depend on it, but we don't actually configure it until projects are evaluated
// so it can depend on the bundling of plugins (ie modules must have been configured)
project.gradle.projectsEvaluated {
project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each { Project module ->
buildModules {
dependsOn module.bundlePlugin
into(module.name) {
from { zipTree(module.bundlePlugin.outputs.files.singleFile) }
}
}
configure(subprojects.findAll { it.name != 'integ-test-zip' }) { Project distribution ->
distribution.integTest.mustRunAfter(module.integTest)
}
}
}

// make sure we have a clean task since we aren't a java project, but we have tasks that
// put stuff in the build dir
task clean(type: Delete) {
delete 'build'
}

subprojects {
/*****************************************************************************
* Rest test config *
*****************************************************************************/
apply plugin: 'elasticsearch.rest-test'
integTest {
project.integTest {
dependsOn(project.assemble)
includePackaged true
cluster {
distribution = project.name
}
}

/*****************************************************************************

@@ -81,7 +112,12 @@ subprojects {
libFiles = copySpec {
into 'lib'
from project(':core').jar
from dependencyFiles
from project(':distribution').dependencyFiles
}

modulesFiles = copySpec {
into 'modules'
from project(':distribution').buildModules
}

configFiles = copySpec {

@@ -103,7 +139,7 @@ subprojects {
/*****************************************************************************
* Zip and tgz configuration *
*****************************************************************************/
configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
configure(subprojects.findAll { ['zip', 'tar', 'integ-test-zip'].contains(it.name) }) {
project.ext.archivesFiles = copySpec {
into("elasticsearch-${version}") {
with libFiles

@@ -121,6 +157,9 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
from('../src/main/resources') {
include 'bin/*.exe'
}
if (project.name != 'integ-test-zip') {
with modulesFiles
}
}
}
}

@@ -143,7 +182,7 @@
* directly from the filesystem. It doesn't want to process them through
* MavenFilteringHack or any other copy-style action.
*/
configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) {
integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
File packagingFiles = new File(buildDir, 'packaging')
project.ext.packagingFiles = packagingFiles

@@ -233,6 +272,7 @@ configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
user 'root'
permissionGroup 'root'
with libFiles
with modulesFiles
with copySpec {
with commonFiles
if (project.name == 'deb') {

@@ -305,7 +345,7 @@ task updateShas(type: UpdateShasTask) {
parentTask = dependencyLicenses
}

RunTask.configure(project)
task run(type: RunTask) {}

/**
* Build some variables that are replaced in the packages. This includes both

@@ -18,7 +18,7 @@
*/

task buildDeb(type: Deb) {
dependsOn dependencyFiles, preparePackagingFiles
dependsOn preparePackagingFiles
baseName 'elasticsearch' // this is what pom generation uses for artifactId
// Follow elasticsearch's deb file naming convention
archiveName "${packageName}-${project.version}.deb"

@@ -44,6 +44,4 @@ integTest {
skip the test if they aren't around. */
enabled = new File('/usr/bin/dpkg-deb').exists() || // Standard location
new File('/usr/local/bin/dpkg-deb').exists() // Homebrew location
dependsOn buildDeb
clusterConfig.distribution = 'deb'
}

@@ -0,0 +1,31 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

task buildZip(type: Zip) {
baseName = 'elasticsearch'
with archivesFiles
}

artifacts {
'default' buildZip
archives buildZip
}

integTest.dependsOn buildZip

@@ -0,0 +1,38 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.test.rest;

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.test.rest.parser.RestTestParseException;

import java.io.IOException;

/** Rest integration test. Runs against an external cluster in 'mvn verify'. */
public class RestIT extends ESRestTestCase {
public RestIT(RestTestCandidate testCandidate) {
super(testCandidate);
}
// we run them all sequentially: start simple!
@ParametersFactory
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
return createParameters(0, 1);
}
}
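
createParameters(0, 1) asks the REST test infrastructure for group 0 of 1, i.e. every packaged test candidate in a single group, which is what the "start simple" comment refers to. If the suite ever needed to run split across parallel suites, the same factory could hand out the candidates in halves (a hedged sketch; the group/count semantics of the two arguments are assumed from the call above, not confirmed by this change):

// hypothetical split across two suites instead of one sequential run
@ParametersFactory
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    return createParameters(0, 2); // first half; a sibling suite would use createParameters(1, 2)
}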

@@ -42,6 +42,4 @@ integTest {
enabled = new File('/bin/rpm').exists() || // Standard location
new File('/usr/bin/rpm').exists() || // Debian location
new File('/usr/local/bin/rpm').exists() // Homebrew location
dependsOn buildRpm
clusterConfig.distribution = 'rpm'
}

@@ -17,7 +17,7 @@
* under the License.
*/

task buildTar(type: Tar, dependsOn: dependencyFiles) {
task buildTar(type: Tar) {
baseName = 'elasticsearch'
extension = 'tar.gz'
with archivesFiles

@@ -28,8 +28,3 @@ artifacts {
'default' buildTar
archives buildTar
}

integTest {
dependsOn buildTar
clusterConfig.distribution = 'tar'
}

@@ -17,7 +17,7 @@
* under the License.
*/

task buildZip(type: Zip, dependsOn: dependencyFiles) {
task buildZip(type: Zip) {
baseName = 'elasticsearch'
with archivesFiles
}

@@ -90,6 +90,9 @@ The search exists api has been removed in favour of using the search api with

The deprecated `/_optimize` endpoint has been removed. The `/_forcemerge`
endpoint should be used in lieu of optimize.

The `GET` HTTP verb for `/_forcemerge` is no longer supported; please use the
`POST` HTTP verb.
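
For example, a force merge that was previously issued with a `GET` now has to be sent as `POST /my-index/_forcemerge`. A hedged sketch of the Java-client equivalent (the index name and segment count are illustrative, and `prepareForceMerge` is assumed from the client API accompanying this rename):

import org.elasticsearch.client.Client;

public class ForceMergeExample {
    // equivalent of: POST /my-index/_forcemerge?max_num_segments=1
    static void forceMerge(Client client) {
        client.admin().indices().prepareForceMerge("my-index")
                .setMaxNumSegments(1) // optional: merge down to a single segment
                .get();
    }
}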

==== Deprecated queries removed

The following deprecated queries have been removed:

@@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

subprojects {
apply plugin: 'elasticsearch.esplugin'

esplugin {
// for local ES plugins, the name of the plugin is the same as the directory
name project.name
}

if (project.file('src/main/packaging').exists()) {
throw new InvalidModelException("Modules cannot contain packaging files")
}
if (project.file('src/main/bin').exists()) {
throw new InvalidModelException("Modules cannot contain bin files")
}
if (project.file('src/main/config').exists()) {
throw new InvalidModelException("Modules cannot contain config files")
}

project.afterEvaluate {
if (esplugin.isolated == false) {
throw new InvalidModelException("Modules cannot disable isolation")
}
if (esplugin.jvm == false) {
throw new InvalidModelException("Modules must be jvm plugins")
}
}
}

@@ -10,5 +10,5 @@
- do:
nodes.info: {}

- match: { nodes.$master.plugins.0.name: lang-expression }
- match: { nodes.$master.plugins.0.jvm: true }
- match: { nodes.$master.modules.0.name: lang-expression }
- match: { nodes.$master.modules.0.jvm: true }