Merge branch 'master' into feature/ingest
This commit is contained in:
commit
6062c4eac9
|
@ -109,6 +109,7 @@ subprojects {
|
||||||
"org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
|
"org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
|
||||||
"org.elasticsearch:elasticsearch:${version}": ':core',
|
"org.elasticsearch:elasticsearch:${version}": ':core',
|
||||||
"org.elasticsearch:test-framework:${version}": ':test-framework',
|
"org.elasticsearch:test-framework:${version}": ':test-framework',
|
||||||
|
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip',
|
||||||
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
|
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
|
||||||
"org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar',
|
"org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar',
|
||||||
"org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm',
|
"org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm',
|
||||||
|
|
|
@ -23,40 +23,41 @@ import org.elasticsearch.gradle.test.RestIntegTestTask
|
||||||
import org.elasticsearch.gradle.test.RunTask
|
import org.elasticsearch.gradle.test.RunTask
|
||||||
import org.gradle.api.Project
|
import org.gradle.api.Project
|
||||||
import org.gradle.api.Task
|
import org.gradle.api.Task
|
||||||
|
import org.gradle.api.tasks.SourceSet
|
||||||
import org.gradle.api.tasks.bundling.Zip
|
import org.gradle.api.tasks.bundling.Zip
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Encapsulates build configuration for an Elasticsearch plugin.
|
* Encapsulates build configuration for an Elasticsearch plugin.
|
||||||
*/
|
*/
|
||||||
class PluginBuildPlugin extends BuildPlugin {
|
public class PluginBuildPlugin extends BuildPlugin {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
void apply(Project project) {
|
public void apply(Project project) {
|
||||||
super.apply(project)
|
super.apply(project)
|
||||||
configureDependencies(project)
|
configureDependencies(project)
|
||||||
// this afterEvaluate must happen before the afterEvaluate added by integTest configure,
|
// this afterEvaluate must happen before the afterEvaluate added by integTest creation,
|
||||||
// so that the file name resolution for installing the plugin will be setup
|
// so that the file name resolution for installing the plugin will be setup
|
||||||
project.afterEvaluate {
|
project.afterEvaluate {
|
||||||
String name = project.pluginProperties.extension.name
|
String name = project.pluginProperties.extension.name
|
||||||
project.jar.baseName = name
|
project.jar.baseName = name
|
||||||
project.bundlePlugin.baseName = name
|
project.bundlePlugin.baseName = name
|
||||||
|
|
||||||
project.integTest.dependsOn(project.bundlePlugin)
|
project.integTest.dependsOn(project.bundlePlugin)
|
||||||
project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
|
|
||||||
project.tasks.run.dependsOn(project.bundlePlugin)
|
project.tasks.run.dependsOn(project.bundlePlugin)
|
||||||
project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
|
if (project.path.startsWith(':modules:')) {
|
||||||
}
|
project.integTest.clusterConfig.module(project)
|
||||||
RestIntegTestTask.configure(project)
|
project.tasks.run.clusterConfig.module(project)
|
||||||
RunTask.configure(project)
|
} else {
|
||||||
Task bundle = configureBundleTask(project)
|
project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
|
||||||
project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar }
|
project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
|
||||||
project.configurations.getByName('default').extendsFrom = []
|
}
|
||||||
project.artifacts {
|
|
||||||
archives bundle
|
|
||||||
'default' bundle
|
|
||||||
}
|
}
|
||||||
|
createIntegTestTask(project)
|
||||||
|
createBundleTask(project)
|
||||||
|
project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build
|
||||||
}
|
}
|
||||||
|
|
||||||
static void configureDependencies(Project project) {
|
private static void configureDependencies(Project project) {
|
||||||
project.dependencies {
|
project.dependencies {
|
||||||
provided "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}"
|
provided "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}"
|
||||||
testCompile "org.elasticsearch:test-framework:${project.versions.elasticsearch}"
|
testCompile "org.elasticsearch:test-framework:${project.versions.elasticsearch}"
|
||||||
|
@ -72,21 +73,36 @@ class PluginBuildPlugin extends BuildPlugin {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static Task configureBundleTask(Project project) {
|
/** Adds an integTest task which runs rest tests */
|
||||||
PluginPropertiesTask buildProperties = project.tasks.create(name: 'pluginProperties', type: PluginPropertiesTask)
|
private static void createIntegTestTask(Project project) {
|
||||||
File pluginMetadata = project.file("src/main/plugin-metadata")
|
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
|
||||||
project.sourceSets.test {
|
integTest.mustRunAfter(project.precommit, project.test)
|
||||||
output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
|
project.check.dependsOn(integTest)
|
||||||
resources {
|
}
|
||||||
srcDir pluginMetadata
|
|
||||||
}
|
/**
|
||||||
}
|
* Adds a bundlePlugin task which builds the zip containing the plugin jars,
|
||||||
Task bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties])
|
* metadata, properties, and packaging files
|
||||||
bundle.configure {
|
*/
|
||||||
from buildProperties
|
private static void createBundleTask(Project project) {
|
||||||
from pluginMetadata
|
File pluginMetadata = project.file('src/main/plugin-metadata')
|
||||||
from project.jar
|
|
||||||
from bundle.project.configurations.runtime - bundle.project.configurations.provided
|
// create a task to build the properties file for this plugin
|
||||||
|
PluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', PluginPropertiesTask.class)
|
||||||
|
|
||||||
|
// add the plugin properties and metadata to test resources, so unit tests can
|
||||||
|
// know about the plugin (used by test security code to statically initialize the plugin in unit tests)
|
||||||
|
SourceSet testSourceSet = project.sourceSets.test
|
||||||
|
testSourceSet.output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
|
||||||
|
testSourceSet.resources.srcDir(pluginMetadata)
|
||||||
|
|
||||||
|
// create the actual bundle task, which zips up all the files for the plugin
|
||||||
|
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) {
|
||||||
|
from buildProperties // plugin properties file
|
||||||
|
from pluginMetadata // metadata (eg custom security policy)
|
||||||
|
from project.jar // this plugin's jar
|
||||||
|
from project.configurations.runtime - project.configurations.provided // the dep jars
|
||||||
|
// extra files for the plugin to go into the zip
|
||||||
from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging
|
from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging
|
||||||
from('src/main') {
|
from('src/main') {
|
||||||
include 'config/**'
|
include 'config/**'
|
||||||
|
@ -97,6 +113,13 @@ class PluginBuildPlugin extends BuildPlugin {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
project.assemble.dependsOn(bundle)
|
project.assemble.dependsOn(bundle)
|
||||||
return bundle
|
|
||||||
|
// remove jar from the archives (things that will be published), and set it to the zip
|
||||||
|
project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar }
|
||||||
|
project.artifacts.add('archives', bundle)
|
||||||
|
|
||||||
|
// also make the zip the default artifact (used when depending on this project)
|
||||||
|
project.configurations.getByName('default').extendsFrom = []
|
||||||
|
project.artifacts.add('default', bundle)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,7 +27,7 @@ import org.gradle.api.tasks.Input
|
||||||
class ClusterConfiguration {
|
class ClusterConfiguration {
|
||||||
|
|
||||||
@Input
|
@Input
|
||||||
String distribution = 'zip'
|
String distribution = 'integ-test-zip'
|
||||||
|
|
||||||
@Input
|
@Input
|
||||||
int numNodes = 1
|
int numNodes = 1
|
||||||
|
@ -71,6 +71,8 @@ class ClusterConfiguration {
|
||||||
|
|
||||||
LinkedHashMap<String, Object> plugins = new LinkedHashMap<>()
|
LinkedHashMap<String, Object> plugins = new LinkedHashMap<>()
|
||||||
|
|
||||||
|
List<Project> modules = new ArrayList<>()
|
||||||
|
|
||||||
LinkedHashMap<String, Object[]> setupCommands = new LinkedHashMap<>()
|
LinkedHashMap<String, Object[]> setupCommands = new LinkedHashMap<>()
|
||||||
|
|
||||||
@Input
|
@Input
|
||||||
|
@ -93,6 +95,12 @@ class ClusterConfiguration {
|
||||||
plugins.put(name, pluginProject)
|
plugins.put(name, pluginProject)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Add a module to the cluster. The project must be an esplugin and have a single zip default artifact. */
|
||||||
|
@Input
|
||||||
|
void module(Project moduleProject) {
|
||||||
|
modules.add(moduleProject)
|
||||||
|
}
|
||||||
|
|
||||||
@Input
|
@Input
|
||||||
void setupCommand(String name, Object... args) {
|
void setupCommand(String name, Object... args) {
|
||||||
setupCommands.put(name, args)
|
setupCommands.put(name, args)
|
||||||
|
|
|
@ -60,7 +60,12 @@ class ClusterFormationTasks {
|
||||||
/** Adds a dependency on the given distribution */
|
/** Adds a dependency on the given distribution */
|
||||||
static void configureDistributionDependency(Project project, String distro) {
|
static void configureDistributionDependency(Project project, String distro) {
|
||||||
String elasticsearchVersion = VersionProperties.elasticsearch
|
String elasticsearchVersion = VersionProperties.elasticsearch
|
||||||
String packaging = distro == 'tar' ? 'tar.gz' : distro
|
String packaging = distro
|
||||||
|
if (distro == 'tar') {
|
||||||
|
packaging = 'tar.gz'
|
||||||
|
} else if (distro == 'integ-test-zip') {
|
||||||
|
packaging = 'zip'
|
||||||
|
}
|
||||||
project.configurations {
|
project.configurations {
|
||||||
elasticsearchDistro
|
elasticsearchDistro
|
||||||
}
|
}
|
||||||
|
@ -103,6 +108,12 @@ class ClusterFormationTasks {
|
||||||
setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
|
setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
|
||||||
setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)
|
setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)
|
||||||
|
|
||||||
|
// install modules
|
||||||
|
for (Project module : node.config.modules) {
|
||||||
|
String actionName = pluginTaskName('install', module.name, 'Module')
|
||||||
|
setup = configureInstallModuleTask(taskName(task, node, actionName), project, setup, node, module)
|
||||||
|
}
|
||||||
|
|
||||||
// install plugins
|
// install plugins
|
||||||
for (Map.Entry<String, Object> plugin : node.config.plugins.entrySet()) {
|
for (Map.Entry<String, Object> plugin : node.config.plugins.entrySet()) {
|
||||||
String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin')
|
String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin')
|
||||||
|
@ -138,6 +149,7 @@ class ClusterFormationTasks {
|
||||||
by the source tree. If it isn't then Bad Things(TM) will happen. */
|
by the source tree. If it isn't then Bad Things(TM) will happen. */
|
||||||
Task extract
|
Task extract
|
||||||
switch (node.config.distribution) {
|
switch (node.config.distribution) {
|
||||||
|
case 'integ-test-zip':
|
||||||
case 'zip':
|
case 'zip':
|
||||||
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
|
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
|
||||||
from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
|
from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
|
||||||
|
@ -286,6 +298,20 @@ class ClusterFormationTasks {
|
||||||
return copyPlugins
|
return copyPlugins
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) {
|
||||||
|
if (node.config.distribution != 'integ-test-zip') {
|
||||||
|
throw new GradleException("Module ${module.path} not allowed be installed distributions other than integ-test-zip because they should already have all modules bundled!")
|
||||||
|
}
|
||||||
|
if (module.plugins.hasPlugin(PluginBuildPlugin) == false) {
|
||||||
|
throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin")
|
||||||
|
}
|
||||||
|
Copy installModule = project.tasks.create(name, Copy.class)
|
||||||
|
installModule.dependsOn(setup)
|
||||||
|
installModule.into(new File(node.homeDir, "modules/${module.name}"))
|
||||||
|
installModule.from({ project.zipTree(module.tasks.bundlePlugin.outputs.files.singleFile) })
|
||||||
|
return installModule
|
||||||
|
}
|
||||||
|
|
||||||
static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Object plugin) {
|
static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Object plugin) {
|
||||||
FileCollection pluginZip
|
FileCollection pluginZip
|
||||||
if (plugin instanceof Project) {
|
if (plugin instanceof Project) {
|
||||||
|
|
|
@ -173,6 +173,7 @@ class NodeInfo {
|
||||||
static File homeDir(File baseDir, String distro) {
|
static File homeDir(File baseDir, String distro) {
|
||||||
String path
|
String path
|
||||||
switch (distro) {
|
switch (distro) {
|
||||||
|
case 'integ-test-zip':
|
||||||
case 'zip':
|
case 'zip':
|
||||||
case 'tar':
|
case 'tar':
|
||||||
path = "elasticsearch-${VersionProperties.elasticsearch}"
|
path = "elasticsearch-${VersionProperties.elasticsearch}"
|
||||||
|
@ -188,8 +189,8 @@ class NodeInfo {
|
||||||
}
|
}
|
||||||
|
|
||||||
static File confDir(File baseDir, String distro) {
|
static File confDir(File baseDir, String distro) {
|
||||||
String Path
|
|
||||||
switch (distro) {
|
switch (distro) {
|
||||||
|
case 'integ-test-zip':
|
||||||
case 'zip':
|
case 'zip':
|
||||||
case 'tar':
|
case 'tar':
|
||||||
return new File(homeDir(baseDir, distro), 'config')
|
return new File(homeDir(baseDir, distro), 'config')
|
||||||
|
|
|
@ -31,55 +31,38 @@ import org.gradle.util.ConfigureUtil
|
||||||
* Runs integration tests, but first starts an ES cluster,
|
* Runs integration tests, but first starts an ES cluster,
|
||||||
* and passes the ES cluster info as parameters to the tests.
|
* and passes the ES cluster info as parameters to the tests.
|
||||||
*/
|
*/
|
||||||
class RestIntegTestTask extends RandomizedTestingTask {
|
public class RestIntegTestTask extends RandomizedTestingTask {
|
||||||
|
|
||||||
ClusterConfiguration clusterConfig = new ClusterConfiguration()
|
ClusterConfiguration clusterConfig = new ClusterConfiguration()
|
||||||
|
|
||||||
|
/** Flag indicating whether the rest tests in the rest spec should be run. */
|
||||||
@Input
|
@Input
|
||||||
boolean includePackaged = false
|
boolean includePackaged = false
|
||||||
|
|
||||||
static RestIntegTestTask configure(Project project) {
|
public RestIntegTestTask() {
|
||||||
Map integTestOptions = [
|
description = 'Runs rest tests against an elasticsearch cluster.'
|
||||||
name: 'integTest',
|
group = JavaBasePlugin.VERIFICATION_GROUP
|
||||||
type: RestIntegTestTask,
|
dependsOn(project.testClasses)
|
||||||
dependsOn: 'testClasses',
|
classpath = project.sourceSets.test.runtimeClasspath
|
||||||
group: JavaBasePlugin.VERIFICATION_GROUP,
|
testClassesDir = project.sourceSets.test.output.classesDir
|
||||||
description: 'Runs rest tests against an elasticsearch cluster.'
|
|
||||||
]
|
// start with the common test configuration
|
||||||
RestIntegTestTask integTest = project.tasks.create(integTestOptions)
|
configure(BuildPlugin.commonTestConfig(project))
|
||||||
integTest.configure(BuildPlugin.commonTestConfig(project))
|
// override/add more for rest tests
|
||||||
integTest.configure {
|
parallelism = '1'
|
||||||
include '**/*IT.class'
|
include('**/*IT.class')
|
||||||
systemProperty 'tests.rest.load_packaged', 'false'
|
systemProperty('tests.rest.load_packaged', 'false')
|
||||||
}
|
|
||||||
RandomizedTestingTask test = project.tasks.findByName('test')
|
// copy the rest spec/tests into the test resources
|
||||||
if (test != null) {
|
|
||||||
integTest.classpath = test.classpath
|
|
||||||
integTest.testClassesDir = test.testClassesDir
|
|
||||||
integTest.mustRunAfter(test)
|
|
||||||
}
|
|
||||||
integTest.mustRunAfter(project.precommit)
|
|
||||||
project.check.dependsOn(integTest)
|
|
||||||
RestSpecHack.configureDependencies(project)
|
RestSpecHack.configureDependencies(project)
|
||||||
project.afterEvaluate {
|
project.afterEvaluate {
|
||||||
integTest.dependsOn(RestSpecHack.configureTask(project, integTest.includePackaged))
|
dependsOn(RestSpecHack.configureTask(project, includePackaged))
|
||||||
|
systemProperty('tests.cluster', "localhost:${clusterConfig.baseTransportPort}")
|
||||||
}
|
}
|
||||||
return integTest
|
|
||||||
}
|
|
||||||
|
|
||||||
RestIntegTestTask() {
|
|
||||||
// this must run after all projects have been configured, so we know any project
|
// this must run after all projects have been configured, so we know any project
|
||||||
// references can be accessed as a fully configured
|
// references can be accessed as a fully configured
|
||||||
project.gradle.projectsEvaluated {
|
project.gradle.projectsEvaluated {
|
||||||
Task test = project.tasks.findByName('test')
|
|
||||||
if (test != null) {
|
|
||||||
mustRunAfter(test)
|
|
||||||
}
|
|
||||||
ClusterFormationTasks.setup(project, this, clusterConfig)
|
ClusterFormationTasks.setup(project, this, clusterConfig)
|
||||||
configure {
|
|
||||||
parallelism '1'
|
|
||||||
systemProperty 'tests.cluster', "localhost:${clusterConfig.baseTransportPort}"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -92,11 +75,11 @@ class RestIntegTestTask extends RandomizedTestingTask {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Input
|
@Input
|
||||||
void cluster(Closure closure) {
|
public void cluster(Closure closure) {
|
||||||
ConfigureUtil.configure(closure, clusterConfig)
|
ConfigureUtil.configure(closure, clusterConfig)
|
||||||
}
|
}
|
||||||
|
|
||||||
ClusterConfiguration getCluster() {
|
public ClusterConfiguration getCluster() {
|
||||||
return clusterConfig
|
return clusterConfig
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -28,12 +28,12 @@ import org.gradle.api.tasks.Copy
|
||||||
* currently must be available on the local filesystem. This class encapsulates
|
* currently must be available on the local filesystem. This class encapsulates
|
||||||
* setting up tasks to copy the rest spec api to test resources.
|
* setting up tasks to copy the rest spec api to test resources.
|
||||||
*/
|
*/
|
||||||
class RestSpecHack {
|
public class RestSpecHack {
|
||||||
/**
|
/**
|
||||||
* Sets dependencies needed to copy the rest spec.
|
* Sets dependencies needed to copy the rest spec.
|
||||||
* @param project The project to add rest spec dependency to
|
* @param project The project to add rest spec dependency to
|
||||||
*/
|
*/
|
||||||
static void configureDependencies(Project project) {
|
public static void configureDependencies(Project project) {
|
||||||
project.configurations {
|
project.configurations {
|
||||||
restSpec
|
restSpec
|
||||||
}
|
}
|
||||||
|
@ -48,7 +48,7 @@ class RestSpecHack {
|
||||||
* @param project The project to add the copy task to
|
* @param project The project to add the copy task to
|
||||||
* @param includePackagedTests true if the packaged tests should be copied, false otherwise
|
* @param includePackagedTests true if the packaged tests should be copied, false otherwise
|
||||||
*/
|
*/
|
||||||
static Task configureTask(Project project, boolean includePackagedTests) {
|
public static Task configureTask(Project project, boolean includePackagedTests) {
|
||||||
Map copyRestSpecProps = [
|
Map copyRestSpecProps = [
|
||||||
name : 'copyRestSpec',
|
name : 'copyRestSpec',
|
||||||
type : Copy,
|
type : Copy,
|
||||||
|
@ -65,7 +65,6 @@ class RestSpecHack {
|
||||||
project.idea {
|
project.idea {
|
||||||
module {
|
module {
|
||||||
if (scopes.TEST != null) {
|
if (scopes.TEST != null) {
|
||||||
// TODO: need to add the TEST scope somehow for rest test plugin...
|
|
||||||
scopes.TEST.plus.add(project.configurations.restSpec)
|
scopes.TEST.plus.add(project.configurations.restSpec)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,22 +18,19 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.gradle.test
|
package org.elasticsearch.gradle.test
|
||||||
|
|
||||||
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
|
|
||||||
import org.gradle.api.Plugin
|
import org.gradle.api.Plugin
|
||||||
import org.gradle.api.Project
|
import org.gradle.api.Project
|
||||||
|
|
||||||
/** Configures the build to have a rest integration test. */
|
/** A plugin to add rest integration tests. Used for qa projects. */
|
||||||
class RestTestPlugin implements Plugin<Project> {
|
public class RestTestPlugin implements Plugin<Project> {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
void apply(Project project) {
|
public void apply(Project project) {
|
||||||
project.pluginManager.apply(StandaloneTestBasePlugin)
|
project.pluginManager.apply(StandaloneTestBasePlugin)
|
||||||
|
|
||||||
RandomizedTestingTask integTest = RestIntegTestTask.configure(project)
|
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
|
||||||
RestSpecHack.configureDependencies(project)
|
integTest.cluster.distribution = 'zip' // rest tests should run with the real zip
|
||||||
integTest.configure {
|
integTest.mustRunAfter(project.precommit)
|
||||||
classpath = project.sourceSets.test.runtimeClasspath
|
project.check.dependsOn(integTest)
|
||||||
testClassesDir project.sourceSets.test.output.classesDir
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,13 +2,17 @@ package org.elasticsearch.gradle.test
|
||||||
|
|
||||||
import org.gradle.api.DefaultTask
|
import org.gradle.api.DefaultTask
|
||||||
import org.gradle.api.Project
|
import org.gradle.api.Project
|
||||||
|
import org.gradle.api.Task
|
||||||
import org.gradle.api.internal.tasks.options.Option
|
import org.gradle.api.internal.tasks.options.Option
|
||||||
|
import org.gradle.util.ConfigureUtil
|
||||||
|
|
||||||
class RunTask extends DefaultTask {
|
public class RunTask extends DefaultTask {
|
||||||
|
|
||||||
ClusterConfiguration clusterConfig = new ClusterConfiguration(baseHttpPort: 9200, baseTransportPort: 9300, daemonize: false)
|
ClusterConfiguration clusterConfig = new ClusterConfiguration(baseHttpPort: 9200, baseTransportPort: 9300, daemonize: false)
|
||||||
|
|
||||||
RunTask() {
|
public RunTask() {
|
||||||
|
description = "Runs elasticsearch with '${project.path}'"
|
||||||
|
group = 'Verification'
|
||||||
project.afterEvaluate {
|
project.afterEvaluate {
|
||||||
ClusterFormationTasks.setup(project, this, clusterConfig)
|
ClusterFormationTasks.setup(project, this, clusterConfig)
|
||||||
}
|
}
|
||||||
|
@ -22,11 +26,10 @@ class RunTask extends DefaultTask {
|
||||||
clusterConfig.debug = enabled;
|
clusterConfig.debug = enabled;
|
||||||
}
|
}
|
||||||
|
|
||||||
static void configure(Project project) {
|
/** Configure the cluster that will be run. */
|
||||||
RunTask task = project.tasks.create(
|
@Override
|
||||||
name: 'run',
|
public Task configure(Closure closure) {
|
||||||
type: RunTask,
|
ConfigureUtil.configure(closure, clusterConfig)
|
||||||
description: "Runs elasticsearch with '${project.path}'",
|
return this
|
||||||
group: 'Verification')
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,35 +27,26 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks
|
||||||
import org.gradle.api.Plugin
|
import org.gradle.api.Plugin
|
||||||
import org.gradle.api.Project
|
import org.gradle.api.Project
|
||||||
import org.gradle.api.plugins.JavaBasePlugin
|
import org.gradle.api.plugins.JavaBasePlugin
|
||||||
|
import org.gradle.plugins.ide.eclipse.model.EclipseClasspath
|
||||||
|
|
||||||
/** Configures the build to have a rest integration test. */
|
/** Configures the build to have a rest integration test. */
|
||||||
class StandaloneTestBasePlugin implements Plugin<Project> {
|
public class StandaloneTestBasePlugin implements Plugin<Project> {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
void apply(Project project) {
|
public void apply(Project project) {
|
||||||
project.pluginManager.apply(JavaBasePlugin)
|
project.pluginManager.apply(JavaBasePlugin)
|
||||||
project.pluginManager.apply(RandomizedTestingPlugin)
|
project.pluginManager.apply(RandomizedTestingPlugin)
|
||||||
|
|
||||||
BuildPlugin.globalBuildInfo(project)
|
BuildPlugin.globalBuildInfo(project)
|
||||||
BuildPlugin.configureRepositories(project)
|
BuildPlugin.configureRepositories(project)
|
||||||
|
|
||||||
// remove some unnecessary tasks for a qa test
|
|
||||||
project.tasks.removeAll { it.name in ['assemble', 'buildDependents'] }
|
|
||||||
|
|
||||||
// only setup tests to build
|
// only setup tests to build
|
||||||
project.sourceSets {
|
project.sourceSets.create('test')
|
||||||
test
|
project.dependencies.add('testCompile', "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}")
|
||||||
}
|
|
||||||
project.dependencies {
|
project.eclipse.classpath.sourceSets = [project.sourceSets.test]
|
||||||
testCompile "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}"
|
project.eclipse.classpath.plusConfigurations = [project.configurations.testRuntime]
|
||||||
}
|
|
||||||
|
|
||||||
project.eclipse {
|
|
||||||
classpath {
|
|
||||||
sourceSets = [project.sourceSets.test]
|
|
||||||
plusConfigurations = [project.configurations.testRuntime]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
PrecommitTasks.create(project, false)
|
PrecommitTasks.create(project, false)
|
||||||
project.check.dependsOn(project.precommit)
|
project.check.dependsOn(project.precommit)
|
||||||
}
|
}
|
||||||
|
|
|
@ -25,11 +25,11 @@ import org.gradle.api.Plugin
|
||||||
import org.gradle.api.Project
|
import org.gradle.api.Project
|
||||||
import org.gradle.api.plugins.JavaBasePlugin
|
import org.gradle.api.plugins.JavaBasePlugin
|
||||||
|
|
||||||
/** Configures the build to have only unit tests. */
|
/** A plugin to add tests only. Used for QA tests that run arbitrary unit tests. */
|
||||||
class StandaloneTestPlugin implements Plugin<Project> {
|
public class StandaloneTestPlugin implements Plugin<Project> {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
void apply(Project project) {
|
public void apply(Project project) {
|
||||||
project.pluginManager.apply(StandaloneTestBasePlugin)
|
project.pluginManager.apply(StandaloneTestBasePlugin)
|
||||||
|
|
||||||
Map testOptions = [
|
Map testOptions = [
|
||||||
|
@ -41,10 +41,8 @@ class StandaloneTestPlugin implements Plugin<Project> {
|
||||||
]
|
]
|
||||||
RandomizedTestingTask test = project.tasks.create(testOptions)
|
RandomizedTestingTask test = project.tasks.create(testOptions)
|
||||||
test.configure(BuildPlugin.commonTestConfig(project))
|
test.configure(BuildPlugin.commonTestConfig(project))
|
||||||
test.configure {
|
test.classpath = project.sourceSets.test.runtimeClasspath
|
||||||
classpath = project.sourceSets.test.runtimeClasspath
|
test.testClassesDir project.sourceSets.test.output.classesDir
|
||||||
testClassesDir project.sourceSets.test.output.classesDir
|
|
||||||
}
|
|
||||||
test.mustRunAfter(project.precommit)
|
test.mustRunAfter(project.precommit)
|
||||||
project.check.dependsOn(test)
|
project.check.dependsOn(test)
|
||||||
}
|
}
|
||||||
|
|
|
@ -129,8 +129,4 @@ if (isEclipse == false || project.path == ":core-tests") {
|
||||||
}
|
}
|
||||||
check.dependsOn integTest
|
check.dependsOn integTest
|
||||||
integTest.mustRunAfter test
|
integTest.mustRunAfter test
|
||||||
|
|
||||||
RestSpecHack.configureDependencies(project)
|
|
||||||
Task copyRestSpec = RestSpecHack.configureTask(project, true)
|
|
||||||
integTest.dependsOn copyRestSpec
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -72,14 +72,14 @@ public class NodeInfo extends BaseNodeResponse {
|
||||||
private HttpInfo http;
|
private HttpInfo http;
|
||||||
|
|
||||||
@Nullable
|
@Nullable
|
||||||
private PluginsInfo plugins;
|
private PluginsAndModules plugins;
|
||||||
|
|
||||||
NodeInfo() {
|
NodeInfo() {
|
||||||
}
|
}
|
||||||
|
|
||||||
public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings,
|
public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings,
|
||||||
@Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
|
@Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
|
||||||
@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsInfo plugins) {
|
@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) {
|
||||||
super(node);
|
super(node);
|
||||||
this.version = version;
|
this.version = version;
|
||||||
this.build = build;
|
this.build = build;
|
||||||
|
@ -172,7 +172,7 @@ public class NodeInfo extends BaseNodeResponse {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Nullable
|
@Nullable
|
||||||
public PluginsInfo getPlugins() {
|
public PluginsAndModules getPlugins() {
|
||||||
return this.plugins;
|
return this.plugins;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -217,7 +217,8 @@ public class NodeInfo extends BaseNodeResponse {
|
||||||
http = HttpInfo.readHttpInfo(in);
|
http = HttpInfo.readHttpInfo(in);
|
||||||
}
|
}
|
||||||
if (in.readBoolean()) {
|
if (in.readBoolean()) {
|
||||||
plugins = PluginsInfo.readPluginsInfo(in);
|
plugins = new PluginsAndModules();
|
||||||
|
plugins.readFrom(in);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,115 @@
|
||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.action.admin.cluster.node.info;
|
||||||
|
|
||||||
|
import org.elasticsearch.common.io.stream.StreamInput;
|
||||||
|
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||||
|
import org.elasticsearch.common.io.stream.Streamable;
|
||||||
|
import org.elasticsearch.common.xcontent.ToXContent;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
|
import org.elasticsearch.plugins.PluginInfo;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Collections;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Information about plugins and modules
|
||||||
|
*/
|
||||||
|
public class PluginsAndModules implements Streamable, ToXContent {
|
||||||
|
private List<PluginInfo> plugins;
|
||||||
|
private List<PluginInfo> modules;
|
||||||
|
|
||||||
|
public PluginsAndModules() {
|
||||||
|
plugins = new ArrayList<>();
|
||||||
|
modules = new ArrayList<>();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an ordered list based on plugins name
|
||||||
|
*/
|
||||||
|
public List<PluginInfo> getPluginInfos() {
|
||||||
|
List<PluginInfo> plugins = new ArrayList<>(this.plugins);
|
||||||
|
Collections.sort(plugins, (p1, p2) -> p1.getName().compareTo(p2.getName()));
|
||||||
|
return plugins;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an ordered list based on modules name
|
||||||
|
*/
|
||||||
|
public List<PluginInfo> getModuleInfos() {
|
||||||
|
List<PluginInfo> modules = new ArrayList<>(this.modules);
|
||||||
|
Collections.sort(modules, (p1, p2) -> p1.getName().compareTo(p2.getName()));
|
||||||
|
return modules;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void addPlugin(PluginInfo info) {
|
||||||
|
plugins.add(info);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void addModule(PluginInfo info) {
|
||||||
|
modules.add(info);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void readFrom(StreamInput in) throws IOException {
|
||||||
|
if (plugins.isEmpty() == false || modules.isEmpty() == false) {
|
||||||
|
throw new IllegalStateException("instance is already populated");
|
||||||
|
}
|
||||||
|
int plugins_size = in.readInt();
|
||||||
|
for (int i = 0; i < plugins_size; i++) {
|
||||||
|
plugins.add(PluginInfo.readFromStream(in));
|
||||||
|
}
|
||||||
|
int modules_size = in.readInt();
|
||||||
|
for (int i = 0; i < modules_size; i++) {
|
||||||
|
modules.add(PluginInfo.readFromStream(in));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void writeTo(StreamOutput out) throws IOException {
|
||||||
|
out.writeInt(plugins.size());
|
||||||
|
for (PluginInfo plugin : getPluginInfos()) {
|
||||||
|
plugin.writeTo(out);
|
||||||
|
}
|
||||||
|
out.writeInt(modules.size());
|
||||||
|
for (PluginInfo module : getModuleInfos()) {
|
||||||
|
module.writeTo(out);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||||
|
builder.startArray("plugins");
|
||||||
|
for (PluginInfo pluginInfo : getPluginInfos()) {
|
||||||
|
pluginInfo.toXContent(builder, params);
|
||||||
|
}
|
||||||
|
builder.endArray();
|
||||||
|
// TODO: not ideal, make a better api for this (e.g. with jar metadata, and so on)
|
||||||
|
builder.startArray("modules");
|
||||||
|
for (PluginInfo moduleInfo : getModuleInfos()) {
|
||||||
|
moduleInfo.toXContent(builder, params);
|
||||||
|
}
|
||||||
|
builder.endArray();
|
||||||
|
|
||||||
|
return builder;
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,101 +0,0 @@
|
||||||
/*
|
|
||||||
* Licensed to Elasticsearch under one or more contributor
|
|
||||||
* license agreements. See the NOTICE file distributed with
|
|
||||||
* this work for additional information regarding copyright
|
|
||||||
* ownership. Elasticsearch licenses this file to you under
|
|
||||||
* the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
* not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing,
|
|
||||||
* software distributed under the License is distributed on an
|
|
||||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
||||||
* KIND, either express or implied. See the License for the
|
|
||||||
* specific language governing permissions and limitations
|
|
||||||
* under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package org.elasticsearch.action.admin.cluster.node.info;
|
|
||||||
|
|
||||||
import org.elasticsearch.common.io.stream.StreamInput;
|
|
||||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
|
||||||
import org.elasticsearch.common.io.stream.Streamable;
|
|
||||||
import org.elasticsearch.common.xcontent.ToXContent;
|
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
|
||||||
import org.elasticsearch.plugins.PluginInfo;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Comparator;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
public class PluginsInfo implements Streamable, ToXContent {
|
|
||||||
static final class Fields {
|
|
||||||
static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins");
|
|
||||||
}
|
|
||||||
|
|
||||||
private List<PluginInfo> infos;
|
|
||||||
|
|
||||||
public PluginsInfo() {
|
|
||||||
infos = new ArrayList<>();
|
|
||||||
}
|
|
||||||
|
|
||||||
public PluginsInfo(int size) {
|
|
||||||
infos = new ArrayList<>(size);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return an ordered list based on plugins name
|
|
||||||
*/
|
|
||||||
public List<PluginInfo> getInfos() {
|
|
||||||
Collections.sort(infos, new Comparator<PluginInfo>() {
|
|
||||||
@Override
|
|
||||||
public int compare(final PluginInfo o1, final PluginInfo o2) {
|
|
||||||
return o1.getName().compareTo(o2.getName());
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return infos;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void add(PluginInfo info) {
|
|
||||||
infos.add(info);
|
|
||||||
}
|
|
||||||
|
|
||||||
public static PluginsInfo readPluginsInfo(StreamInput in) throws IOException {
|
|
||||||
PluginsInfo infos = new PluginsInfo();
|
|
||||||
infos.readFrom(in);
|
|
||||||
return infos;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void readFrom(StreamInput in) throws IOException {
|
|
||||||
int plugins_size = in.readInt();
|
|
||||||
for (int i = 0; i < plugins_size; i++) {
|
|
||||||
infos.add(PluginInfo.readFromStream(in));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
out.writeInt(infos.size());
|
|
||||||
for (PluginInfo plugin : getInfos()) {
|
|
||||||
plugin.writeTo(out);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
|
||||||
builder.startArray(Fields.PLUGINS);
|
|
||||||
for (PluginInfo pluginInfo : getInfos()) {
|
|
||||||
pluginInfo.toXContent(builder, params);
|
|
||||||
}
|
|
||||||
builder.endArray();
|
|
||||||
|
|
||||||
return builder;
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -74,7 +74,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
|
||||||
versions.add(nodeResponse.nodeInfo().getVersion());
|
versions.add(nodeResponse.nodeInfo().getVersion());
|
||||||
process.addNodeStats(nodeResponse.nodeStats());
|
process.addNodeStats(nodeResponse.nodeStats());
|
||||||
jvm.addNodeInfoStats(nodeResponse.nodeInfo(), nodeResponse.nodeStats());
|
jvm.addNodeInfoStats(nodeResponse.nodeInfo(), nodeResponse.nodeStats());
|
||||||
plugins.addAll(nodeResponse.nodeInfo().getPlugins().getInfos());
|
plugins.addAll(nodeResponse.nodeInfo().getPlugins().getPluginInfos());
|
||||||
|
|
||||||
// now do the stats that should be deduped by hardware (implemented by ip deduping)
|
// now do the stats that should be deduped by hardware (implemented by ip deduping)
|
||||||
TransportAddress publishAddress = nodeResponse.nodeInfo().getTransport().address().publishAddress();
|
TransportAddress publishAddress = nodeResponse.nodeInfo().getTransport().address().publishAddress();
|
||||||
|
|
|
@ -131,34 +131,48 @@ final class Security {
|
||||||
@SuppressForbidden(reason = "proper use of URL")
|
@SuppressForbidden(reason = "proper use of URL")
|
||||||
static Map<String,Policy> getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException {
|
static Map<String,Policy> getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException {
|
||||||
Map<String,Policy> map = new HashMap<>();
|
Map<String,Policy> map = new HashMap<>();
|
||||||
|
// collect up lists of plugins and modules
|
||||||
|
List<Path> pluginsAndModules = new ArrayList<>();
|
||||||
if (Files.exists(environment.pluginsFile())) {
|
if (Files.exists(environment.pluginsFile())) {
|
||||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
|
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
|
||||||
for (Path plugin : stream) {
|
for (Path plugin : stream) {
|
||||||
Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
|
pluginsAndModules.add(plugin);
|
||||||
if (Files.exists(policyFile)) {
|
}
|
||||||
// first get a list of URLs for the plugins' jars:
|
}
|
||||||
// we resolve symlinks so map is keyed on the normalize codebase name
|
}
|
||||||
List<URL> codebases = new ArrayList<>();
|
if (Files.exists(environment.modulesFile())) {
|
||||||
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
|
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.modulesFile())) {
|
||||||
for (Path jar : jarStream) {
|
for (Path plugin : stream) {
|
||||||
codebases.add(jar.toRealPath().toUri().toURL());
|
pluginsAndModules.add(plugin);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
// parse the plugin's policy file into a set of permissions
|
// now process each one
|
||||||
Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));
|
for (Path plugin : pluginsAndModules) {
|
||||||
|
Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
|
||||||
// consult this policy for each of the plugin's jars:
|
if (Files.exists(policyFile)) {
|
||||||
for (URL url : codebases) {
|
// first get a list of URLs for the plugins' jars:
|
||||||
if (map.put(url.getFile(), policy) != null) {
|
// we resolve symlinks so map is keyed on the normalize codebase name
|
||||||
// just be paranoid ok?
|
List<URL> codebases = new ArrayList<>();
|
||||||
throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
|
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
|
||||||
}
|
for (Path jar : jarStream) {
|
||||||
}
|
codebases.add(jar.toRealPath().toUri().toURL());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parse the plugin's policy file into a set of permissions
|
||||||
|
Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));
|
||||||
|
|
||||||
|
// consult this policy for each of the plugin's jars:
|
||||||
|
for (URL url : codebases) {
|
||||||
|
if (map.put(url.getFile(), policy) != null) {
|
||||||
|
// just be paranoid ok?
|
||||||
|
throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return Collections.unmodifiableMap(map);
|
return Collections.unmodifiableMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -228,6 +242,7 @@ final class Security {
|
||||||
// read-only dirs
|
// read-only dirs
|
||||||
addPath(policy, "path.home", environment.binFile(), "read,readlink");
|
addPath(policy, "path.home", environment.binFile(), "read,readlink");
|
||||||
addPath(policy, "path.home", environment.libFile(), "read,readlink");
|
addPath(policy, "path.home", environment.libFile(), "read,readlink");
|
||||||
|
addPath(policy, "path.home", environment.modulesFile(), "read,readlink");
|
||||||
addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink");
|
addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink");
|
||||||
addPath(policy, "path.conf", environment.configFile(), "read,readlink");
|
addPath(policy, "path.conf", environment.configFile(), "read,readlink");
|
||||||
addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink");
|
addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink");
|
||||||
|
|
|
@ -125,7 +125,7 @@ public class TransportClient extends AbstractClient {
|
||||||
.put(CLIENT_TYPE_SETTING, CLIENT_TYPE)
|
.put(CLIENT_TYPE_SETTING, CLIENT_TYPE)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
PluginsService pluginsService = new PluginsService(settings, null, pluginClasses);
|
PluginsService pluginsService = new PluginsService(settings, null, null, pluginClasses);
|
||||||
this.settings = pluginsService.updatedSettings();
|
this.settings = pluginsService.updatedSettings();
|
||||||
|
|
||||||
Version version = Version.CURRENT;
|
Version version = Version.CURRENT;
|
||||||
|
|
|
@ -65,8 +65,8 @@ public class MacAddressProvider {
|
||||||
byte[] address = null;
|
byte[] address = null;
|
||||||
try {
|
try {
|
||||||
address = getMacAddress();
|
address = getMacAddress();
|
||||||
} catch( SocketException se ) {
|
} catch (Throwable t) {
|
||||||
logger.warn("Unable to get mac address, will use a dummy address", se);
|
logger.warn("Unable to get mac address, will use a dummy address", t);
|
||||||
// address will be set below
|
// address will be set below
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.unit.TimeValue;
|
import org.elasticsearch.common.unit.TimeValue;
|
||||||
|
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
|
||||||
import org.elasticsearch.threadpool.ThreadPool;
|
import org.elasticsearch.threadpool.ThreadPool;
|
||||||
import org.elasticsearch.transport.*;
|
import org.elasticsearch.transport.*;
|
||||||
|
|
||||||
|
@ -41,7 +42,7 @@ import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.new
|
||||||
public class NodesFaultDetection extends FaultDetection {
|
public class NodesFaultDetection extends FaultDetection {
|
||||||
|
|
||||||
public static final String PING_ACTION_NAME = "internal:discovery/zen/fd/ping";
|
public static final String PING_ACTION_NAME = "internal:discovery/zen/fd/ping";
|
||||||
|
|
||||||
public abstract static class Listener {
|
public abstract static class Listener {
|
||||||
|
|
||||||
public void onNodeFailure(DiscoveryNode node, String reason) {}
|
public void onNodeFailure(DiscoveryNode node, String reason) {}
|
||||||
|
@ -145,14 +146,18 @@ public class NodesFaultDetection extends FaultDetection {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void notifyNodeFailure(final DiscoveryNode node, final String reason) {
|
private void notifyNodeFailure(final DiscoveryNode node, final String reason) {
|
||||||
threadPool.generic().execute(new Runnable() {
|
try {
|
||||||
@Override
|
threadPool.generic().execute(new Runnable() {
|
||||||
public void run() {
|
@Override
|
||||||
for (Listener listener : listeners) {
|
public void run() {
|
||||||
listener.onNodeFailure(node, reason);
|
for (Listener listener : listeners) {
|
||||||
|
listener.onNodeFailure(node, reason);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
});
|
||||||
});
|
} catch (EsRejectedExecutionException ex) {
|
||||||
|
logger.trace("[node ] [{}] ignoring node failure (reason [{}]). Local node is shutting down", ex, node, reason);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private void notifyPingReceived(final PingRequest pingRequest) {
|
private void notifyPingReceived(final PingRequest pingRequest) {
|
||||||
|
|
|
@ -58,6 +58,8 @@ public class Environment {
|
||||||
|
|
||||||
private final Path pluginsFile;
|
private final Path pluginsFile;
|
||||||
|
|
||||||
|
private final Path modulesFile;
|
||||||
|
|
||||||
private final Path sharedDataFile;
|
private final Path sharedDataFile;
|
||||||
|
|
||||||
/** location of bin/, used by plugin manager */
|
/** location of bin/, used by plugin manager */
|
||||||
|
@ -157,6 +159,7 @@ public class Environment {
|
||||||
|
|
||||||
binFile = homeFile.resolve("bin");
|
binFile = homeFile.resolve("bin");
|
||||||
libFile = homeFile.resolve("lib");
|
libFile = homeFile.resolve("lib");
|
||||||
|
modulesFile = homeFile.resolve("modules");
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -275,6 +278,10 @@ public class Environment {
|
||||||
return libFile;
|
return libFile;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public Path modulesFile() {
|
||||||
|
return modulesFile;
|
||||||
|
}
|
||||||
|
|
||||||
public Path logsFile() {
|
public Path logsFile() {
|
||||||
return logsFile;
|
return logsFile;
|
||||||
}
|
}
|
||||||
|
|
|
@ -336,8 +336,6 @@ public class DocumentMapper implements ToXContent {
|
||||||
|
|
||||||
private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
|
private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
|
||||||
assert mappingLock.isWriteLockedByCurrentThread();
|
assert mappingLock.isWriteLockedByCurrentThread();
|
||||||
// first ensure we don't have any incompatible new fields
|
|
||||||
mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, updateAllTypes);
|
|
||||||
|
|
||||||
// update mappers for this document type
|
// update mappers for this document type
|
||||||
Map<String, ObjectMapper> builder = new HashMap<>(this.objectMappers);
|
Map<String, ObjectMapper> builder = new HashMap<>(this.objectMappers);
|
||||||
|
@ -356,6 +354,7 @@ public class DocumentMapper implements ToXContent {
|
||||||
|
|
||||||
public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
|
public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
|
||||||
try (ReleasableLock lock = mappingWriteLock.acquire()) {
|
try (ReleasableLock lock = mappingWriteLock.acquire()) {
|
||||||
|
mapperService.checkMappersCompatibility(type, mapping, updateAllTypes);
|
||||||
final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes);
|
final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes);
|
||||||
this.mapping.merge(mapping, mergeResult);
|
this.mapping.merge(mapping, mergeResult);
|
||||||
if (simulate == false) {
|
if (simulate == false) {
|
||||||
|
|
|
@ -28,8 +28,6 @@ import org.elasticsearch.common.Strings;
|
||||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.util.concurrent.ReleasableLock;
|
import org.elasticsearch.common.util.concurrent.ReleasableLock;
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
|
||||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||||
import org.elasticsearch.common.xcontent.XContentParser;
|
import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
|
import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
|
||||||
|
@ -47,7 +45,6 @@ import java.io.IOException;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
|
||||||
/** A parser for documents, given mappings from a DocumentMapper */
|
/** A parser for documents, given mappings from a DocumentMapper */
|
||||||
|
@ -716,37 +713,64 @@ class DocumentParser implements Closeable {
|
||||||
// The path of the dest field might be completely different from the current one so we need to reset it
|
// The path of the dest field might be completely different from the current one so we need to reset it
|
||||||
context = context.overridePath(new ContentPath(0));
|
context = context.overridePath(new ContentPath(0));
|
||||||
|
|
||||||
|
String[] paths = Strings.splitStringToArray(field, '.');
|
||||||
|
String fieldName = paths[paths.length-1];
|
||||||
ObjectMapper mapper = context.root();
|
     ObjectMapper mapper = context.root();
-    String objectPath = "";
-    String fieldPath = field;
-    int posDot = field.lastIndexOf('.');
-    if (posDot > 0) {
-        objectPath = field.substring(0, posDot);
-        context.path().add(objectPath);
-        mapper = context.docMapper().objectMappers().get(objectPath);
-        fieldPath = field.substring(posDot + 1);
-    }
-    if (mapper == null) {
-        //TODO: Create an object dynamically?
-        throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]");
-    }
-    ObjectMapper update = parseDynamicValue(context, mapper, fieldPath, context.parser().currentToken());
+    ObjectMapper[] mappers = new ObjectMapper[paths.length-1];
+    if (paths.length > 1) {
+        ObjectMapper parent = context.root();
+        for (int i = 0; i < paths.length-1; i++) {
+            mapper = context.docMapper().objectMappers().get(context.path().fullPathAsText(paths[i]));
+            if (mapper == null) {
+                // One mapping is missing, check if we are allowed to create a dynamic one.
+                ObjectMapper.Dynamic dynamic = parent.dynamic();
+                if (dynamic == null) {
+                    dynamic = dynamicOrDefault(context.root().dynamic());
+                }
+
+                switch (dynamic) {
+                    case STRICT:
+                        throw new StrictDynamicMappingException(parent.fullPath(), paths[i]);
+                    case TRUE:
+                        Mapper.Builder builder = context.root().findTemplateBuilder(context, paths[i], "object");
+                        if (builder == null) {
+                            // if this is a non root object, then explicitly set the dynamic behavior if set
+                            if (!(parent instanceof RootObjectMapper) && parent.dynamic() != ObjectMapper.Defaults.DYNAMIC) {
+                                ((ObjectMapper.Builder) builder).dynamic(parent.dynamic());
+                            }
+                            builder = MapperBuilders.object(paths[i]).enabled(true).pathType(parent.pathType());
+                        }
+                        Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
+                        mapper = (ObjectMapper) builder.build(builderContext);
+                        if (mapper.nested() != ObjectMapper.Nested.NO) {
+                            throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().fullPathAsText(paths[i]) + "]) through `copy_to`");
+                        }
+                        break;
+                    case FALSE:
+                        // Maybe we should log something to tell the user that the copy_to is ignored in this case.
+                        break;
+                    default:
+                        throw new AssertionError("Unexpected dynamic type " + dynamic);
+
+                }
+            }
+            context.path().add(paths[i]);
+            mappers[i] = mapper;
+            parent = mapper;
+        }
+    }
+    ObjectMapper update = parseDynamicValue(context, mapper, fieldName, context.parser().currentToken());
     assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping

-    // propagate the update to the root
-    while (objectPath.length() > 0) {
-        String parentPath = "";
-        ObjectMapper parent = context.root();
-        posDot = objectPath.lastIndexOf('.');
-        if (posDot > 0) {
-            parentPath = objectPath.substring(0, posDot);
-            parent = context.docMapper().objectMappers().get(parentPath);
-        }
-        if (parent == null) {
-            throw new IllegalStateException("[" + objectPath + "] has no parent for path [" + parentPath + "]");
-        }
-        update = parent.mappingUpdate(update);
-        objectPath = parentPath;
-    }
+    if (paths.length > 1) {
+        for (int i = paths.length - 2; i >= 0; i--) {
+            ObjectMapper parent = context.root();
+            if (i > 0) {
+                parent = mappers[i-1];
+            }
+            assert parent != null;
+            update = parent.mappingUpdate(update);
+        }
+    }
     context.addDynamicMappingsUpdate(update);
 }
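Illustration (editor's sketch, not part of this commit): the rewritten copy_to handling above walks the dotted target field one path element at a time and creates any missing intermediate object mappers according to the parent's dynamic setting. The standalone Java sketch below only shows the path-splitting idea, i.e. which parent object paths a dotted target such as "a.b.leaf" implies; class and method names are invented for illustration and are not Elasticsearch API.

import java.util.ArrayList;
import java.util.List;

// Standalone illustration: derive the parent object paths that a dotted
// copy_to target such as "very.far.field" implies ("very", "very.far").
public class CopyToPathsSketch {
    static List<String> parentObjectPaths(String field) {
        String[] paths = field.split("\\.");
        List<String> parents = new ArrayList<>();
        StringBuilder current = new StringBuilder();
        // every element except the last names an object that must already exist or be created dynamically
        for (int i = 0; i < paths.length - 1; i++) {
            if (current.length() > 0) {
                current.append('.');
            }
            current.append(paths[i]);
            parents.add(current.toString());
        }
        return parents;
    }

    public static void main(String[] args) {
        System.out.println(parentObjectPaths("very.far.field")); // [very, very.far]
        System.out.println(parentObjectPaths("plain"));          // []
    }
}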
@@ -307,7 +307,6 @@ public abstract class FieldMapper extends Mapper {
         if (ref.get().equals(fieldType()) == false) {
             throw new IllegalStateException("Cannot overwrite field type reference to unequal reference");
         }
-        ref.incrementAssociatedMappers();
         this.fieldTypeRef = ref;
     }
@@ -380,11 +379,6 @@ public abstract class FieldMapper extends Mapper {
             return;
         }

-        boolean strict = this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false;
-        fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts, strict);
-        for (String conflict : subConflicts) {
-            mergeResult.addConflict(conflict);
-        }
         multiFields.merge(mergeWith, mergeResult);

         if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
@@ -24,6 +24,7 @@ import org.elasticsearch.common.regex.Regex;

 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -38,18 +39,49 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {

     /** Full field name to field type */
     private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;

+    /** Full field name to types containing a mapping for this full name. */
+    private final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
+
     /** Index field name to field type */
     private final CopyOnWriteHashMap<String, MappedFieldTypeReference> indexNameToFieldType;

+    /** Index field name to types containing a mapping for this index name. */
+    private final CopyOnWriteHashMap<String, Set<String>> indexNameToTypes;
+
     /** Create a new empty instance. */
     public FieldTypeLookup() {
         fullNameToFieldType = new CopyOnWriteHashMap<>();
+        fullNameToTypes = new CopyOnWriteHashMap<>();
         indexNameToFieldType = new CopyOnWriteHashMap<>();
+        indexNameToTypes = new CopyOnWriteHashMap<>();
     }

-    private FieldTypeLookup(CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName, CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName) {
-        fullNameToFieldType = fullName;
-        indexNameToFieldType = indexName;
-    }
+    private FieldTypeLookup(
+            CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName,
+            CopyOnWriteHashMap<String, Set<String>> fullNameToTypes,
+            CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName,
+            CopyOnWriteHashMap<String, Set<String>> indexNameToTypes) {
+        this.fullNameToFieldType = fullName;
+        this.fullNameToTypes = fullNameToTypes;
+        this.indexNameToFieldType = indexName;
+        this.indexNameToTypes = indexNameToTypes;
+    }
+
+    private static CopyOnWriteHashMap<String, Set<String>> addType(CopyOnWriteHashMap<String, Set<String>> map, String key, String type) {
+        Set<String> types = map.get(key);
+        if (types == null) {
+            return map.copyAndPut(key, Collections.singleton(type));
+        } else if (types.contains(type)) {
+            // noting to do
+            return map;
+        } else {
+            Set<String> newTypes = new HashSet<>(types.size() + 1);
+            newTypes.addAll(types);
+            newTypes.add(type);
+            assert newTypes.size() == types.size() + 1;
+            newTypes = Collections.unmodifiableSet(newTypes);
+            return map.copyAndPut(key, newTypes);
+        }
+    }

     /**
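Illustration (editor's sketch, not part of this commit): addType above never mutates the published map; it returns a new copy-on-write map whose value set has been extended and made unmodifiable, so readers of the old FieldTypeLookup keep a consistent view. CopyOnWriteHashMap is Elasticsearch-internal, so the sketch below reproduces the same "copy and put an extended, unmodifiable set" pattern with plain JDK collections; the class name is the editor's.

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Standalone illustration of the addType pattern: return a copy of the map
// with an extended, unmodifiable value set; reuse the old map when unchanged.
public class AddTypeSketch {
    static Map<String, Set<String>> addType(Map<String, Set<String>> map, String key, String type) {
        Set<String> types = map.get(key);
        if (types != null && types.contains(type)) {
            return map; // nothing to do, the mapping is already recorded
        }
        Set<String> newTypes = new HashSet<>();
        if (types != null) {
            newTypes.addAll(types);
        }
        newTypes.add(type);
        Map<String, Set<String>> copy = new HashMap<>(map);
        copy.put(key, Collections.unmodifiableSet(newTypes));
        return Collections.unmodifiableMap(copy);
    }

    public static void main(String[] args) {
        Map<String, Set<String>> empty = Collections.emptyMap();
        Map<String, Set<String>> one = addType(empty, "title", "book");
        Map<String, Set<String>> two = addType(one, "title", "article");
        System.out.println(empty + " -> " + one + " -> " + two);
    }
}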
@@ -63,7 +95,9 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
             throw new IllegalArgumentException("Default mappings should not be added to the lookup");
         }
         CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName = this.fullNameToFieldType;
+        CopyOnWriteHashMap<String, Set<String>> fullNameToTypes = this.fullNameToTypes;
         CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName = this.indexNameToFieldType;
+        CopyOnWriteHashMap<String, Set<String>> indexNameToTypes = this.indexNameToTypes;

         for (FieldMapper fieldMapper : newFieldMappers) {
             MappedFieldType fieldType = fieldMapper.fieldType();
@@ -91,8 +125,23 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
                 // this new field bridges between two existing field names (a full and index name), which we cannot support
                 throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName());
             }
+
+            fullNameToTypes = addType(fullNameToTypes, fieldType.names().fullName(), type);
+            indexNameToTypes = addType(indexNameToTypes, fieldType.names().indexName(), type);
         }
-        return new FieldTypeLookup(fullName, indexName);
+        return new FieldTypeLookup(fullName, fullNameToTypes, indexName, indexNameToTypes);
+    }
+
+    private static boolean beStrict(String type, Set<String> types, boolean updateAllTypes) {
+        assert types.size() >= 1;
+        if (updateAllTypes) {
+            return false;
+        } else if (types.size() == 1 && types.contains(type)) {
+            // we are implicitly updating all types
+            return false;
+        } else {
+            return true;
+        }
     }

     /**
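Illustration (editor's sketch, not part of this commit): beStrict above relaxes the compatibility check when an update can only affect one type, either because update_all_types was passed or because the field is mapped in the type being updated and nowhere else. A minimal restatement of that decision, with invented names and plain JDK types:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

// Standalone restatement of the strictness decision used by the lookup above.
public class StrictnessSketch {
    static boolean beStrict(String type, Set<String> typesWithField, boolean updateAllTypes) {
        if (updateAllTypes) {
            return false;   // caller explicitly asked to update every type
        }
        if (typesWithField.size() == 1 && typesWithField.contains(type)) {
            return false;   // only this type maps the field, so the update is implicitly "all types"
        }
        return true;        // other types share the field: require full compatibility
    }

    public static void main(String[] args) {
        System.out.println(beStrict("book", Collections.singleton("book"), false));                       // false
        System.out.println(beStrict("book", new HashSet<>(Arrays.asList("book", "article")), false));     // true
    }
}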
@@ -100,14 +149,15 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
      * If any are not compatible, an IllegalArgumentException is thrown.
      * If updateAllTypes is true, only basic compatibility is checked.
      */
-    public void checkCompatibility(Collection<FieldMapper> newFieldMappers, boolean updateAllTypes) {
-        for (FieldMapper fieldMapper : newFieldMappers) {
+    public void checkCompatibility(String type, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
+        for (FieldMapper fieldMapper : fieldMappers) {
             MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
             if (ref != null) {
                 List<String> conflicts = new ArrayList<>();
                 ref.get().checkTypeName(fieldMapper.fieldType(), conflicts);
                 if (conflicts.isEmpty()) { // only check compat if they are the same type
-                    boolean strict = updateAllTypes == false;
+                    final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
+                    boolean strict = beStrict(type, types, updateAllTypes);
                     ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
                 }
                 if (conflicts.isEmpty() == false) {
@@ -121,7 +171,8 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
                 List<String> conflicts = new ArrayList<>();
                 indexNameRef.get().checkTypeName(fieldMapper.fieldType(), conflicts);
                 if (conflicts.isEmpty()) { // only check compat if they are the same type
-                    boolean strict = updateAllTypes == false;
+                    final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
+                    boolean strict = beStrict(type, types, updateAllTypes);
                     indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
                 }
                 if (conflicts.isEmpty() == false) {
@@ -138,6 +189,15 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
         return ref.get();
     }

+    /** Get the set of types that have a mapping for the given field. */
+    public Set<String> getTypes(String field) {
+        Set<String> types = fullNameToTypes.get(field);
+        if (types == null) {
+            types = Collections.emptySet();
+        }
+        return types;
+    }
+
     /** Returns the field type for the given index name */
     public MappedFieldType getByIndexName(String field) {
         MappedFieldTypeReference ref = indexNameToFieldType.get(field);
@@ -145,6 +205,15 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
         return ref.get();
     }

+    /** Get the set of types that have a mapping for the given field. */
+    public Set<String> getTypesByIndexName(String field) {
+        Set<String> types = indexNameToTypes.get(field);
+        if (types == null) {
+            types = Collections.emptySet();
+        }
+        return types;
+    }
+
     /**
      * Returns a list of the index names of a simple match regex like pattern against full name and index name.
      */
@@ -23,12 +23,10 @@ package org.elasticsearch.index.mapper;
  */
 public class MappedFieldTypeReference {
     private MappedFieldType fieldType; // the current field type this reference points to
-    private int numAssociatedMappers;

     public MappedFieldTypeReference(MappedFieldType fieldType) {
         fieldType.freeze(); // ensure frozen
         this.fieldType = fieldType;
-        this.numAssociatedMappers = 1;
     }

     public MappedFieldType get() {
@@ -40,11 +38,4 @@ public class MappedFieldTypeReference {
         this.fieldType = fieldType;
     }

-    public int getNumAssociatedMappers() {
-        return numAssociatedMappers;
-    }
-
-    public void incrementAssociatedMappers() {
-        ++numAssociatedMappers;
-    }
 }
@@ -33,6 +33,7 @@ import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.regex.Regex;
@@ -260,13 +261,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             assert result.hasConflicts() == false; // we already simulated
             return oldMapper;
         } else {
-            List<ObjectMapper> newObjectMappers = new ArrayList<>();
-            List<FieldMapper> newFieldMappers = new ArrayList<>();
-            for (MetadataFieldMapper metadataMapper : mapper.mapping().metadataMappers) {
-                newFieldMappers.add(metadataMapper);
-            }
-            MapperUtils.collect(mapper.mapping().root, newObjectMappers, newFieldMappers);
-            checkNewMappersCompatibility(newObjectMappers, newFieldMappers, updateAllTypes);
+            Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> newMappers = checkMappersCompatibility(
+                    mapper.type(), mapper.mapping(), updateAllTypes);
+            Collection<ObjectMapper> newObjectMappers = newMappers.v1();
+            Collection<FieldMapper> newFieldMappers = newMappers.v2();
             addMappers(mapper.type(), newObjectMappers, newFieldMappers);

             for (DocumentTypeListener typeListener : typeListeners) {
@@ -302,9 +300,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         return true;
     }

-    protected void checkNewMappersCompatibility(Collection<ObjectMapper> newObjectMappers, Collection<FieldMapper> newFieldMappers, boolean updateAllTypes) {
+    protected void checkMappersCompatibility(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
         assert mappingLock.isWriteLockedByCurrentThread();
-        for (ObjectMapper newObjectMapper : newObjectMappers) {
+        for (ObjectMapper newObjectMapper : objectMappers) {
             ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath());
             if (existingObjectMapper != null) {
                 MergeResult result = new MergeResult(true, updateAllTypes);
@@ -315,7 +313,19 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
                 }
             }
         }
-        fieldTypes.checkCompatibility(newFieldMappers, updateAllTypes);
+        fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes);
+    }
+
+    protected Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> checkMappersCompatibility(
+            String type, Mapping mapping, boolean updateAllTypes) {
+        List<ObjectMapper> objectMappers = new ArrayList<>();
+        List<FieldMapper> fieldMappers = new ArrayList<>();
+        for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
+            fieldMappers.add(metadataMapper);
+        }
+        MapperUtils.collect(mapping.root, objectMappers, fieldMappers);
+        checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes);
+        return new Tuple<>(objectMappers, fieldMappers);
     }

     protected void addMappers(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
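Illustration (editor's sketch, not part of this commit): the new checkMappersCompatibility overload above collects the object and field mappers from a mapping in a single traversal and hands both collections back together in one Tuple. The sketch below shows that "return two related lists from one pass" shape using only the JDK; the Pair class here is the editor's stand-in for Elasticsearch's Tuple, and the splitting rule is invented for the example.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Standalone illustration: return two related collections from one traversal.
public class PairOfCollectionsSketch {
    static final class Pair<A, B> {
        final A v1; final B v2;
        Pair(A v1, B v2) { this.v1 = v1; this.v2 = v2; }
    }

    // split the input into "object-like" names (containing a dot) and plain field names
    static Pair<List<String>, List<String>> collect(List<String> names) {
        List<String> objects = new ArrayList<>();
        List<String> fields = new ArrayList<>();
        for (String name : names) {
            if (name.contains(".")) {
                objects.add(name);
            } else {
                fields.add(name);
            }
        }
        return new Pair<>(objects, fields);
    }

    public static void main(String[] args) {
        Pair<List<String>, List<String>> result = collect(Arrays.asList("user.name", "age", "user.address.city"));
        System.out.println(result.v1 + " / " + result.v2);
    }
}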
@@ -135,6 +135,15 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
             super(ref);
         }

+        @Override
+        public void checkCompatibility(MappedFieldType other,
+                List<String> conflicts, boolean strict) {
+            super.checkCompatibility(other, conflicts, strict);
+            if (numericPrecisionStep() != other.numericPrecisionStep()) {
+                conflicts.add("mapper [" + names().fullName() + "] has different [precision_step] values");
+            }
+        }
+
         public abstract NumberFieldType clone();

         @Override
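Illustration (editor's sketch, not part of this commit): moving the precision_step comparison into checkCompatibility (above) means the mismatch is reported through the shared conflict list like any other incompatibility, instead of being special-cased in merge. A minimal, self-contained version of that pattern, with invented names:

import java.util.ArrayList;
import java.util.List;

// Standalone illustration: a compatibility check that appends human-readable
// conflicts to a shared list instead of throwing immediately.
public class PrecisionStepCheckSketch {
    static void checkCompatibility(int myPrecisionStep, int otherPrecisionStep, String fieldName, List<String> conflicts) {
        if (myPrecisionStep != otherPrecisionStep) {
            conflicts.add("mapper [" + fieldName + "] has different [precision_step] values");
        }
    }

    public static void main(String[] args) {
        List<String> conflicts = new ArrayList<>();
        checkCompatibility(8, 16, "price", conflicts);
        System.out.println(conflicts); // one conflict reported; the caller decides whether it is fatal
    }
}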
@@ -251,11 +260,6 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
             return;
         }
         NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
-        if (this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false) {
-            if (fieldType().numericPrecisionStep() != nfmMergeWith.fieldType().numericPrecisionStep()) {
-                mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] is used by multiple types. Set update_all_types to true to update precision_step across all types.");
-            }
-        }

         if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
             this.includeInAll = nfmMergeWith.includeInAll;
@@ -1034,7 +1034,7 @@ public class IndexShard extends AbstractIndexShardComponent {
         boolean wasActive = active.getAndSet(false);
         if (wasActive) {
             updateBufferSize(IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
-            logger.debug("shard is now inactive");
+            logger.debug("marking shard as inactive (inactive_time=[{}]) indexing wise", inactiveTime);
             indexEventListener.onShardInactive(this);
         }
     }
@@ -19,7 +19,6 @@

 package org.elasticsearch.indices.memory;

-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
@@ -33,7 +32,6 @@ import org.elasticsearch.index.engine.FlushNotAllowedEngineException;
 import org.elasticsearch.index.shard.IndexEventListener;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.IndexShardState;
-import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -200,159 +198,57 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
         return translogBuffer;
     }

-    protected List<ShardId> availableShards() {
-        ArrayList<ShardId> list = new ArrayList<>();
-
+    protected List<IndexShard> availableShards() {
+        List<IndexShard> availableShards = new ArrayList<>();
         for (IndexService indexService : indicesService) {
-            for (IndexShard indexShard : indexService) {
-                if (shardAvailable(indexShard)) {
-                    list.add(indexShard.shardId());
+            for (IndexShard shard : indexService) {
+                if (shardAvailable(shard)) {
+                    availableShards.add(shard);
                 }
             }
         }
-        return list;
+        return availableShards;
     }

     /** returns true if shard exists and is availabe for updates */
-    protected boolean shardAvailable(ShardId shardId) {
-        return shardAvailable(getShard(shardId));
-    }
-
-    /** returns true if shard exists and is availabe for updates */
-    protected boolean shardAvailable(@Nullable IndexShard shard) {
+    protected boolean shardAvailable(IndexShard shard) {
         // shadow replica doesn't have an indexing buffer
-        return shard != null && shard.canIndex() && CAN_UPDATE_INDEX_BUFFER_STATES.contains(shard.state());
-    }
-
-    /** gets an {@link IndexShard} instance for the given shard. returns null if the shard doesn't exist */
-    protected IndexShard getShard(ShardId shardId) {
-        IndexService indexService = indicesService.indexService(shardId.index().name());
-        if (indexService != null) {
-            IndexShard indexShard = indexService.getShardOrNull(shardId.id());
-            return indexShard;
-        }
-        return null;
+        return shard.canIndex() && CAN_UPDATE_INDEX_BUFFER_STATES.contains(shard.state());
     }

     /** set new indexing and translog buffers on this shard. this may cause the shard to refresh to free up heap. */
-    protected void updateShardBuffers(ShardId shardId, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
-        final IndexShard shard = getShard(shardId);
-        if (shard != null) {
-            try {
-                shard.updateBufferSize(shardIndexingBufferSize, shardTranslogBufferSize);
-            } catch (EngineClosedException e) {
-                // ignore
-            } catch (FlushNotAllowedEngineException e) {
-                // ignore
-            } catch (Exception e) {
-                logger.warn("failed to set shard {} index buffer to [{}]", e, shardId, shardIndexingBufferSize);
-            }
+    protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
+        try {
+            shard.updateBufferSize(shardIndexingBufferSize, shardTranslogBufferSize);
+        } catch (EngineClosedException | FlushNotAllowedEngineException e) {
+            // ignore
+        } catch (Exception e) {
+            logger.warn("failed to set shard {} index buffer to [{}]", e, shard.shardId(), shardIndexingBufferSize);
         }
     }

-    /** returns {@link IndexShard#getActive} if the shard exists, else null */
-    protected Boolean getShardActive(ShardId shardId) {
-        final IndexShard indexShard = getShard(shardId);
-        if (indexShard == null) {
-            return null;
-        }
-        return indexShard.getActive();
-    }
-
     /** check if any shards active status changed, now. */
     public void forceCheck() {
         statusChecker.run();
     }

     class ShardsIndicesStatusChecker implements Runnable {
-
-        // True if the shard was active last time we checked
-        private final Map<ShardId,Boolean> shardWasActive = new HashMap<>();
-
         @Override
         public synchronized void run() {
-            EnumSet<ShardStatusChangeType> changes = purgeDeletedAndClosedShards();
-
-            updateShardStatuses(changes);
-
-            if (changes.isEmpty() == false) {
-                // Something changed: recompute indexing buffers:
-                calcAndSetShardBuffers("[" + changes + "]");
-            }
-        }
-
-        /**
-         * goes through all existing shards and check whether there are changes in their active status
-         */
-        private void updateShardStatuses(EnumSet<ShardStatusChangeType> changes) {
-            for (ShardId shardId : availableShards()) {
-
-                // Is the shard active now?
-                Boolean isActive = getShardActive(shardId);
-
-                if (isActive == null) {
-                    // shard was closed..
-                    continue;
-                }
-
-                // Was the shard active last time we checked?
-                Boolean wasActive = shardWasActive.get(shardId);
-                if (wasActive == null) {
-                    // First time we are seeing this shard
-                    shardWasActive.put(shardId, isActive);
-                    changes.add(ShardStatusChangeType.ADDED);
-                } else if (isActive) {
-                    // Shard is active now
-                    if (wasActive == false) {
-                        // Shard became active itself, since we last checked (due to new indexing op arriving)
-                        changes.add(ShardStatusChangeType.BECAME_ACTIVE);
-                        logger.debug("marking shard {} as active indexing wise", shardId);
-                        shardWasActive.put(shardId, true);
-                    } else if (checkIdle(shardId) == Boolean.TRUE) {
-                        // Make shard inactive now
-                        changes.add(ShardStatusChangeType.BECAME_INACTIVE);
-
-                        shardWasActive.put(shardId, false);
-                    }
-                }
-            }
-        }
-
-        /**
-         * purge any existing statuses that are no longer updated
-         *
-         * @return the changes applied
-         */
-        private EnumSet<ShardStatusChangeType> purgeDeletedAndClosedShards() {
-            EnumSet<ShardStatusChangeType> changes = EnumSet.noneOf(ShardStatusChangeType.class);
-
-            Iterator<ShardId> statusShardIdIterator = shardWasActive.keySet().iterator();
-            while (statusShardIdIterator.hasNext()) {
-                ShardId shardId = statusShardIdIterator.next();
-                if (shardAvailable(shardId) == false) {
-                    changes.add(ShardStatusChangeType.DELETED);
-                    statusShardIdIterator.remove();
-                }
-            }
-            return changes;
-        }
-
-        private void calcAndSetShardBuffers(String reason) {
-
-            // Count how many shards are now active:
-            int activeShardCount = 0;
-            for (Map.Entry<ShardId,Boolean> ent : shardWasActive.entrySet()) {
-                if (ent.getValue()) {
-                    activeShardCount++;
+            List<IndexShard> availableShards = availableShards();
+            List<IndexShard> activeShards = new ArrayList<>();
+            for (IndexShard shard : availableShards) {
+                if (!checkIdle(shard)) {
+                    activeShards.add(shard);
                 }
             }
+            int activeShardCount = activeShards.size();

             // TODO: we could be smarter here by taking into account how RAM the IndexWriter on each shard
             // is actually using (using IW.ramBytesUsed), so that small indices (e.g. Marvel) would not
             // get the same indexing buffer as large indices. But it quickly gets tricky...
             if (activeShardCount == 0) {
-                logger.debug("no active shards (reason={})", reason);
                 return;
             }

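Illustration (editor's sketch, not part of this commit): the refactored status checker above no longer tracks per-ShardId state between runs; on every run it asks each available IndexShard whether it is idle, collects the active ones, and divides the indexing budget among them. The standalone sketch below keeps only that control flow; all types and numbers here are placeholders invented by the editor, not the Elasticsearch classes.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Standalone illustration of the simplified status-checker loop:
// partition shards into idle/active, then share the budget among the active ones.
public class IndexingBufferSketch {
    static final class Shard {
        final String name; final boolean idle;
        Shard(String name, boolean idle) { this.name = name; this.idle = idle; }
    }

    static void run(List<Shard> availableShards, long totalIndexingBufferBytes) {
        List<Shard> activeShards = new ArrayList<>();
        for (Shard shard : availableShards) {
            if (!shard.idle) {          // stands in for checkIdle(shard) in the real code
                activeShards.add(shard);
            }
        }
        if (activeShards.isEmpty()) {
            return;                     // nothing to size
        }
        long perShard = totalIndexingBufferBytes / activeShards.size();
        for (Shard shard : activeShards) {
            System.out.println(shard.name + " -> " + perShard + " bytes");
        }
    }

    public static void main(String[] args) {
        run(Arrays.asList(new Shard("a", false), new Shard("b", true), new Shard("c", false)), 64 * 1024 * 1024);
    }
}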
@@ -372,13 +268,10 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
                 shardTranslogBufferSize = maxShardTranslogBufferSize;
             }

-            logger.debug("recalculating shard indexing buffer (reason={}), total is [{}] with [{}] active shards, each shard set to indexing=[{}], translog=[{}]", reason, indexingBuffer, activeShardCount, shardIndexingBufferSize, shardTranslogBufferSize);
+            logger.debug("recalculating shard indexing buffer, total is [{}] with [{}] active shards, each shard set to indexing=[{}], translog=[{}]", indexingBuffer, activeShardCount, shardIndexingBufferSize, shardTranslogBufferSize);

-            for (Map.Entry<ShardId,Boolean> ent : shardWasActive.entrySet()) {
-                if (ent.getValue()) {
-                    // This shard is active
-                    updateShardBuffers(ent.getKey(), shardIndexingBufferSize, shardTranslogBufferSize);
-                }
+            for (IndexShard shard : activeShards) {
+                updateShardBuffers(shard, shardIndexingBufferSize, shardTranslogBufferSize);
             }
         }
     }
@@ -387,38 +280,17 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
         return System.nanoTime();
     }

-    /** ask this shard to check now whether it is inactive, and reduces its indexing and translog buffers if so. returns Boolean.TRUE if
-     *  it did deactive, Boolean.FALSE if it did not, and null if the shard is unknown */
-    protected Boolean checkIdle(ShardId shardId) {
-        String ignoreReason; // eclipse compiler does not know it is really final
-        final IndexShard shard = getShard(shardId);
-        if (shard != null) {
-            try {
-                if (shard.checkIdle()) {
-                    logger.debug("marking shard {} as inactive (inactive_time[{}]) indexing wise",
-                            shardId,
-                            shard.getInactiveTime());
-                    return Boolean.TRUE;
-                }
-                return Boolean.FALSE;
-            } catch (EngineClosedException e) {
-                // ignore
-                ignoreReason = "EngineClosedException";
-            } catch (FlushNotAllowedEngineException e) {
-                // ignore
-                ignoreReason = "FlushNotAllowedEngineException";
-            }
-        } else {
-            ignoreReason = "shard not found";
-        }
-        if (ignoreReason != null) {
-            logger.trace("ignore [{}] while marking shard {} as inactive", ignoreReason, shardId);
-        }
-        return null;
-    }
-
-    private static enum ShardStatusChangeType {
-        ADDED, DELETED, BECAME_ACTIVE, BECAME_INACTIVE
+    /**
+     * ask this shard to check now whether it is inactive, and reduces its indexing and translog buffers if so.
+     * return false if the shard is not idle, otherwise true
+     */
+    protected boolean checkIdle(IndexShard shard) {
+        try {
+            return shard.checkIdle();
+        } catch (EngineClosedException | FlushNotAllowedEngineException e) {
+            logger.trace("ignore [{}] while marking shard {} as inactive", e.getClass().getSimpleName(), shard.shardId());
+            return true;
+        }
     }

     @Override
@@ -147,7 +147,7 @@ public class Node implements Releasable {
                     tmpEnv.configFile(), Arrays.toString(tmpEnv.dataFiles()), tmpEnv.logsFile(), tmpEnv.pluginsFile());
         }

-        this.pluginsService = new PluginsService(tmpSettings, tmpEnv.pluginsFile(), classpathPlugins);
+        this.pluginsService = new PluginsService(tmpSettings, tmpEnv.modulesFile(), tmpEnv.pluginsFile(), classpathPlugins);
         this.settings = pluginsService.updatedSettings();
         // create the environment based on the finalized (processed) view of the settings
         this.environment = new Environment(this.settings());
@@ -71,7 +71,7 @@ public abstract class Plugin {
     }

     /**
-     * Called before a new index is created on a node. The given module can be used to regsiter index-leve
+     * Called before a new index is created on a node. The given module can be used to register index-level
      * extensions.
      */
     public void onIndexModule(IndexModule indexModule) {}
@@ -66,6 +66,10 @@ public class PluginManager {
             "plugin",
             "plugin.bat",
             "service.bat"));

+    static final Set<String> MODULES = unmodifiableSet(newHashSet(
+            "lang-expression",
+            "lang-groovy"));
+
     static final Set<String> OFFICIAL_PLUGINS = unmodifiableSet(newHashSet(
             "analysis-icu",
@@ -78,8 +82,6 @@ public class PluginManager {
             "discovery-ec2",
             "discovery-gce",
             "discovery-multicast",
-            "lang-expression",
-            "lang-groovy",
             "lang-javascript",
             "lang-python",
             "mapper-attachments",
@@ -221,6 +223,12 @@ public class PluginManager {
         PluginInfo info = PluginInfo.readFromProperties(root);
         terminal.println(VERBOSE, "%s", info);

+        // don't let luser install plugin as a module...
+        // they might be unavoidably in maven central and are packaged up the same way)
+        if (MODULES.contains(info.getName())) {
+            throw new IOException("plugin '" + info.getName() + "' cannot be installed like this, it is a system module");
+        }
+
         // update name in handle based on 'name' property found in descriptor file
         pluginHandle = new PluginHandle(info.getName(), pluginHandle.version, pluginHandle.user);
         final Path extractLocation = pluginHandle.extractedDir(environment);
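Illustration (editor's sketch, not part of this commit): the new check above refuses to install anything whose descriptor name matches a known module, because modules ship inside the distribution rather than through the plugin manager. A minimal restatement of that guard, with the class name and the calling code invented for the example:

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Standalone illustration of the "do not install a module as a plugin" guard.
public class ModuleGuardSketch {
    static final Set<String> MODULES = new HashSet<>(Arrays.asList("lang-expression", "lang-groovy"));

    static void checkInstallable(String pluginName) throws IOException {
        if (MODULES.contains(pluginName)) {
            throw new IOException("plugin '" + pluginName + "' cannot be installed like this, it is a system module");
        }
    }

    public static void main(String[] args) throws IOException {
        checkInstallable("analysis-icu");     // fine
        try {
            checkInstallable("lang-groovy");  // rejected
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }
}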
@@ -25,9 +25,8 @@ import org.apache.lucene.analysis.util.TokenizerFactory;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
+import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
 import org.elasticsearch.bootstrap.JarHell;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
@@ -39,10 +38,7 @@ import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexModule;
-import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.shard.IndexEventListener;

-import java.io.Closeable;
 import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -50,6 +46,7 @@ import java.net.URL;
 import java.net.URLClassLoader;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -69,10 +66,10 @@ import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
 public class PluginsService extends AbstractComponent {

     /**
-     * We keep around a list of plugins
+     * We keep around a list of plugins and modules
      */
     private final List<Tuple<PluginInfo, Plugin>> plugins;
-    private final PluginsInfo info;
+    private final PluginsAndModules info;

     private final Map<Plugin, List<OnModuleReference>> onModuleReferences;

@@ -89,13 +86,15 @@ public class PluginsService extends AbstractComponent {
     /**
      * Constructs a new PluginService
      * @param settings The settings of the system
+     * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem
      * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem
      * @param classpathPlugins Plugins that exist in the classpath which should be loaded
      */
-    public PluginsService(Settings settings, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
+    public PluginsService(Settings settings, Path modulesDirectory, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
         super(settings);
+        info = new PluginsAndModules();

-        List<Tuple<PluginInfo, Plugin>> tupleBuilder = new ArrayList<>();
+        List<Tuple<PluginInfo, Plugin>> pluginsLoaded = new ArrayList<>();

         // first we load plugins that are on the classpath. this is for tests and transport clients
         for (Class<? extends Plugin> pluginClass : classpathPlugins) {
@@ -104,24 +103,39 @@ public class PluginsService extends AbstractComponent {
             if (logger.isTraceEnabled()) {
                 logger.trace("plugin loaded from classpath [{}]", pluginInfo);
             }
-            tupleBuilder.add(new Tuple<>(pluginInfo, plugin));
+            pluginsLoaded.add(new Tuple<>(pluginInfo, plugin));
+            info.addPlugin(pluginInfo);
+        }
+
+        // load modules
+        if (modulesDirectory != null) {
+            try {
+                List<Bundle> bundles = getModuleBundles(modulesDirectory);
+                List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
+                pluginsLoaded.addAll(loaded);
+                for (Tuple<PluginInfo, Plugin> module : loaded) {
+                    info.addModule(module.v1());
+                }
+            } catch (IOException ex) {
+                throw new IllegalStateException("Unable to initialize modules", ex);
+            }
         }

         // now, find all the ones that are in plugins/
         if (pluginsDirectory != null) {
             try {
                 List<Bundle> bundles = getPluginBundles(pluginsDirectory);
-                tupleBuilder.addAll(loadBundles(bundles));
+                List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
+                pluginsLoaded.addAll(loaded);
+                for (Tuple<PluginInfo, Plugin> plugin : loaded) {
+                    info.addPlugin(plugin.v1());
+                }
             } catch (IOException ex) {
                 throw new IllegalStateException("Unable to initialize plugins", ex);
             }
         }

-        plugins = Collections.unmodifiableList(tupleBuilder);
-        info = new PluginsInfo();
-        for (Tuple<PluginInfo, Plugin> tuple : plugins) {
-            info.add(tuple.v1());
-        }
+        plugins = Collections.unmodifiableList(pluginsLoaded);

         // We need to build a List of jvm and site plugins for checking mandatory plugins
         Map<String, Plugin> jvmPlugins = new HashMap<>();
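Illustration (editor's sketch, not part of this commit): the constructor above now records classpath plugins, filesystem plugins and modules into a single info object as they are loaded, instead of rebuilding the info afterwards. PluginsAndModules is an Elasticsearch class; the stand-in below only mirrors its add/list shape, with the editor's own names, to show why keeping two separate lists makes the later "modules {}, plugins {}" logging straightforward.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Standalone stand-in for an "info" holder that keeps plugins and modules in separate lists.
public class PluginsAndModulesSketch {
    private final List<String> plugins = new ArrayList<>();
    private final List<String> modules = new ArrayList<>();

    void addPlugin(String name) { plugins.add(name); }
    void addModule(String name) { modules.add(name); }

    List<String> getPluginInfos() { return Collections.unmodifiableList(plugins); }
    List<String> getModuleInfos() { return Collections.unmodifiableList(modules); }

    public static void main(String[] args) {
        PluginsAndModulesSketch info = new PluginsAndModulesSketch();
        info.addModule("lang-groovy");
        info.addPlugin("analysis-icu");
        System.out.println("modules " + info.getModuleInfos() + ", plugins " + info.getPluginInfos());
    }
}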
@@ -151,7 +165,18 @@ public class PluginsService extends AbstractComponent {
             }
         }

-        logger.info("loaded {}, sites {}", jvmPlugins.keySet(), sitePlugins);
+        // we don't log jars in lib/ we really shouldnt log modules,
+        // but for now: just be transparent so we can debug any potential issues
+        Set<String> moduleNames = new HashSet<>();
+        Set<String> jvmPluginNames = new HashSet<>();
+        for (PluginInfo moduleInfo : info.getModuleInfos()) {
+            moduleNames.add(moduleInfo.getName());
+        }
+        for (PluginInfo pluginInfo : info.getPluginInfos()) {
+            jvmPluginNames.add(pluginInfo.getName());
+        }
+
+        logger.info("modules {}, plugins {}, sites {}", moduleNames, jvmPluginNames, sitePlugins);

         Map<Plugin, List<OnModuleReference>> onModuleReferences = new HashMap<>();
         for (Plugin plugin : jvmPlugins.values()) {
@@ -160,6 +185,10 @@ public class PluginsService extends AbstractComponent {
                 if (!method.getName().equals("onModule")) {
                     continue;
                 }
+                // this is a deprecated final method, so all Plugin subclasses have it
+                if (method.getParameterTypes().length == 1 && method.getParameterTypes()[0].equals(IndexModule.class)) {
+                    continue;
+                }
                 if (method.getParameterTypes().length == 0 || method.getParameterTypes().length > 1) {
                     logger.warn("Plugin: {} implementing onModule with no parameters or more than one parameter", plugin.name());
                     continue;
@@ -178,7 +207,7 @@ public class PluginsService extends AbstractComponent {
         this.onModuleReferences = Collections.unmodifiableMap(onModuleReferences);
     }

-    public List<Tuple<PluginInfo, Plugin>> plugins() {
+    private List<Tuple<PluginInfo, Plugin>> plugins() {
         return plugins;
     }

@@ -249,12 +278,12 @@ public class PluginsService extends AbstractComponent {
             }
         }
     /**
-     * Get information about plugins (jvm and site plugins).
+     * Get information about plugins and modules
      */
-    public PluginsInfo info() {
+    public PluginsAndModules info() {
         return info;
     }

     // a "bundle" is a group of plugins in a single classloader
     // really should be 1-1, but we are not so fortunate
     static class Bundle {
@@ -262,6 +291,40 @@ public class PluginsService extends AbstractComponent {
         List<URL> urls = new ArrayList<>();
     }

+    // similar in impl to getPluginBundles, but DO NOT try to make them share code.
+    // we don't need to inherit all the leniency, and things are different enough.
+    static List<Bundle> getModuleBundles(Path modulesDirectory) throws IOException {
+        // damn leniency
+        if (Files.notExists(modulesDirectory)) {
+            return Collections.emptyList();
+        }
+        List<Bundle> bundles = new ArrayList<>();
+        try (DirectoryStream<Path> stream = Files.newDirectoryStream(modulesDirectory)) {
+            for (Path module : stream) {
+                if (FileSystemUtils.isHidden(module)) {
+                    continue; // skip over .DS_Store etc
+                }
+                PluginInfo info = PluginInfo.readFromProperties(module);
+                if (!info.isJvm()) {
+                    throw new IllegalStateException("modules must be jvm plugins: " + info);
+                }
+                if (!info.isIsolated()) {
+                    throw new IllegalStateException("modules must be isolated: " + info);
+                }
+                Bundle bundle = new Bundle();
+                bundle.plugins.add(info);
+                // gather urls for jar files
+                try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(module, "*.jar")) {
+                    for (Path jar : jarStream) {
+                        bundle.urls.add(jar.toUri().toURL());
+                    }
+                }
+                bundles.add(bundle);
+            }
+        }
+        return bundles;
+    }
+
     static List<Bundle> getPluginBundles(Path pluginsDirectory) throws IOException {
         ESLogger logger = Loggers.getLogger(PluginsService.class);

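Illustration (editor's sketch, not part of this commit): the directory walk in getModuleBundles above uses only JDK APIs, so the core of it can be shown standalone. The sketch below keeps just the "one subdirectory per module, every *.jar inside becomes a classloader URL" part, drops the PluginInfo validation, and prints the URLs instead of building a Bundle; the class name and the default "modules" path are the editor's.

import java.io.IOException;
import java.net.URL;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

// Standalone illustration: collect jar URLs for each subdirectory of a modules/ directory.
public class ModuleJarScanSketch {
    static List<URL> jarUrls(Path moduleDir) throws IOException {
        List<URL> urls = new ArrayList<>();
        try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(moduleDir, "*.jar")) {
            for (Path jar : jarStream) {
                urls.add(jar.toUri().toURL());
            }
        }
        return urls;
    }

    public static void main(String[] args) throws IOException {
        Path modulesDirectory = Paths.get(args.length > 0 ? args[0] : "modules");
        if (Files.notExists(modulesDirectory)) {
            return; // same leniency as the real code: nothing to load
        }
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(modulesDirectory)) {
            for (Path module : stream) {
                if (Files.isDirectory(module)) {
                    System.out.println(module.getFileName() + " -> " + jarUrls(module));
                }
            }
        }
    }
}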
|
@ -269,7 +332,7 @@ public class PluginsService extends AbstractComponent {
|
||||||
if (!isAccessibleDirectory(pluginsDirectory, logger)) {
|
if (!isAccessibleDirectory(pluginsDirectory, logger)) {
|
||||||
return Collections.emptyList();
|
return Collections.emptyList();
|
||||||
}
|
}
|
||||||
|
|
||||||
List<Bundle> bundles = new ArrayList<>();
|
List<Bundle> bundles = new ArrayList<>();
|
||||||
// a special purgatory for plugins that directly depend on each other
|
// a special purgatory for plugins that directly depend on each other
|
||||||
bundles.add(new Bundle());
|
bundles.add(new Bundle());
|
||||||
|
@@ -281,7 +344,14 @@ public class PluginsService extends AbstractComponent {
                 continue;
             }
             logger.trace("--- adding plugin [{}]", plugin.toAbsolutePath());
-            PluginInfo info = PluginInfo.readFromProperties(plugin);
+            final PluginInfo info;
+            try {
+                info = PluginInfo.readFromProperties(plugin);
+            } catch (IOException e) {
+                throw new IllegalStateException("Could not load plugin descriptor for existing plugin ["
+                        + plugin.getFileName() + "]. Was the plugin built before 2.0?", e);
+            }
+
             List<URL> urls = new ArrayList<>();
             if (info.isJvm()) {
                 // a jvm plugin: gather urls for jar files
@@ -302,7 +372,7 @@ public class PluginsService extends AbstractComponent {
                 bundle.urls.addAll(urls);
             }
         }

         return bundles;
     }

@@ -320,7 +390,7 @@ public class PluginsService extends AbstractComponent {
         } catch (Exception e) {
             throw new IllegalStateException("failed to load bundle " + bundle.urls + " due to jar hell", e);
         }

         // create a child to load the plugins in this bundle
         ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader());
         for (PluginInfo pluginInfo : bundle.plugins) {
@@ -45,9 +45,6 @@ public class RestForceMergeAction extends BaseRestHandler {
         super(settings, controller, client);
         controller.registerHandler(POST, "/_forcemerge", this);
         controller.registerHandler(POST, "/{index}/_forcemerge", this);
-
-        controller.registerHandler(GET, "/_forcemerge", this);
-        controller.registerHandler(GET, "/{index}/_forcemerge", this);
     }

     @Override
@@ -95,7 +95,7 @@ public class RestPluginsAction extends AbstractCatAction {
         for (DiscoveryNode node : nodes) {
             NodeInfo info = nodesInfo.getNodesMap().get(node.id());

-            for (PluginInfo pluginInfo : info.getPlugins().getInfos()) {
+            for (PluginInfo pluginInfo : info.getPlugins().getPluginInfos()) {
                 table.startRow();
                 table.addCell(node.id());
                 table.addCell(node.name());
@@ -0,0 +1,171 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import java.security.BasicPermission;
+import java.security.Permission;
+import java.security.PermissionCollection;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Checked by scripting engines to allow loading a java class.
+ * <p>
+ * Examples:
+ * <p>
+ * Allow permission to {@code java.util.List}
+ * <pre>permission org.elasticsearch.script.ClassPermission "java.util.List";</pre>
+ * Allow permission to classes underneath {@code java.util} (and its subpackages such as {@code java.util.zip})
+ * <pre>permission org.elasticsearch.script.ClassPermission "java.util.*";</pre>
+ * Allow permission to standard predefined list of basic classes (see list below)
+ * <pre>permission org.elasticsearch.script.ClassPermission "<<STANDARD>>";</pre>
+ * Allow permission to all classes
+ * <pre>permission org.elasticsearch.script.ClassPermission "*";</pre>
+ * <p>
+ * Set of classes (allowed by special value <code><<STANDARD>></code>):
+ * <ul>
+ *   <li>{@link java.lang.Boolean}</li>
+ *   <li>{@link java.lang.Byte}</li>
+ *   <li>{@link java.lang.Character}</li>
+ *   <li>{@link java.lang.Double}</li>
+ *   <li>{@link java.lang.Integer}</li>
+ *   <li>{@link java.lang.Long}</li>
+ *   <li>{@link java.lang.Math}</li>
+ *   <li>{@link java.lang.Object}</li>
+ *   <li>{@link java.lang.Short}</li>
+ *   <li>{@link java.lang.String}</li>
+ *   <li>{@link java.math.BigDecimal}</li>
+ *   <li>{@link java.util.ArrayList}</li>
+ *   <li>{@link java.util.Arrays}</li>
+ *   <li>{@link java.util.Date}</li>
+ *   <li>{@link java.util.HashMap}</li>
+ *   <li>{@link java.util.HashSet}</li>
+ *   <li>{@link java.util.Iterator}</li>
+ *   <li>{@link java.util.List}</li>
+ *   <li>{@link java.util.Map}</li>
+ *   <li>{@link java.util.Set}</li>
+ *   <li>{@link java.util.UUID}</li>
+ *   <li>{@link org.joda.time.DateTime}</li>
+ *   <li>{@link org.joda.time.DateTimeUtils}</li>
+ *   <li>{@link org.joda.time.DateTimeZone}</li>
+ *   <li>{@link org.joda.time.Instant}</li>
+ * </ul>
+ */
+public final class ClassPermission extends BasicPermission {
+    private static final long serialVersionUID = 3530711429252193884L;
+
+    public static final String STANDARD = "<<STANDARD>>";
+    /** Typical set of classes for scripting: basic data types, math, dates, and simple collections */
+    // this is the list from the old grovy sandbox impl (+ some things like String, Iterator, etc that were missing)
+    public static final Set<String> STANDARD_CLASSES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
+            // jdk classes
+            java.lang.Boolean.class.getName(),
+            java.lang.Byte.class.getName(),
+            java.lang.Character.class.getName(),
+            java.lang.Double.class.getName(),
+            java.lang.Integer.class.getName(),
+            java.lang.Long.class.getName(),
+            java.lang.Math.class.getName(),
+            java.lang.Object.class.getName(),
+            java.lang.Short.class.getName(),
+            java.lang.String.class.getName(),
+            java.math.BigDecimal.class.getName(),
+            java.util.ArrayList.class.getName(),
+            java.util.Arrays.class.getName(),
+            java.util.Date.class.getName(),
+            java.util.HashMap.class.getName(),
+            java.util.HashSet.class.getName(),
+            java.util.Iterator.class.getName(),
+            java.util.List.class.getName(),
+            java.util.Map.class.getName(),
+            java.util.Set.class.getName(),
+            java.util.UUID.class.getName(),
+            // joda-time
+            org.joda.time.DateTime.class.getName(),
+            org.joda.time.DateTimeUtils.class.getName(),
+            org.joda.time.DateTimeZone.class.getName(),
+            org.joda.time.Instant.class.getName()
+    )));
+
+    /**
+     * Creates a new ClassPermission object.
+     *
+     * @param name class to grant permission to
+     */
+    public ClassPermission(String name) {
+        super(name);
+    }
+
+    /**
+     * Creates a new ClassPermission object.
+     * This constructor exists for use by the {@code Policy} object to instantiate new Permission objects.
+     *
+     * @param name class to grant permission to
+     * @param actions ignored
+     */
+    public ClassPermission(String name, String actions) {
+        this(name);
+    }
+
+    @Override
+    public boolean implies(Permission p) {
+        // check for a special value of STANDARD to imply the basic set
+        if (p != null && p.getClass() == getClass()) {
+            ClassPermission other = (ClassPermission) p;
+            if (STANDARD.equals(getName()) && STANDARD_CLASSES.contains(other.getName())) {
+                return true;
+            }
+        }
+        return super.implies(p);
+    }
+
+    @Override
+    public PermissionCollection newPermissionCollection() {
+        // BasicPermissionCollection only handles wildcards, we expand <<STANDARD>> here
+        PermissionCollection impl = super.newPermissionCollection();
+        return new PermissionCollection() {
|
||||||
|
private static final long serialVersionUID = 6792220143549780002L;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void add(Permission permission) {
|
||||||
|
if (permission instanceof ClassPermission && STANDARD.equals(permission.getName())) {
|
||||||
|
for (String clazz : STANDARD_CLASSES) {
|
||||||
|
impl.add(new ClassPermission(clazz));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
impl.add(permission);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean implies(Permission permission) {
|
||||||
|
return impl.implies(permission);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Enumeration<Permission> elements() {
|
||||||
|
return impl.elements();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
|
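The new permission type above only describes a grant; enforcement happens wherever a scripting engine decides whether to expose a class to user scripts. A rough illustration of how an engine could consult it is sketched below; the helper class and method names are hypothetical and are not part of this change.

    // Hypothetical sketch, not part of this commit: gate class loading on ClassPermission
    // by asking the installed SecurityManager whether the policy grants it.
    import org.elasticsearch.script.ClassPermission;

    final class ScriptClassFilter {
        /** Returns true if the current security policy grants ClassPermission for the given class name. */
        static boolean canLoadClass(String className) {
            SecurityManager sm = System.getSecurityManager();
            if (sm == null) {
                return true; // no security manager installed, nothing to enforce
            }
            try {
                sm.checkPermission(new ClassPermission(className));
                return true;
            } catch (SecurityException e) {
                return false;
            }
        }
    }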
@@ -84,9 +84,6 @@ grant {
   // the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely!
   permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write";
 
-  // needed by lucene SPI currently
-  permission java.lang.RuntimePermission "getClassLoader";
-
   // needed by Settings
   permission java.lang.RuntimePermission "getenv.*";
 
@@ -34,5 +34,6 @@ grant {
   permission java.util.PropertyPermission "rhino.stack.style", "read";
 
   // needed IndyInterface selectMethod (setCallSiteTarget)
+  // TODO: clean this up / only give it to engines that really must have it
   permission java.lang.RuntimePermission "getClassLoader";
 };
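Read together, the two policy hunks above take the getClassLoader grant out of the core policy and leave it to the scripting engines' own policy files, with classes the scripts may touch expressed through ClassPermission. An engine's plugin-security.policy entry would look roughly like the following sketch; the grant for java.util.regex.Pattern is only an example and is not something this commit adds.

    grant {
      // allow the predefined standard set of classes, plus one extra class as an example
      permission org.elasticsearch.script.ClassPermission "<<STANDARD>>";
      permission org.elasticsearch.script.ClassPermission "java.util.regex.Pattern";
    };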
@@ -43,8 +43,6 @@ OFFICIAL PLUGINS
 - discovery-ec2
 - discovery-gce
 - discovery-multicast
-- lang-expression
-- lang-groovy
 - lang-javascript
 - lang-python
 - mapper-attachments
@@ -766,11 +766,11 @@ public class ClusterServiceIT extends ESIntegTestCase {
                     return false;
                 }
             }
-        int numberOfThreads = randomIntBetween(2, 256);
+        int numberOfThreads = randomIntBetween(2, 8);
         int tasksSubmittedPerThread = randomIntBetween(1, 1024);
 
         ConcurrentMap<String, AtomicInteger> counters = new ConcurrentHashMap<>();
-        CountDownLatch latch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread);
+        CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread);
         ClusterStateTaskListener listener = new ClusterStateTaskListener() {
             @Override
             public void onFailure(String source, Throwable t) {
@@ -780,7 +780,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
             @Override
             public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                 counters.computeIfAbsent(source, key -> new AtomicInteger()).incrementAndGet();
-                latch.countDown();
+                updateLatch.countDown();
             }
         };
 
@@ -814,7 +814,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
                 clusterService.submitStateUpdateTask(
                     Thread.currentThread().getName(),
                     new Task(),
-                    ClusterStateTaskConfig.build(Priority.NORMAL),
+                    ClusterStateTaskConfig.build(randomFrom(Priority.values())),
                     executor,
                     listener);
             }
@@ -829,14 +829,16 @@ public class ClusterServiceIT extends ESIntegTestCase {
         }
 
         // wait until all the cluster state updates have been processed
-        latch.await();
+        updateLatch.await();
 
         // assert the number of executed tasks is correct
         assertEquals(numberOfThreads * tasksSubmittedPerThread, counter.get());
 
         // assert each executor executed the correct number of tasks
         for (TaskExecutor executor : executors) {
-            assertEquals((int)counts.get(executor), executor.counter.get());
+            if (counts.containsKey(executor)) {
+                assertEquals((int) counts.get(executor), executor.counter.get());
+            }
         }
 
         // assert the correct number of clusterStateProcessed events were triggered
@@ -37,6 +37,8 @@ public class FieldTypeLookupTests extends ESTestCase {
         FieldTypeLookup lookup = new FieldTypeLookup();
         assertNull(lookup.get("foo"));
         assertNull(lookup.getByIndexName("foo"));
+        assertEquals(Collections.emptySet(), lookup.getTypes("foo"));
+        assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo"));
         Collection<String> names = lookup.simpleMatchToFullName("foo");
         assertNotNull(names);
         assertTrue(names.isEmpty());
@@ -70,6 +72,14 @@ public class FieldTypeLookupTests extends ESTestCase {
         assertNull(lookup.get("bar"));
         assertEquals(f.fieldType(), lookup2.getByIndexName("bar"));
         assertNull(lookup.getByIndexName("foo"));
+        assertEquals(Collections.emptySet(), lookup.getTypes("foo"));
+        assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo"));
+        assertEquals(Collections.emptySet(), lookup.getTypes("bar"));
+        assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("bar"));
+        assertEquals(Collections.singleton("type"), lookup2.getTypes("foo"));
+        assertEquals(Collections.emptySet(), lookup2.getTypesByIndexName("foo"));
+        assertEquals(Collections.emptySet(), lookup2.getTypes("bar"));
+        assertEquals(Collections.singleton("type"), lookup2.getTypesByIndexName("bar"));
         assertEquals(1, size(lookup2.iterator()));
     }
 
@@ -144,7 +154,7 @@ public class FieldTypeLookupTests extends ESTestCase {
     public void testCheckCompatibilityNewField() {
         FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup.checkCompatibility(newList(f1), false);
+        lookup.checkCompatibility("type", newList(f1), false);
     }
 
     public void testCheckCompatibilityMismatchedTypes() {
@@ -155,14 +165,14 @@ public class FieldTypeLookupTests extends ESTestCase {
         MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo", "foo");
         FieldMapper f2 = new FakeFieldMapper("foo", ft2);
         try {
-            lookup.checkCompatibility(newList(f2), false);
+            lookup.checkCompatibility("type2", newList(f2), false);
             fail("expected type mismatch");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
         }
         // fails even if updateAllTypes == true
         try {
-            lookup.checkCompatibility(newList(f2), true);
+            lookup.checkCompatibility("type2", newList(f2), true);
             fail("expected type mismatch");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
@@ -178,25 +188,27 @@ public class FieldTypeLookupTests extends ESTestCase {
         ft2.setBoost(2.0f);
         FieldMapper f2 = new FakeFieldMapper("foo", ft2);
         try {
-            lookup.checkCompatibility(newList(f2), false);
+            // different type
+            lookup.checkCompatibility("type2", newList(f2), false);
             fail("expected conflict");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("to update [boost] across all types"));
         }
-        lookup.checkCompatibility(newList(f2), true); // boost is updateable, so ok if forcing
+        lookup.checkCompatibility("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
+        lookup.checkCompatibility("type2", newList(f2), true); // boost is updateable, so ok if forcing
         // now with a non changeable setting
         MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo", "bar");
         ft3.setStored(true);
         FieldMapper f3 = new FakeFieldMapper("foo", ft3);
         try {
-            lookup.checkCompatibility(newList(f3), false);
+            lookup.checkCompatibility("type2", newList(f3), false);
             fail("expected conflict");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("has different [store] values"));
         }
         // even with updateAllTypes == true, incompatible
         try {
-            lookup.checkCompatibility(newList(f3), true);
+            lookup.checkCompatibility("type2", newList(f3), true);
             fail("expected conflict");
         } catch (IllegalArgumentException e) {
             assertTrue(e.getMessage().contains("has different [store] values"));
@@ -30,6 +30,7 @@ import org.elasticsearch.test.ESIntegTestCase;
 
 import java.io.IOException;
 
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.equalTo;
 
@@ -68,6 +69,25 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
 
     }
 
+    public void testDynamicObjectCopyTo() throws Exception {
+        String mapping = jsonBuilder().startObject().startObject("doc").startObject("properties")
+            .startObject("foo")
+                .field("type", "string")
+                .field("copy_to", "root.top.child")
+            .endObject()
+            .endObject().endObject().endObject().string();
+        assertAcked(
+            client().admin().indices().prepareCreate("test-idx")
+                .addMapping("doc", mapping)
+        );
+        client().prepareIndex("test-idx", "doc", "1")
+            .setSource("foo", "bar")
+            .get();
+        client().admin().indices().prepareRefresh("test-idx").execute().actionGet();
+        SearchResponse response = client().prepareSearch("test-idx")
+            .setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get();
+        assertThat(response.getHits().totalHits(), equalTo(1L));
+    }
+
     private XContentBuilder createDynamicTemplateMapping() throws IOException {
         return XContentFactory.jsonBuilder().startObject().startObject("doc")
@@ -167,27 +167,126 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
 
     }
 
-    public void testCopyToFieldsNonExistingInnerObjectParsing() throws Exception {
-        String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
+    public void testCopyToDynamicInnerObjectParsing() throws Exception {
+        String mapping = jsonBuilder().startObject().startObject("type1")
+            .startObject("properties")
             .startObject("copy_test")
                 .field("type", "string")
                 .field("copy_to", "very.inner.field")
             .endObject()
-            .endObject().endObject().endObject().string();
+            .endObject()
+            .endObject().endObject().string();
 
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
 
         BytesReference json = jsonBuilder().startObject()
             .field("copy_test", "foo")
+            .field("new_field", "bar")
             .endObject().bytes();
 
+        ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc();
+        assertThat(doc.getFields("copy_test").length, equalTo(1));
+        assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
+
+        assertThat(doc.getFields("very.inner.field").length, equalTo(1));
+        assertThat(doc.getFields("very.inner.field")[0].stringValue(), equalTo("foo"));
+
+        assertThat(doc.getFields("new_field").length, equalTo(1));
+        assertThat(doc.getFields("new_field")[0].stringValue(), equalTo("bar"));
+    }
+
+    public void testCopyToDynamicInnerInnerObjectParsing() throws Exception {
+        String mapping = jsonBuilder().startObject().startObject("type1")
+            .startObject("properties")
+            .startObject("copy_test")
+                .field("type", "string")
+                .field("copy_to", "very.far.inner.field")
+            .endObject()
+            .startObject("very")
+                .field("type", "object")
+                .startObject("properties")
+                    .startObject("far")
+                        .field("type", "object")
+                    .endObject()
+                .endObject()
+            .endObject()
+            .endObject()
+            .endObject().endObject().string();
+
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+
+        BytesReference json = jsonBuilder().startObject()
+            .field("copy_test", "foo")
+            .field("new_field", "bar")
+            .endObject().bytes();
+
+        ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc();
+        assertThat(doc.getFields("copy_test").length, equalTo(1));
+        assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
+
+        assertThat(doc.getFields("very.far.inner.field").length, equalTo(1));
+        assertThat(doc.getFields("very.far.inner.field")[0].stringValue(), equalTo("foo"));
+
+        assertThat(doc.getFields("new_field").length, equalTo(1));
+        assertThat(doc.getFields("new_field")[0].stringValue(), equalTo("bar"));
+    }
+
+    public void testCopyToStrictDynamicInnerObjectParsing() throws Exception {
+        String mapping = jsonBuilder().startObject().startObject("type1")
+            .field("dynamic", "strict")
+            .startObject("properties")
+            .startObject("copy_test")
+                .field("type", "string")
+                .field("copy_to", "very.inner.field")
+            .endObject()
+            .endObject()
+            .endObject().endObject().string();
+
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+
+        BytesReference json = jsonBuilder().startObject()
+            .field("copy_test", "foo")
+            .endObject().bytes();
+
         try {
             docMapper.parse("test", "type1", "1", json).rootDoc();
             fail();
         } catch (MapperParsingException ex) {
-            assertThat(ex.getMessage(), startsWith("attempt to copy value to non-existing object"));
+            assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [type1] is not allowed"));
+        }
+    }
+
+    public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception {
+        String mapping = jsonBuilder().startObject().startObject("type1")
+            .startObject("properties")
+            .startObject("copy_test")
+                .field("type", "string")
+                .field("copy_to", "very.far.field")
+            .endObject()
+            .startObject("very")
+                .field("type", "object")
+                .startObject("properties")
+                    .startObject("far")
+                        .field("type", "object")
+                        .field("dynamic", "strict")
+                    .endObject()
+                .endObject()
+            .endObject()
+
+            .endObject()
+            .endObject().endObject().string();
+
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+
+        BytesReference json = jsonBuilder().startObject()
+            .field("copy_test", "foo")
+            .endObject().bytes();
+
+        try {
+            docMapper.parse("test", "type1", "1", json).rootDoc();
+            fail();
+        } catch (MapperParsingException ex) {
+            assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));
         }
     }
 
@@ -337,6 +436,41 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
         }
     }
 
+    public void testCopyToDynamicNestedObjectParsing() throws Exception {
+        String mapping = jsonBuilder().startObject().startObject("type1")
+            .startArray("dynamic_templates")
+                .startObject()
+                    .startObject("objects")
+                        .field("match_mapping_type", "object")
+                        .startObject("mapping")
+                            .field("type", "nested")
+                        .endObject()
+                    .endObject()
+                .endObject()
+            .endArray()
+            .startObject("properties")
+            .startObject("copy_test")
+                .field("type", "string")
+                .field("copy_to", "very.inner.field")
+            .endObject()
+            .endObject()
+            .endObject().endObject().string();
+
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+
+        BytesReference json = jsonBuilder().startObject()
+            .field("copy_test", "foo")
+            .field("new_field", "bar")
+            .endObject().bytes();
+
+        try {
+            docMapper.parse("test", "type1", "1", json).rootDoc();
+            fail();
+        } catch (MapperParsingException ex) {
+            assertThat(ex.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`"));
+        }
+    }
+
     private void assertFieldValue(Document doc, String field, Number... expected) {
         IndexableField[] values = doc.getFields(field);
         if (values == null) {
@@ -25,12 +25,14 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.search.SearchHitField;
@@ -715,28 +717,25 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
         String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
                 .field("geohash", true).endObject().endObject().endObject().endObject().string();
-        DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
-        DocumentMapper stage1 = parser.parse(stage1Mapping);
+        MapperService mapperService = createIndex("test", settings).mapperService();
+        DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false);
         String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
                 .field("geohash", false).endObject().endObject().endObject().endObject().string();
-        DocumentMapper stage2 = parser.parse(stage2Mapping);
-
-        MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
-        assertThat(mergeResult.hasConflicts(), equalTo(true));
-        assertThat(mergeResult.buildConflicts().length, equalTo(3));
-        // todo better way of checking conflict?
-        assertThat("mapper [point] has different [lat_lon]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
-        assertThat("mapper [point] has different [geohash]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
-        assertThat("mapper [point] has different [geohash_precision]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
+        try {
+            mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]"));
+            assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]"));
+            assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]"));
+        }
 
         // correct mapping and ensure no failures
         stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
                 .field("geohash", true).endObject().endObject().endObject().endObject().string();
-        stage2 = parser.parse(stage2Mapping);
-        mergeResult = stage1.merge(stage2.mapping(), false, false);
-
-        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
-
+        mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
     }
 
     public void testGeoHashSearch() throws Exception {
@@ -22,12 +22,14 @@ import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
 import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
 import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
 import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
@@ -35,6 +37,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.isIn;
@@ -376,23 +379,21 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
                 .startObject("shape").field("type", "geo_shape").field("tree", "geohash").field("strategy", "recursive")
                 .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw")
                 .endObject().endObject().endObject().endObject().string();
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper stage1 = parser.parse(stage1Mapping);
+        MapperService mapperService = createIndex("test").mapperService();
+        DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false);
         String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree")
                 .field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26)
                 .field("orientation", "cw").endObject().endObject().endObject().endObject().string();
-        DocumentMapper stage2 = parser.parse(stage2Mapping);
-
-        MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
-        // check correct conflicts
-        assertThat(mergeResult.hasConflicts(), equalTo(true));
-        assertThat(mergeResult.buildConflicts().length, equalTo(4));
-        ArrayList<String> conflicts = new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()));
-        assertThat("mapper [shape] has different [strategy]", isIn(conflicts));
-        assertThat("mapper [shape] has different [tree]", isIn(conflicts));
-        assertThat("mapper [shape] has different [tree_levels]", isIn(conflicts));
-        assertThat("mapper [shape] has different [precision]", isIn(conflicts));
+        try {
+            mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]"));
+            assertThat(e.getMessage(), containsString("mapper [shape] has different [tree]"));
+            assertThat(e.getMessage(), containsString("mapper [shape] has different [tree_levels]"));
+            assertThat(e.getMessage(), containsString("mapper [shape] has different [precision]"));
+        }
 
         // verify nothing changed
         FieldMapper fieldMapper = stage1.mappers().getMapper("shape");
@@ -411,11 +412,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
         stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
                 .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string();
-        stage2 = parser.parse(stage2Mapping);
-        mergeResult = stage1.merge(stage2.mapping(), false, false);
-
-        // verify mapping changes, and ensure no failures
-        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
+        mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
 
         fieldMapper = stage1.mappers().getMapper("shape");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
@@ -22,9 +22,11 @@ package org.elasticsearch.index.mapper.multifield.merge;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -32,6 +34,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
 import java.util.Arrays;
 
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
@@ -113,9 +116,9 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
 
     public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json");
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+        MapperService mapperService = createIndex("test").mapperService();
 
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false);
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
         assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue());
@@ -129,12 +132,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
 
 
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json");
-        DocumentMapper docMapper2 = parser.parse(mapping);
-
-        MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false);
-        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
-
-        docMapper.merge(docMapper2.mapping(), false, false);
+        mapperService.merge("person", new CompressedXContent(mapping), false, false);
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@@ -151,12 +149,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
         assertThat(f, notNullValue());
 
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json");
-        DocumentMapper docMapper3 = parser.parse(mapping);
-
-        mergeResult = docMapper.merge(docMapper3.mapping(), true, false);
-        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
-
-        docMapper.merge(docMapper3.mapping(), false, false);
+        mapperService.merge("person", new CompressedXContent(mapping), false, false);
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@@ -168,24 +161,19 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
 
 
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json");
-        DocumentMapper docMapper4 = parser.parse(mapping);
-        mergeResult = docMapper.merge(docMapper4.mapping(), true, false);
-        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
-        assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different [index] values"));
-        assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different [store] values"));
-
-        mergeResult = docMapper.merge(docMapper4.mapping(), false, false);
-        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
-
-        assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
-        assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different [index] values"));
-        assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different [store] values"));
-
-        // There are conflicts, but the `name.not_indexed3` has been added, b/c that field has no conflicts
+        try {
+            mapperService.merge("person", new CompressedXContent(mapping), false, false);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("mapper [name] has different [index] values"));
+            assertThat(e.getMessage(), containsString("mapper [name] has different [store] values"));
+        }
+
+        // There are conflicts, so the `name.not_indexed3` has not been added
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
         assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue());
         assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue());
         assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue());
-        assertThat(docMapper.mappers().getMapper("name.not_indexed3"), notNullValue());
+        assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue());
     }
 }
@@ -25,6 +25,7 @@ import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.IndexableFieldType;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -478,7 +479,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
                 .startObject("properties").startObject("field").field("type", "string").endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = parser.parse(mapping);
+        DocumentMapper defaultMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false);
 
         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
@@ -507,10 +508,12 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
         updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
                 .endObject().endObject().endObject().endObject().string();
-        mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false);
-        assertTrue(mergeResult.hasConflicts());
-        assertEquals(1, mergeResult.buildConflicts().length);
-        assertTrue(mergeResult.buildConflicts()[0].contains("different [omit_norms]"));
+        try {
+            defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("different [omit_norms]"));
+        }
     }
 
     /**
@@ -41,6 +41,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
@@ -557,7 +558,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
     public void testMergingConflicts() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_timestamp").field("enabled", true)
-                .startObject("fielddata").field("format", "doc_values").endObject()
                 .field("store", "yes")
                 .field("index", "analyzed")
                 .field("path", "foo")
@@ -565,9 +565,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
         Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser();
+        MapperService mapperService = createIndex("test", indexSettings).mapperService();
 
-        DocumentMapper docMapper = parser.parse(mapping);
+        DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false);
         assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
         mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_timestamp").field("enabled", false)
@@ -579,20 +579,32 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
                 .endObject()
                 .endObject().endObject().string();
 
-        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
-        List<String> expectedConflicts = new ArrayList<>(Arrays.asList(
-                "mapper [_timestamp] has different [index] values",
-                "mapper [_timestamp] has different [store] values",
-                "Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02",
-                "Cannot update path in _timestamp value. Value is foo path in merged mapping is bar"));
-
-        for (String conflict : mergeResult.buildConflicts()) {
-            assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict));
-        }
-        assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty());
+        try {
+            mapperService.merge("type", new CompressedXContent(mapping), false, false);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values"));
+            assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values"));
+        }
+
         assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
         assertTrue(docMapper.timestampFieldMapper().enabled());
-        assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values"));
+
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", true)
+                .field("store", "yes")
+                .field("index", "analyzed")
+                .field("path", "bar")
+                .field("default", "1970-01-02")
+                .endObject()
+                .endObject().endObject().string();
+        try {
+            mapperService.merge("type", new CompressedXContent(mapping), false, false);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02"));
+            assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value. Value is foo path in merged mapping is bar"));
+        }
     }
 
     public void testBackcompatMergingConflictsForIndexValues() throws Exception {
@@ -48,7 +48,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
     public void testAllConflicts() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_create_index.json");
         String mappingUpdate = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json");
-        String[] errorMessage = {"[_all] enabled is true now encountering false",
+        String[] errorMessage = {
                 "[_all] has different [omit_norms] values",
                 "[_all] has different [store] values",
                 "[_all] has different [store_term_vector] values",
@@ -61,6 +61,13 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
         testConflict(mapping, mappingUpdate, errorMessage);
     }
 
+    public void testAllDisabled() throws Exception {
+        XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", true).endObject().endObject().endObject().endObject();
+        XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
+        String errorMessage = "[_all] enabled is true now encountering false";
+        testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
+    }
+
     public void testAllWithDefault() throws Exception {
         String defaultMapping = jsonBuilder().startObject().startObject("_default_")
                 .startObject("_all")
@ -123,14 +123,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
|
||||||
mapperService.merge("type", new CompressedXContent(update.string()), false, false);
|
mapperService.merge("type", new CompressedXContent(update.string()), false, false);
|
||||||
fail();
|
fail();
|
||||||
} catch (IllegalArgumentException e) {
|
} catch (IllegalArgumentException e) {
|
||||||
assertThat(e.getMessage(), containsString("Merge failed"));
|
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
mapperService.merge("type", new CompressedXContent(update.string()), false, false);
|
mapperService.merge("type", new CompressedXContent(update.string()), false, false);
|
||||||
fail();
|
fail();
|
||||||
} catch (IllegalArgumentException e) {
|
} catch (IllegalArgumentException e) {
|
||||||
assertThat(e.getMessage(), containsString("Merge failed"));
|
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
|
||||||
}
|
}
|
||||||
|
|
||||||
assertTrue(mapperService.documentMapper("type").mapping().root().getMapper("foo") instanceof LongFieldMapper);
|
assertTrue(mapperService.documentMapper("type").mapping().root().getMapper("foo") instanceof LongFieldMapper);
|
||||||
|
@ -167,7 +167,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
// same as the testConflictNewType except that the mapping update is on an existing type
|
// same as the testConflictNewType except that the mapping update is on an existing type
|
||||||
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/15049")
|
|
||||||
public void testConflictNewTypeUpdate() throws Exception {
|
public void testConflictNewTypeUpdate() throws Exception {
|
||||||
XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||||
.startObject("properties").startObject("foo").field("type", "long").endObject()
|
.startObject("properties").startObject("foo").field("type", "long").endObject()
|
||||||
|
|
|
@ -140,7 +140,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
|
||||||
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}").execute().actionGet();
|
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}").execute().actionGet();
|
||||||
fail("Expected MergeMappingException");
|
fail("Expected MergeMappingException");
|
||||||
} catch (IllegalArgumentException e) {
|
} catch (IllegalArgumentException e) {
|
||||||
assertThat(e.getMessage(), containsString("mapper [body] of different type"));
|
assertThat(e.getMessage(), containsString("mapper [body] cannot be changed from type [string] to [int]"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -22,54 +22,51 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.shard.IndexShard;
-import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.indices.IndicesService;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ESSingleNodeTestCase;
 
-import java.util.ArrayList;
+import java.util.*;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
 
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.not;
 
-public class IndexingMemoryControllerTests extends ESTestCase {
+public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {
|
|
||||||
static class MockController extends IndexingMemoryController {
|
static class MockController extends IndexingMemoryController {
|
||||||
|
|
||||||
final static ByteSizeValue INACTIVE = new ByteSizeValue(-1);
|
final static ByteSizeValue INACTIVE = new ByteSizeValue(-1);
|
||||||
|
|
||||||
final Map<ShardId, ByteSizeValue> indexingBuffers = new HashMap<>();
|
final Map<IndexShard, ByteSizeValue> indexingBuffers = new HashMap<>();
|
||||||
final Map<ShardId, ByteSizeValue> translogBuffers = new HashMap<>();
|
final Map<IndexShard, ByteSizeValue> translogBuffers = new HashMap<>();
|
||||||
|
|
||||||
final Map<ShardId, Long> lastIndexTimeNanos = new HashMap<>();
|
final Map<IndexShard, Long> lastIndexTimeNanos = new HashMap<>();
|
||||||
final Set<ShardId> activeShards = new HashSet<>();
|
final Set<IndexShard> activeShards = new HashSet<>();
|
||||||
|
|
||||||
long currentTimeSec = TimeValue.timeValueNanos(System.nanoTime()).seconds();
|
long currentTimeSec = TimeValue.timeValueNanos(System.nanoTime()).seconds();
|
||||||
|
|
||||||
public MockController(Settings settings) {
|
public MockController(Settings settings) {
|
||||||
super(Settings.builder()
|
super(Settings.builder()
|
||||||
.put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
|
.put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
|
||||||
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
|
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
|
||||||
.put(settings)
|
.put(settings)
|
||||||
.build(),
|
.build(),
|
||||||
null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb
|
null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb
|
||||||
}
|
}
|
||||||
|
|
||||||
public void deleteShard(ShardId id) {
|
public void deleteShard(IndexShard id) {
|
||||||
indexingBuffers.remove(id);
|
indexingBuffers.remove(id);
|
||||||
translogBuffers.remove(id);
|
translogBuffers.remove(id);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void assertBuffers(ShardId id, ByteSizeValue indexing, ByteSizeValue translog) {
|
public void assertBuffers(IndexShard id, ByteSizeValue indexing, ByteSizeValue translog) {
|
||||||
assertThat(indexingBuffers.get(id), equalTo(indexing));
|
assertThat(indexingBuffers.get(id), equalTo(indexing));
|
||||||
assertThat(translogBuffers.get(id), equalTo(translog));
|
assertThat(translogBuffers.get(id), equalTo(translog));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void assertInActive(ShardId id) {
|
public void assertInactive(IndexShard id) {
|
||||||
assertThat(indexingBuffers.get(id), equalTo(INACTIVE));
|
assertThat(indexingBuffers.get(id), equalTo(INACTIVE));
|
||||||
assertThat(translogBuffers.get(id), equalTo(INACTIVE));
|
assertThat(translogBuffers.get(id), equalTo(INACTIVE));
|
||||||
}
|
}
|
||||||
|
@ -80,36 +77,31 @@ public class IndexingMemoryControllerTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected List<ShardId> availableShards() {
|
protected List<IndexShard> availableShards() {
|
||||||
return new ArrayList<>(indexingBuffers.keySet());
|
return new ArrayList<>(indexingBuffers.keySet());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected boolean shardAvailable(ShardId shardId) {
|
protected boolean shardAvailable(IndexShard shard) {
|
||||||
return indexingBuffers.containsKey(shardId);
|
return indexingBuffers.containsKey(shard);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Boolean getShardActive(ShardId shardId) {
|
protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
|
||||||
return activeShards.contains(shardId);
|
indexingBuffers.put(shard, shardIndexingBufferSize);
|
||||||
|
translogBuffers.put(shard, shardTranslogBufferSize);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void updateShardBuffers(ShardId shardId, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
|
protected boolean checkIdle(IndexShard shard) {
|
||||||
indexingBuffers.put(shardId, shardIndexingBufferSize);
|
|
||||||
translogBuffers.put(shardId, shardTranslogBufferSize);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
protected Boolean checkIdle(ShardId shardId) {
|
|
||||||
final TimeValue inactiveTime = settings.getAsTime(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5));
|
final TimeValue inactiveTime = settings.getAsTime(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5));
|
||||||
Long ns = lastIndexTimeNanos.get(shardId);
|
Long ns = lastIndexTimeNanos.get(shard);
|
||||||
if (ns == null) {
|
if (ns == null) {
|
||||||
return null;
|
return true;
|
||||||
} else if (currentTimeInNanos() - ns >= inactiveTime.nanos()) {
|
} else if (currentTimeInNanos() - ns >= inactiveTime.nanos()) {
|
||||||
indexingBuffers.put(shardId, INACTIVE);
|
indexingBuffers.put(shard, INACTIVE);
|
||||||
translogBuffers.put(shardId, INACTIVE);
|
translogBuffers.put(shard, INACTIVE);
|
||||||
activeShards.remove(shardId);
|
activeShards.remove(shard);
|
||||||
return true;
|
return true;
|
||||||
} else {
|
} else {
|
||||||
return false;
|
return false;
|
||||||
|
@ -120,118 +112,126 @@ public class IndexingMemoryControllerTests extends ESTestCase {
|
||||||
currentTimeSec += sec;
|
currentTimeSec += sec;
|
||||||
}
|
}
|
||||||
|
|
||||||
public void simulateIndexing(ShardId shardId) {
|
public void simulateIndexing(IndexShard shard) {
|
||||||
lastIndexTimeNanos.put(shardId, currentTimeInNanos());
|
lastIndexTimeNanos.put(shard, currentTimeInNanos());
|
||||||
if (indexingBuffers.containsKey(shardId) == false) {
|
if (indexingBuffers.containsKey(shard) == false) {
|
||||||
// First time we are seeing this shard; start it off with inactive buffers as IndexShard does:
|
// First time we are seeing this shard; start it off with inactive buffers as IndexShard does:
|
||||||
indexingBuffers.put(shardId, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
|
indexingBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
|
||||||
translogBuffers.put(shardId, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
|
translogBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
|
||||||
}
|
}
|
||||||
activeShards.add(shardId);
|
activeShards.add(shard);
|
||||||
forceCheck();
|
forceCheck();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testShardAdditionAndRemoval() {
|
public void testShardAdditionAndRemoval() {
|
||||||
|
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 3).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
|
||||||
|
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
|
||||||
|
IndexService test = indicesService.indexService("test");
|
||||||
|
|
||||||
MockController controller = new MockController(Settings.builder()
|
MockController controller = new MockController(Settings.builder()
|
||||||
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
|
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
|
||||||
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build());
|
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build());
|
||||||
final ShardId shard1 = new ShardId("test", 1);
|
IndexShard shard0 = test.getShard(0);
|
||||||
controller.simulateIndexing(shard1);
|
controller.simulateIndexing(shard0);
|
||||||
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
|
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
|
||||||
|
|
||||||
// add another shard
|
// add another shard
|
||||||
final ShardId shard2 = new ShardId("test", 2);
|
IndexShard shard1 = test.getShard(1);
|
||||||
controller.simulateIndexing(shard2);
|
controller.simulateIndexing(shard1);
|
||||||
|
controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
|
||||||
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
|
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
|
||||||
controller.assertBuffers(shard2, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
|
|
||||||
|
|
||||||
// remove first shard
|
// remove first shard
|
||||||
controller.deleteShard(shard1);
|
controller.deleteShard(shard0);
|
||||||
controller.forceCheck();
|
controller.forceCheck();
|
||||||
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
|
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
|
||||||
|
|
||||||
// remove second shard
|
// remove second shard
|
||||||
controller.deleteShard(shard2);
|
controller.deleteShard(shard1);
|
||||||
controller.forceCheck();
|
controller.forceCheck();
|
||||||
|
|
||||||
// add a new one
|
// add a new one
|
||||||
final ShardId shard3 = new ShardId("test", 3);
|
IndexShard shard2 = test.getShard(2);
|
||||||
controller.simulateIndexing(shard3);
|
controller.simulateIndexing(shard2);
|
||||||
controller.assertBuffers(shard3, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
|
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testActiveInactive() {
|
public void testActiveInactive() {
|
||||||
MockController controller = new MockController(Settings.builder()
|
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
|
||||||
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
|
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
|
||||||
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb")
|
IndexService test = indicesService.indexService("test");
|
||||||
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s")
|
|
||||||
.build());
|
|
||||||
|
|
||||||
final ShardId shard1 = new ShardId("test", 1);
|
MockController controller = new MockController(Settings.builder()
|
||||||
|
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
|
||||||
|
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb")
|
||||||
|
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s")
|
||||||
|
.build());
|
||||||
|
|
||||||
|
IndexShard shard0 = test.getShard(0);
|
||||||
|
controller.simulateIndexing(shard0);
|
||||||
|
IndexShard shard1 = test.getShard(1);
|
||||||
controller.simulateIndexing(shard1);
|
controller.simulateIndexing(shard1);
|
||||||
final ShardId shard2 = new ShardId("test", 2);
|
controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
|
||||||
controller.simulateIndexing(shard2);
|
|
||||||
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
|
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
|
||||||
controller.assertBuffers(shard2, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
|
|
||||||
|
|
||||||
// index into both shards, move the clock and see that they are still active
|
// index into both shards, move the clock and see that they are still active
|
||||||
|
controller.simulateIndexing(shard0);
|
||||||
controller.simulateIndexing(shard1);
|
controller.simulateIndexing(shard1);
|
||||||
controller.simulateIndexing(shard2);
|
|
||||||
|
|
||||||
controller.incrementTimeSec(10);
|
controller.incrementTimeSec(10);
|
||||||
controller.forceCheck();
|
controller.forceCheck();
|
||||||
|
|
||||||
// both shards now inactive
|
// both shards now inactive
|
||||||
controller.assertInActive(shard1);
|
controller.assertInactive(shard0);
|
||||||
controller.assertInActive(shard2);
|
controller.assertInactive(shard1);
|
||||||
|
|
||||||
// index into one shard only, see it becomes active
|
// index into one shard only, see it becomes active
|
||||||
controller.simulateIndexing(shard1);
|
controller.simulateIndexing(shard0);
|
||||||
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
|
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
|
||||||
controller.assertInActive(shard2);
|
controller.assertInactive(shard1);
|
||||||
|
|
||||||
controller.incrementTimeSec(3); // increment but not enough to become inactive
|
controller.incrementTimeSec(3); // increment but not enough to become inactive
|
||||||
controller.forceCheck();
|
controller.forceCheck();
|
||||||
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
|
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
|
||||||
controller.assertInActive(shard2);
|
controller.assertInactive(shard1);
|
||||||
|
|
||||||
controller.incrementTimeSec(3); // increment some more
|
controller.incrementTimeSec(3); // increment some more
|
||||||
controller.forceCheck();
|
controller.forceCheck();
|
||||||
controller.assertInActive(shard1);
|
controller.assertInactive(shard0);
|
||||||
controller.assertInActive(shard2);
|
controller.assertInactive(shard1);
|
||||||
|
|
||||||
// index some and shard becomes immediately active
|
// index some and shard becomes immediately active
|
||||||
controller.simulateIndexing(shard2);
|
controller.simulateIndexing(shard1);
|
||||||
controller.assertInActive(shard1);
|
controller.assertInactive(shard0);
|
||||||
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
|
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testMinShardBufferSizes() {
|
public void testMinShardBufferSizes() {
|
||||||
MockController controller = new MockController(Settings.builder()
|
MockController controller = new MockController(Settings.builder()
|
||||||
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
|
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
|
||||||
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
|
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
|
||||||
.put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb")
|
.put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb")
|
||||||
.put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build());
|
.put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build());
|
||||||
|
|
||||||
assertTwoActiveShards(controller, new ByteSizeValue(6, ByteSizeUnit.MB), new ByteSizeValue(40, ByteSizeUnit.KB));
|
assertTwoActiveShards(controller, new ByteSizeValue(6, ByteSizeUnit.MB), new ByteSizeValue(40, ByteSizeUnit.KB));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testMaxShardBufferSizes() {
|
public void testMaxShardBufferSizes() {
|
||||||
MockController controller = new MockController(Settings.builder()
|
MockController controller = new MockController(Settings.builder()
|
||||||
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
|
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
|
||||||
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
|
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
|
||||||
.put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb")
|
.put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb")
|
||||||
.put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build());
|
.put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build());
|
||||||
|
|
||||||
assertTwoActiveShards(controller, new ByteSizeValue(3, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.KB));
|
assertTwoActiveShards(controller, new ByteSizeValue(3, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.KB));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testRelativeBufferSizes() {
|
public void testRelativeBufferSizes() {
|
||||||
MockController controller = new MockController(Settings.builder()
|
MockController controller = new MockController(Settings.builder()
|
||||||
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%")
|
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%")
|
||||||
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%")
|
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%")
|
||||||
.build());
|
.build());
|
||||||
|
|
||||||
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(50, ByteSizeUnit.MB)));
|
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(50, ByteSizeUnit.MB)));
|
||||||
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
|
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
|
||||||
|
@ -240,10 +240,10 @@ public class IndexingMemoryControllerTests extends ESTestCase {
|
||||||
|
|
||||||
public void testMinBufferSizes() {
|
public void testMinBufferSizes() {
|
||||||
MockController controller = new MockController(Settings.builder()
|
MockController controller = new MockController(Settings.builder()
|
||||||
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
|
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
|
||||||
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%")
|
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%")
|
||||||
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb")
|
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb")
|
||||||
.put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
|
.put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
|
||||||
|
|
||||||
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
|
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
|
||||||
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
|
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
|
||||||
|
@ -251,23 +251,24 @@ public class IndexingMemoryControllerTests extends ESTestCase {
|
||||||
|
|
||||||
public void testMaxBufferSizes() {
|
public void testMaxBufferSizes() {
|
||||||
MockController controller = new MockController(Settings.builder()
|
MockController controller = new MockController(Settings.builder()
|
||||||
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%")
|
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%")
|
||||||
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%")
|
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%")
|
||||||
.put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb")
|
.put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb")
|
||||||
.put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
|
.put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
|
||||||
|
|
||||||
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
|
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
|
||||||
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
|
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
|
||||||
}
|
}
|
||||||
|
|
||||||
protected void assertTwoActiveShards(MockController controller, ByteSizeValue indexBufferSize, ByteSizeValue translogBufferSize) {
|
protected void assertTwoActiveShards(MockController controller, ByteSizeValue indexBufferSize, ByteSizeValue translogBufferSize) {
|
||||||
final ShardId shard1 = new ShardId("test", 1);
|
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
|
||||||
|
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
|
||||||
|
IndexService test = indicesService.indexService("test");
|
||||||
|
IndexShard shard0 = test.getShard(0);
|
||||||
|
controller.simulateIndexing(shard0);
|
||||||
|
IndexShard shard1 = test.getShard(1);
|
||||||
controller.simulateIndexing(shard1);
|
controller.simulateIndexing(shard1);
|
||||||
final ShardId shard2 = new ShardId("test", 2);
|
controller.assertBuffers(shard0, indexBufferSize, translogBufferSize);
|
||||||
controller.simulateIndexing(shard2);
|
|
||||||
controller.assertBuffers(shard1, indexBufferSize, translogBufferSize);
|
controller.assertBuffers(shard1, indexBufferSize, translogBufferSize);
|
||||||
controller.assertBuffers(shard2, indexBufferSize, translogBufferSize);
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
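The arithmetic behind the assertBuffers expectations in the IndexingMemoryControllerTests changes above is simply the global budget divided evenly across the shards the controller currently considers active, with the per-shard translog buffer additionally capped. A stand-alone sketch of that calculation (my own illustration, not the actual IndexingMemoryController code), using the 10mb/100kb settings from the test:

```java
// Hypothetical illustration of the buffer split asserted in the test above:
// each active shard gets totalBudget / activeShards, clamped to a per-shard cap.
public class BufferSplitDemo {
    static long perShardBytes(long totalBudgetBytes, int activeShards, long maxPerShardBytes) {
        if (activeShards == 0) {
            return 0;
        }
        return Math.min(totalBudgetBytes / activeShards, maxPerShardBytes);
    }

    public static void main(String[] args) {
        long indexBudget = 10L * 1024 * 1024;   // 10mb, as in the test settings
        long translogBudget = 100L * 1024;      // 100kb
        long translogCap = 64L * 1024;          // per-shard translog cap seen in the assertions

        // one active shard: full 10mb indexing buffer, translog capped at 64kb
        System.out.println(perShardBytes(indexBudget, 1, Long.MAX_VALUE)); // 10485760
        System.out.println(perShardBytes(translogBudget, 1, translogCap)); // 65536

        // two active shards: 5mb and 50kb each, matching assertBuffers(..., 5mb, 50kb)
        System.out.println(perShardBytes(indexBudget, 2, Long.MAX_VALUE)); // 5242880
        System.out.println(perShardBytes(translogBudget, 2, translogCap)); // 51200
    }
}
```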
@@ -20,7 +20,7 @@
 package org.elasticsearch.plugins;
 
 import org.elasticsearch.Version;
-import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
+import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -259,14 +259,14 @@ public class PluginInfoTests extends ESTestCase {
     }
 
     public void testPluginListSorted() {
-        PluginsInfo pluginsInfo = new PluginsInfo(5);
+        PluginsAndModules pluginsInfo = new PluginsAndModules();
-        pluginsInfo.add(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
+        pluginsInfo.addPlugin(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
-        pluginsInfo.add(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
+        pluginsInfo.addPlugin(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
-        pluginsInfo.add(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
+        pluginsInfo.addPlugin(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
-        pluginsInfo.add(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
+        pluginsInfo.addPlugin(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
-        pluginsInfo.add(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));
+        pluginsInfo.addPlugin(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));
 
-        final List<PluginInfo> infos = pluginsInfo.getInfos();
+        final List<PluginInfo> infos = pluginsInfo.getPluginInfos();
         List<String> names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList());
         assertThat(names, contains("a", "b", "c", "d", "e"));
     }
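The assertion on contains("a", "b", "c", "d", "e") relies on the container handing plugin infos back in name order even though they were added out of order. A minimal stand-alone sketch of that behaviour; DemoPluginInfo and DemoPluginsAndModules are made-up stand-ins, not the Elasticsearch classes, and returning a sorted copy is just one way to satisfy the test:

```java
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

// Made-up stand-ins illustrating "add in any order, read back sorted by name".
class DemoPluginInfo {
    final String name;
    DemoPluginInfo(String name) { this.name = name; }
    String getName() { return name; }
}

class DemoPluginsAndModules {
    private final List<DemoPluginInfo> plugins = new ArrayList<>();

    void addPlugin(DemoPluginInfo info) {
        plugins.add(info);
    }

    // Hand back a copy sorted by name so callers always see a stable order.
    List<DemoPluginInfo> getPluginInfos() {
        List<DemoPluginInfo> copy = new ArrayList<>(plugins);
        copy.sort(Comparator.comparing(DemoPluginInfo::getName));
        return copy;
    }
}
```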
@ -26,6 +26,8 @@ import org.elasticsearch.env.Environment;
|
||||||
import org.elasticsearch.index.IndexModule;
|
import org.elasticsearch.index.IndexModule;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
|
import java.nio.file.Files;
|
||||||
|
import java.nio.file.Path;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
|
|
||||||
public class PluginsServiceTests extends ESTestCase {
|
public class PluginsServiceTests extends ESTestCase {
|
||||||
|
@ -81,7 +83,7 @@ public class PluginsServiceTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
static PluginsService newPluginsService(Settings settings, Class<? extends Plugin>... classpathPlugins) {
|
static PluginsService newPluginsService(Settings settings, Class<? extends Plugin>... classpathPlugins) {
|
||||||
return new PluginsService(settings, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
|
return new PluginsService(settings, null, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testAdditionalSettings() {
|
public void testAdditionalSettings() {
|
||||||
|
@ -123,4 +125,15 @@ public class PluginsServiceTests extends ESTestCase {
|
||||||
assertEquals("boom", ex.getCause().getCause().getMessage());
|
assertEquals("boom", ex.getCause().getCause().getMessage());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testExistingPluginMissingDescriptor() throws Exception {
|
||||||
|
Path pluginsDir = createTempDir();
|
||||||
|
Files.createDirectory(pluginsDir.resolve("plugin-missing-descriptor"));
|
||||||
|
try {
|
||||||
|
PluginsService.getPluginBundles(pluginsDir);
|
||||||
|
fail();
|
||||||
|
} catch (IllegalStateException e) {
|
||||||
|
assertTrue(e.getMessage(), e.getMessage().contains("Could not load plugin descriptor for existing plugin"));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,79 @@
|
||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.script;
|
||||||
|
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
|
import java.security.AllPermission;
|
||||||
|
import java.security.PermissionCollection;
|
||||||
|
|
||||||
|
/** Very simple sanity checks for {@link ClassPermission} */
|
||||||
|
public class ClassPermissionTests extends ESTestCase {
|
||||||
|
|
||||||
|
public void testEquals() {
|
||||||
|
assertEquals(new ClassPermission("pkg.MyClass"), new ClassPermission("pkg.MyClass"));
|
||||||
|
assertFalse(new ClassPermission("pkg.MyClass").equals(new AllPermission()));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testImplies() {
|
||||||
|
assertTrue(new ClassPermission("pkg.MyClass").implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
assertFalse(new ClassPermission("pkg.MyClass").implies(new ClassPermission("pkg.MyOtherClass")));
|
||||||
|
assertFalse(new ClassPermission("pkg.MyClass").implies(null));
|
||||||
|
assertFalse(new ClassPermission("pkg.MyClass").implies(new AllPermission()));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testStandard() {
|
||||||
|
assertTrue(new ClassPermission("<<STANDARD>>").implies(new ClassPermission("java.lang.Math")));
|
||||||
|
assertFalse(new ClassPermission("<<STANDARD>>").implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testPermissionCollection() {
|
||||||
|
ClassPermission math = new ClassPermission("java.lang.Math");
|
||||||
|
PermissionCollection collection = math.newPermissionCollection();
|
||||||
|
collection.add(math);
|
||||||
|
assertTrue(collection.implies(new ClassPermission("java.lang.Math")));
|
||||||
|
assertFalse(collection.implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testPermissionCollectionStandard() {
|
||||||
|
ClassPermission standard = new ClassPermission("<<STANDARD>>");
|
||||||
|
PermissionCollection collection = standard.newPermissionCollection();
|
||||||
|
collection.add(standard);
|
||||||
|
assertTrue(collection.implies(new ClassPermission("java.lang.Math")));
|
||||||
|
assertFalse(collection.implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
}
|
||||||
|
|
||||||
|
/** not recommended but we test anyway */
|
||||||
|
public void testWildcards() {
|
||||||
|
assertTrue(new ClassPermission("*").implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
assertTrue(new ClassPermission("pkg.*").implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
assertTrue(new ClassPermission("pkg.*").implies(new ClassPermission("pkg.sub.MyClass")));
|
||||||
|
assertFalse(new ClassPermission("pkg.My*").implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
assertFalse(new ClassPermission("pkg*").implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testPermissionCollectionWildcards() {
|
||||||
|
ClassPermission lang = new ClassPermission("java.lang.*");
|
||||||
|
PermissionCollection collection = lang.newPermissionCollection();
|
||||||
|
collection.add(lang);
|
||||||
|
assertTrue(collection.implies(new ClassPermission("java.lang.Math")));
|
||||||
|
assertFalse(collection.implies(new ClassPermission("pkg.MyClass")));
|
||||||
|
}
|
||||||
|
}
|
|
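The wildcard expectations in the ClassPermissionTests above line up with the standard java.security.BasicPermission rules: a lone `*`, or a name ending in `.*`, is a wildcard, while an embedded `*` is treated literally. A self-contained sketch reproducing those cases with a made-up BasicPermission subclass (not the real ClassPermission):

```java
import java.security.BasicPermission;

// Made-up stand-in: BasicPermission already provides the wildcard rules the
// tests above exercise ("*" and "pkg.*" imply; "pkg.My*" and "pkg*" do not).
class DemoClassPermission extends BasicPermission {
    DemoClassPermission(String name) {
        super(name);
    }
}

class WildcardDemo {
    public static void main(String[] args) {
        System.out.println(new DemoClassPermission("*").implies(new DemoClassPermission("pkg.MyClass")));         // true
        System.out.println(new DemoClassPermission("pkg.*").implies(new DemoClassPermission("pkg.sub.MyClass"))); // true
        System.out.println(new DemoClassPermission("pkg.My*").implies(new DemoClassPermission("pkg.MyClass")));   // false
        System.out.println(new DemoClassPermission("pkg*").implies(new DemoClassPermission("pkg.MyClass")));      // false
    }
}
```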
@ -39,20 +39,61 @@ buildscript {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
allprojects {
|
// this is common configuration for distributions, but we also add it here for the license check to use
|
||||||
project.ext {
|
ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
|
||||||
// this is common configuration for distributions, but we also add it here for the license check to use
|
|
||||||
dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
|
|
||||||
|
/*****************************************************************************
|
||||||
|
* Modules *
|
||||||
|
*****************************************************************************/
|
||||||
|
|
||||||
|
task buildModules(type: Copy) {
|
||||||
|
into 'build/modules'
|
||||||
|
}
|
||||||
|
|
||||||
|
ext.restTestExpansions = [
|
||||||
|
'expected.modules.count': 0,
|
||||||
|
]
|
||||||
|
// we create the buildModules task above so the distribution subprojects can
|
||||||
|
// depend on it, but we don't actually configure it until projects are evaluated
|
||||||
|
// so it can depend on the bundling of plugins (ie modules must have been configured)
|
||||||
|
project.gradle.projectsEvaluated {
|
||||||
|
project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each { Project module ->
|
||||||
|
buildModules {
|
||||||
|
dependsOn module.bundlePlugin
|
||||||
|
into(module.name) {
|
||||||
|
from { zipTree(module.bundlePlugin.outputs.files.singleFile) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
configure(subprojects.findAll { it.name != 'integ-test-zip' }) { Project distribution ->
|
||||||
|
distribution.integTest.mustRunAfter(module.integTest)
|
||||||
|
}
|
||||||
|
restTestExpansions['expected.modules.count'] += 1
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// make sure we have a clean task since we aren't a java project, but we have tasks that
|
||||||
|
// put stuff in the build dir
|
||||||
|
task clean(type: Delete) {
|
||||||
|
delete 'build'
|
||||||
|
}
|
||||||
|
|
||||||
subprojects {
|
subprojects {
|
||||||
/*****************************************************************************
|
/*****************************************************************************
|
||||||
* Rest test config *
|
* Rest test config *
|
||||||
*****************************************************************************/
|
*****************************************************************************/
|
||||||
apply plugin: 'elasticsearch.rest-test'
|
apply plugin: 'elasticsearch.rest-test'
|
||||||
integTest {
|
project.integTest {
|
||||||
includePackaged true
|
dependsOn(project.assemble)
|
||||||
|
includePackaged project.name == 'integ-test-zip'
|
||||||
|
cluster {
|
||||||
|
distribution = project.name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
processTestResources {
|
||||||
|
inputs.properties(project(':distribution').restTestExpansions)
|
||||||
|
MavenFilteringHack.filter(it, project(':distribution').restTestExpansions)
|
||||||
}
|
}
|
||||||
|
|
||||||
/*****************************************************************************
|
/*****************************************************************************
|
||||||
|
@ -81,7 +122,12 @@ subprojects {
|
||||||
libFiles = copySpec {
|
libFiles = copySpec {
|
||||||
into 'lib'
|
into 'lib'
|
||||||
from project(':core').jar
|
from project(':core').jar
|
||||||
from dependencyFiles
|
from project(':distribution').dependencyFiles
|
||||||
|
}
|
||||||
|
|
||||||
|
modulesFiles = copySpec {
|
||||||
|
into 'modules'
|
||||||
|
from project(':distribution').buildModules
|
||||||
}
|
}
|
||||||
|
|
||||||
configFiles = copySpec {
|
configFiles = copySpec {
|
||||||
|
@ -103,7 +149,7 @@ subprojects {
|
||||||
/*****************************************************************************
|
/*****************************************************************************
|
||||||
* Zip and tgz configuration *
|
* Zip and tgz configuration *
|
||||||
*****************************************************************************/
|
*****************************************************************************/
|
||||||
configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
|
configure(subprojects.findAll { ['zip', 'tar', 'integ-test-zip'].contains(it.name) }) {
|
||||||
project.ext.archivesFiles = copySpec {
|
project.ext.archivesFiles = copySpec {
|
||||||
into("elasticsearch-${version}") {
|
into("elasticsearch-${version}") {
|
||||||
with libFiles
|
with libFiles
|
||||||
|
@ -121,6 +167,9 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
|
||||||
from('../src/main/resources') {
|
from('../src/main/resources') {
|
||||||
include 'bin/*.exe'
|
include 'bin/*.exe'
|
||||||
}
|
}
|
||||||
|
if (project.name != 'integ-test-zip') {
|
||||||
|
with modulesFiles
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -143,7 +192,7 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
|
||||||
* directly from the filesystem. It doesn't want to process them through
|
* directly from the filesystem. It doesn't want to process them through
|
||||||
* MavenFilteringHack or any other copy-style action.
|
* MavenFilteringHack or any other copy-style action.
|
||||||
*/
|
*/
|
||||||
configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
|
configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) {
|
||||||
integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
|
integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
|
||||||
File packagingFiles = new File(buildDir, 'packaging')
|
File packagingFiles = new File(buildDir, 'packaging')
|
||||||
project.ext.packagingFiles = packagingFiles
|
project.ext.packagingFiles = packagingFiles
|
||||||
|
@ -233,6 +282,7 @@ configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
|
||||||
user 'root'
|
user 'root'
|
||||||
permissionGroup 'root'
|
permissionGroup 'root'
|
||||||
with libFiles
|
with libFiles
|
||||||
|
with modulesFiles
|
||||||
with copySpec {
|
with copySpec {
|
||||||
with commonFiles
|
with commonFiles
|
||||||
if (project.name == 'deb') {
|
if (project.name == 'deb') {
|
||||||
|
@ -305,7 +355,7 @@ task updateShas(type: UpdateShasTask) {
|
||||||
parentTask = dependencyLicenses
|
parentTask = dependencyLicenses
|
||||||
}
|
}
|
||||||
|
|
||||||
RunTask.configure(project)
|
task run(type: RunTask) {}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Build some variables that are replaced in the packages. This includes both
|
* Build some variables that are replaced in the packages. This includes both
|
||||||
|
|
|
@ -18,7 +18,7 @@
|
||||||
*/
|
*/
|
||||||
|
|
||||||
task buildDeb(type: Deb) {
|
task buildDeb(type: Deb) {
|
||||||
dependsOn dependencyFiles, preparePackagingFiles
|
dependsOn preparePackagingFiles
|
||||||
baseName 'elasticsearch' // this is what pom generation uses for artifactId
|
baseName 'elasticsearch' // this is what pom generation uses for artifactId
|
||||||
// Follow elasticsearch's deb file naming convention
|
// Follow elasticsearch's deb file naming convention
|
||||||
archiveName "${packageName}-${project.version}.deb"
|
archiveName "${packageName}-${project.version}.deb"
|
||||||
|
@ -44,6 +44,4 @@ integTest {
|
||||||
skip the test if they aren't around. */
|
skip the test if they aren't around. */
|
||||||
enabled = new File('/usr/bin/dpkg-deb').exists() || // Standard location
|
enabled = new File('/usr/bin/dpkg-deb').exists() || // Standard location
|
||||||
new File('/usr/local/bin/dpkg-deb').exists() // Homebrew location
|
new File('/usr/local/bin/dpkg-deb').exists() // Homebrew location
|
||||||
dependsOn buildDeb
|
|
||||||
clusterConfig.distribution = 'deb'
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -0,0 +1,13 @@
+# Integration tests for distributions with modules
+#
+"Correct Modules Count":
+  - do:
+      cluster.state: {}
+
+  # Get master node id
+  - set: { master_node: master }
+
+  - do:
+      nodes.info: {}
+
+  - length: { nodes.$master.plugins: ${expected.modules.count} }
@ -17,24 +17,15 @@
|
||||||
* under the License.
|
* under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
package org.elasticsearch.script.expression;
|
task buildZip(type: Zip) {
|
||||||
|
baseName = 'elasticsearch'
|
||||||
import org.elasticsearch.plugins.Plugin;
|
with archivesFiles
|
||||||
import org.elasticsearch.script.ScriptModule;
|
|
||||||
|
|
||||||
public class ExpressionPlugin extends Plugin {
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String name() {
|
|
||||||
return "lang-expression";
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String description() {
|
|
||||||
return "Lucene expressions integration for Elasticsearch";
|
|
||||||
}
|
|
||||||
|
|
||||||
public void onModule(ScriptModule module) {
|
|
||||||
module.addScriptEngine(ExpressionScriptEngineService.class);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
artifacts {
|
||||||
|
'default' buildZip
|
||||||
|
archives buildZip
|
||||||
|
}
|
||||||
|
|
||||||
|
integTest.dependsOn buildZip
|
||||||
|
|
|
@ -19,20 +19,20 @@
|
||||||
|
|
||||||
package org.elasticsearch.test.rest;
|
package org.elasticsearch.test.rest;
|
||||||
|
|
||||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
|
||||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||||
|
|
||||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
/** Rest API tests subset 3 */
|
/** Rest integration test. runs against external cluster in 'mvn verify' */
|
||||||
public class Rest3IT extends ESRestTestCase {
|
public class RestIT extends ESRestTestCase {
|
||||||
public Rest3IT(@Name("yaml") RestTestCandidate testCandidate) {
|
public RestIT(RestTestCandidate testCandidate) {
|
||||||
super(testCandidate);
|
super(testCandidate);
|
||||||
}
|
}
|
||||||
|
// we run them all sequentially: start simple!
|
||||||
@ParametersFactory
|
@ParametersFactory
|
||||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||||
return createParameters(3, 8);
|
return createParameters(0, 1);
|
||||||
}
|
}
|
||||||
}
|
}
|
|
@ -42,6 +42,4 @@ integTest {
|
||||||
enabled = new File('/bin/rpm').exists() || // Standard location
|
enabled = new File('/bin/rpm').exists() || // Standard location
|
||||||
new File('/usr/bin/rpm').exists() || // Debian location
|
new File('/usr/bin/rpm').exists() || // Debian location
|
||||||
new File('/usr/local/bin/rpm').exists() // Homebrew location
|
new File('/usr/local/bin/rpm').exists() // Homebrew location
|
||||||
dependsOn buildRpm
|
|
||||||
clusterConfig.distribution = 'rpm'
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
# Integration tests for distributions with modules
|
||||||
|
#
|
||||||
|
"Correct Modules Count":
|
||||||
|
- do:
|
||||||
|
cluster.state: {}
|
||||||
|
|
||||||
|
# Get master node id
|
||||||
|
- set: { master_node: master }
|
||||||
|
|
||||||
|
- do:
|
||||||
|
nodes.info: {}
|
||||||
|
|
||||||
|
- length: { nodes.$master.plugins: ${expected.modules.count} }
|
|
@ -17,7 +17,7 @@
|
||||||
* under the License.
|
* under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
task buildTar(type: Tar, dependsOn: dependencyFiles) {
|
task buildTar(type: Tar) {
|
||||||
baseName = 'elasticsearch'
|
baseName = 'elasticsearch'
|
||||||
extension = 'tar.gz'
|
extension = 'tar.gz'
|
||||||
with archivesFiles
|
with archivesFiles
|
||||||
|
@ -28,8 +28,3 @@ artifacts {
|
||||||
'default' buildTar
|
'default' buildTar
|
||||||
archives buildTar
|
archives buildTar
|
||||||
}
|
}
|
||||||
|
|
||||||
integTest {
|
|
||||||
dependsOn buildTar
|
|
||||||
clusterConfig.distribution = 'tar'
|
|
||||||
}
|
|
||||||
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
# Integration tests for distributions with modules
|
||||||
|
#
|
||||||
|
"Correct Modules Count":
|
||||||
|
- do:
|
||||||
|
cluster.state: {}
|
||||||
|
|
||||||
|
# Get master node id
|
||||||
|
- set: { master_node: master }
|
||||||
|
|
||||||
|
- do:
|
||||||
|
nodes.info: {}
|
||||||
|
|
||||||
|
- length: { nodes.$master.plugins: ${expected.modules.count} }
|
|
@ -17,7 +17,7 @@
|
||||||
* under the License.
|
* under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
task buildZip(type: Zip, dependsOn: dependencyFiles) {
|
task buildZip(type: Zip) {
|
||||||
baseName = 'elasticsearch'
|
baseName = 'elasticsearch'
|
||||||
with archivesFiles
|
with archivesFiles
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
# Integration tests for distributions with modules
|
||||||
|
#
|
||||||
|
"Correct Modules Count":
|
||||||
|
- do:
|
||||||
|
cluster.state: {}
|
||||||
|
|
||||||
|
# Get master node id
|
||||||
|
- set: { master_node: master }
|
||||||
|
|
||||||
|
- do:
|
||||||
|
nodes.info: {}
|
||||||
|
|
||||||
|
- length: { nodes.$master.plugins: ${expected.modules.count} }
|
|
@@ -90,6 +90,9 @@ The search exists api has been removed in favour of using the search api with
 The deprecated `/_optimize` endpoint has been removed. The `/_forcemerge`
 endpoint should be used in lieu of optimize.
 
+The `GET` HTTP verb for `/_forcemerge` is no longer supported, please use the
+`POST` HTTP verb.
+
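For reference, the same operation issued from the Java client rather than over HTTP; a small sketch assuming a 2.x-era `Client` instance is already available (the HTTP equivalent is a `POST` to `/_forcemerge`):

```java
import org.elasticsearch.client.Client;

public class ForceMergeExample {
    // Assumes an already-constructed Client (node or transport client).
    static void forceMerge(Client client, String index) {
        client.admin().indices()
              .prepareForceMerge(index)
              .setMaxNumSegments(1)   // optional: merge down to a single segment
              .get();
    }
}
```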
 ==== Deprecated queries removed
 
 The following deprecated queries have been removed:
@@ -60,7 +60,7 @@ default.
 
 It is a good idea to place these directories in a different location so that
 there is no chance of deleting them when upgrading Elasticsearch. These
-custom paths can be <<paths,configured>> with the `path.config` and
+custom paths can be <<paths,configured>> with the `path.conf` and
 `path.data` settings.
 
 The Debian and RPM packages place these directories in the
@@ -80,7 +80,7 @@ To upgrade using a zip or compressed tarball:
 overwrite the `config` or `data` directories.
 
 * Either copy the files in the `config` directory from your old installation
-  to your new installation, or use the `--path.config` option on the command
+  to your new installation, or use the `--path.conf` option on the command
   line to point to an external config directory.
 
 * Either copy the files in the `data` directory from your old installation
@@ -21,12 +21,10 @@ consult this table:
 [cols="1<m,1<m,3",options="header",]
 |=======================================================================
 |Upgrade From |Upgrade To |Supported Upgrade Type
-|0.90.x |1.x, 2.x |<<restart-upgrade,Full cluster restart>>
+|0.90.x |2.x |<<restart-upgrade,Full cluster restart>>
-|< 0.90.7 |0.90.x |<<restart-upgrade,Full cluster restart>>
-|>= 0.90.7 |0.90.x |<<rolling-upgrades,Rolling upgrade>>
-|1.0.0 - 1.3.1 |1.x |<<rolling-upgrades,Rolling upgrade>> (if <<recovery,`indices.recovery.compress`>> set to `false`)
-|>= 1.3.2 |1.x |<<rolling-upgrades,Rolling upgrade>>
 |1.x |2.x |<<restart-upgrade,Full cluster restart>>
+|2.x |2.y |<<rolling-upgrades,Rolling upgrade>> (where `y > x `)
+|2.x |3.x |<<restart-upgrade,Full cluster restart>>
 |=======================================================================
 
 TIP: Take plugins into consideration as well when upgrading. Most plugins will have to be upgraded alongside Elasticsearch, although some plugins accessed primarily through the browser (`_site` plugins) may continue to work given that API changes are compatible.
@ -0,0 +1,46 @@
|
||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
subprojects {
|
||||||
|
apply plugin: 'elasticsearch.esplugin'
|
||||||
|
|
||||||
|
esplugin {
|
||||||
|
// for local ES plugins, the name of the plugin is the same as the directory
|
||||||
|
name project.name
|
||||||
|
}
|
||||||
|
|
||||||
|
if (project.file('src/main/packaging').exists()) {
|
||||||
|
throw new InvalidModelException("Modules cannot contain packaging files")
|
||||||
|
}
|
||||||
|
if (project.file('src/main/bin').exists()) {
|
||||||
|
throw new InvalidModelException("Modules cannot contain bin files")
|
||||||
|
}
|
||||||
|
if (project.file('src/main/config').exists()) {
|
||||||
|
throw new InvalidModelException("Modules cannot contain config files")
|
||||||
|
}
|
||||||
|
|
||||||
|
project.afterEvaluate {
|
||||||
|
if (esplugin.isolated == false) {
|
||||||
|
throw new InvalidModelException("Modules cannot disable isolation")
|
||||||
|
}
|
||||||
|
if (esplugin.jvm == false) {
|
||||||
|
throw new InvalidModelException("Modules must be jvm plugins")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@@ -0,0 +1,67 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule;

import java.security.AccessController;
import java.security.PrivilegedAction;
import java.text.ParseException;

public class ExpressionPlugin extends Plugin {

    // lucene expressions has crazy checks in its clinit for the functions map
    // it violates rules of classloaders to detect accessibility
    // TODO: clean that up
    static {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new SpecialPermission());
        }
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                try {
                    JavascriptCompiler.compile("0");
                } catch (ParseException e) {
                    throw new RuntimeException(e);
                }
                return null;
            }
        });
    }

    @Override
    public String name() {
        return "lang-expression";
    }

    @Override
    public String description() {
        return "Lucene expressions integration for Elasticsearch";
    }

    public void onModule(ScriptModule module) {
        module.addScriptEngine(ExpressionScriptEngineService.class);
    }
}
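The static block above only warms up Lucene's expression compiler: it compiles the constant "0" inside a privileged block so the library's class-initialization checks happen once, up front. As a hedged illustration of what that compiler does in isolation, here is a minimal standalone sketch against the Lucene 5.x expressions API this change builds on; the class name ExpressionSketch and the variables _score and popularity are illustrative only, not part of this commit.

import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.SimpleBindings;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;

public class ExpressionSketch {
    public static void main(String[] args) throws Exception {
        // compile a javascript-like expression string into a Lucene Expression
        Expression expr = JavascriptCompiler.compile("sqrt(_score) + ln(popularity)");
        // bind each variable in the expression to the relevance score or a numeric field
        SimpleBindings bindings = new SimpleBindings();
        bindings.add(new SortField("_score", SortField.Type.SCORE));
        bindings.add(new SortField("popularity", SortField.Type.INT));
        // the compiled expression can then drive sorting (or scoring) of a search
        Sort sort = new Sort(expr.getSortField(bindings, true));
        System.out.println(sort);
    }
}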
@@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.DateFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;
+import org.elasticsearch.script.ClassPermission;
 import org.elasticsearch.script.CompiledScript;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.ScriptEngineService;
@@ -44,6 +45,7 @@ import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.search.lookup.SearchLookup;

+import java.security.AccessControlContext;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
 import java.text.ParseException;
@@ -95,7 +97,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
     @Override
     public Object compile(String script) {
         // classloader created here
-        SecurityManager sm = System.getSecurityManager();
+        final SecurityManager sm = System.getSecurityManager();
         if (sm != null) {
             sm.checkPermission(new SpecialPermission());
         }
@@ -103,8 +105,24 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
             @Override
             public Expression run() {
                 try {
+                    // snapshot our context here, we check on behalf of the expression
+                    AccessControlContext engineContext = AccessController.getContext();
+                    ClassLoader loader = getClass().getClassLoader();
+                    if (sm != null) {
+                        loader = new ClassLoader(loader) {
+                            @Override
+                            protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
+                                try {
+                                    engineContext.checkPermission(new ClassPermission(name));
+                                } catch (SecurityException e) {
+                                    throw new ClassNotFoundException(name, e);
+                                }
+                                return super.loadClass(name, resolve);
+                            }
+                        };
+                    }
                     // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here
-                    return JavascriptCompiler.compile(script);
+                    return JavascriptCompiler.compile(script, JavascriptCompiler.DEFAULT_FUNCTIONS, loader);
                 } catch (ParseException e) {
                     throw new ScriptException("Failed to parse expression: " + script, e);
                 }
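The substantive change in this hunk is the switch to the three-argument JavascriptCompiler.compile overload, which takes a function table and the class loader used to resolve it; the engine supplies its permission-checking wrapper as that loader so every class an expression touches is vetted against the engine's AccessControlContext. A hedged sketch of that overload by itself, assuming the same Lucene API; CompileOverloadSketch and the myln helper are illustrative only, not part of this change.

import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.js.JavascriptCompiler;

import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

public class CompileOverloadSketch {
    // an illustrative extra function scripts could call as myln(x); Lucene requires
    // such functions to be public static and to take and return double
    public static double myln(double x) {
        return Math.log(x) + 1.0;
    }

    public static void main(String[] args) throws Exception {
        // start from the default function table and register the extra function
        Map<String, Method> functions = new HashMap<>(JavascriptCompiler.DEFAULT_FUNCTIONS);
        functions.put("myln", CompileOverloadSketch.class.getMethod("myln", double.class));
        // the third argument is the class loader used to resolve function owners;
        // the engine change above passes its permission-checking wrapper here instead
        ClassLoader loader = CompileOverloadSketch.class.getClassLoader();
        Expression expr = JavascriptCompiler.compile("myln(popularity)", functions, loader);
        System.out.println(expr.sourceText);
    }
}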
@@ -0,0 +1,34 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

grant {
  // needed to generate runtime classes
  permission java.lang.RuntimePermission "createClassLoader";
  // needed because of security problems in JavascriptCompiler
  permission java.lang.RuntimePermission "getClassLoader";

  // expression runtime
  permission org.elasticsearch.script.ClassPermission "java.lang.String";
  permission org.elasticsearch.script.ClassPermission "org.apache.lucene.expressions.Expression";
  permission org.elasticsearch.script.ClassPermission "org.apache.lucene.queries.function.FunctionValues";
  // available functions
  permission org.elasticsearch.script.ClassPermission "java.lang.Math";
  permission org.elasticsearch.script.ClassPermission "org.apache.lucene.util.MathUtil";
  permission org.elasticsearch.script.ClassPermission "org.apache.lucene.util.SloppyMath";
};
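Each grant above whitelists a single class name for compiled expressions. Assuming ClassPermission is the name-keyed permission its use in ExpressionScriptEngineService suggests, a small illustrative check follows; PolicySketch is hypothetical and not part of this change.

import java.security.Permission;

import org.elasticsearch.script.ClassPermission;

public class PolicySketch {
    public static void main(String[] args) {
        // a grant like the ones above whitelists exactly one class name
        Permission granted = new ClassPermission("java.lang.Math");
        System.out.println(granted.implies(new ClassPermission("java.lang.Math")));    // expected: true
        System.out.println(granted.implies(new ClassPermission("java.lang.Runtime"))); // expected: false
        // when no granted permission implies the requested one, the context check in
        // ExpressionScriptEngineService fails and its filtering loader throws ClassNotFoundException
    }
}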