Merge branch 'master' into feature/ingest

Martijn van Groningen 2015-12-07 15:53:39 +01:00
commit 6062c4eac9
208 changed files with 2350 additions and 1210 deletions

View File

@ -109,6 +109,7 @@ subprojects {
"org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
"org.elasticsearch:elasticsearch:${version}": ':core',
"org.elasticsearch:test-framework:${version}": ':test-framework',
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip',
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
"org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar',
"org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm',

View File

@ -23,40 +23,41 @@ import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.test.RunTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.bundling.Zip
/**
* Encapsulates build configuration for an Elasticsearch plugin.
*/
class PluginBuildPlugin extends BuildPlugin {
public class PluginBuildPlugin extends BuildPlugin {
@Override
void apply(Project project) {
public void apply(Project project) {
super.apply(project)
configureDependencies(project)
// this afterEvaluate must happen before the afterEvaluate added by integTest configure,
// this afterEvaluate must happen before the afterEvaluate added by integTest creation,
// so that the file name resolution for installing the plugin will be set up
project.afterEvaluate {
String name = project.pluginProperties.extension.name
project.jar.baseName = name
project.bundlePlugin.baseName = name
project.integTest.dependsOn(project.bundlePlugin)
project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
project.tasks.run.dependsOn(project.bundlePlugin)
project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
}
RestIntegTestTask.configure(project)
RunTask.configure(project)
Task bundle = configureBundleTask(project)
project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar }
project.configurations.getByName('default').extendsFrom = []
project.artifacts {
archives bundle
'default' bundle
if (project.path.startsWith(':modules:')) {
project.integTest.clusterConfig.module(project)
project.tasks.run.clusterConfig.module(project)
} else {
project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
}
}
createIntegTestTask(project)
createBundleTask(project)
project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build
}
static void configureDependencies(Project project) {
private static void configureDependencies(Project project) {
project.dependencies {
provided "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}"
testCompile "org.elasticsearch:test-framework:${project.versions.elasticsearch}"
@ -72,21 +73,36 @@ class PluginBuildPlugin extends BuildPlugin {
}
}
static Task configureBundleTask(Project project) {
PluginPropertiesTask buildProperties = project.tasks.create(name: 'pluginProperties', type: PluginPropertiesTask)
File pluginMetadata = project.file("src/main/plugin-metadata")
project.sourceSets.test {
output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
resources {
srcDir pluginMetadata
}
}
Task bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties])
bundle.configure {
from buildProperties
from pluginMetadata
from project.jar
from bundle.project.configurations.runtime - bundle.project.configurations.provided
/** Adds an integTest task which runs rest tests */
private static void createIntegTestTask(Project project) {
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.mustRunAfter(project.precommit, project.test)
project.check.dependsOn(integTest)
}
/**
* Adds a bundlePlugin task which builds the zip containing the plugin jars,
* metadata, properties, and packaging files
*/
private static void createBundleTask(Project project) {
File pluginMetadata = project.file('src/main/plugin-metadata')
// create a task to build the properties file for this plugin
PluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', PluginPropertiesTask.class)
// add the plugin properties and metadata to test resources, so unit tests can
// know about the plugin (used by test security code to statically initialize the plugin in unit tests)
SourceSet testSourceSet = project.sourceSets.test
testSourceSet.output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
testSourceSet.resources.srcDir(pluginMetadata)
// create the actual bundle task, which zips up all the files for the plugin
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) {
from buildProperties // plugin properties file
from pluginMetadata // metadata (eg custom security policy)
from project.jar // this plugin's jar
from project.configurations.runtime - project.configurations.provided // the dep jars
// extra files for the plugin to go into the zip
from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging
from('src/main') {
include 'config/**'
@ -97,6 +113,13 @@ class PluginBuildPlugin extends BuildPlugin {
}
}
project.assemble.dependsOn(bundle)
return bundle
// remove jar from the archives (things that will be published), and set it to the zip
project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar }
project.artifacts.add('archives', bundle)
// also make the zip the default artifact (used when depending on this project)
project.configurations.getByName('default').extendsFrom = []
project.artifacts.add('default', bundle)
}
}
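For orientation, a downstream plugin's build.gradle that consumes this plugin would look roughly like the sketch below; the plugin id and the esplugin properties match what the build logic above reads, but the project and class names are made up:

apply plugin: 'elasticsearch.esplugin'

esplugin {
    description 'Example plugin illustrating the bundlePlugin/integTest wiring'
    classname 'org.example.ExamplePlugin'
}

With that in place, gradle bundlePlugin builds the distributable zip described above, gradle integTest installs it into a test cluster and runs the rest tests, and gradle run starts a node in the foreground with the plugin installed.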

View File

@ -27,7 +27,7 @@ import org.gradle.api.tasks.Input
class ClusterConfiguration {
@Input
String distribution = 'zip'
String distribution = 'integ-test-zip'
@Input
int numNodes = 1
@ -71,6 +71,8 @@ class ClusterConfiguration {
LinkedHashMap<String, Object> plugins = new LinkedHashMap<>()
List<Project> modules = new ArrayList<>()
LinkedHashMap<String, Object[]> setupCommands = new LinkedHashMap<>()
@Input
@ -93,6 +95,12 @@ class ClusterConfiguration {
plugins.put(name, pluginProject)
}
/** Add a module to the cluster. The project must be an esplugin and have a single zip default artifact. */
@Input
void module(Project moduleProject) {
modules.add(moduleProject)
}
@Input
void setupCommand(String name, Object... args) {
setupCommands.put(name, args)
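As a sketch of how a consuming build script exercises these settings (the plugin project path and setup command are illustrative, not taken from this commit):

integTest {
    cluster {
        numNodes = 2
        plugin 'example-plugin', project(':plugins:example-plugin')
        setupCommand 'installExample', 'bin/plugin', 'install', 'example'
    }
}

Leaving distribution unset now means the new integ-test-zip default is used.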

View File

@ -60,7 +60,12 @@ class ClusterFormationTasks {
/** Adds a dependency on the given distribution */
static void configureDistributionDependency(Project project, String distro) {
String elasticsearchVersion = VersionProperties.elasticsearch
String packaging = distro == 'tar' ? 'tar.gz' : distro
String packaging = distro
if (distro == 'tar') {
packaging = 'tar.gz'
} else if (distro == 'integ-test-zip') {
packaging = 'zip'
}
project.configurations {
elasticsearchDistro
}
@ -103,6 +108,12 @@ class ClusterFormationTasks {
setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)
// install modules
for (Project module : node.config.modules) {
String actionName = pluginTaskName('install', module.name, 'Module')
setup = configureInstallModuleTask(taskName(task, node, actionName), project, setup, node, module)
}
// install plugins
for (Map.Entry<String, Object> plugin : node.config.plugins.entrySet()) {
String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin')
@ -138,6 +149,7 @@ class ClusterFormationTasks {
by the source tree. If it isn't then Bad Things(TM) will happen. */
Task extract
switch (node.config.distribution) {
case 'integ-test-zip':
case 'zip':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
@ -286,6 +298,20 @@ class ClusterFormationTasks {
return copyPlugins
}
static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) {
if (node.config.distribution != 'integ-test-zip') {
throw new GradleException("Module ${module.path} not allowed be installed distributions other than integ-test-zip because they should already have all modules bundled!")
}
if (module.plugins.hasPlugin(PluginBuildPlugin) == false) {
throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin")
}
Copy installModule = project.tasks.create(name, Copy.class)
installModule.dependsOn(setup)
installModule.into(new File(node.homeDir, "modules/${module.name}"))
installModule.from({ project.zipTree(module.tasks.bundlePlugin.outputs.files.singleFile) })
return installModule
}
static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Object plugin) {
FileCollection pluginZip
if (plugin instanceof Project) {
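Modules, unlike plugins, may only be installed into the integ-test-zip distribution; anything else trips the GradleException above, since the full distributions already bundle their modules. A minimal sketch, with a made-up module path:

integTest {
    cluster {
        distribution = 'integ-test-zip'            // required when using module()
        module project(':modules:example-module')  // the module project must itself be an esplugin
    }
}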

View File

@ -173,6 +173,7 @@ class NodeInfo {
static File homeDir(File baseDir, String distro) {
String path
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'tar':
path = "elasticsearch-${VersionProperties.elasticsearch}"
@ -188,8 +189,8 @@ class NodeInfo {
}
static File confDir(File baseDir, String distro) {
String Path
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'tar':
return new File(homeDir(baseDir, distro), 'config')

View File

@ -31,55 +31,38 @@ import org.gradle.util.ConfigureUtil
* Runs integration tests, but first starts an ES cluster,
* and passes the ES cluster info as parameters to the tests.
*/
class RestIntegTestTask extends RandomizedTestingTask {
public class RestIntegTestTask extends RandomizedTestingTask {
ClusterConfiguration clusterConfig = new ClusterConfiguration()
/** Flag indicating whether the rest tests in the rest spec should be run. */
@Input
boolean includePackaged = false
static RestIntegTestTask configure(Project project) {
Map integTestOptions = [
name: 'integTest',
type: RestIntegTestTask,
dependsOn: 'testClasses',
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Runs rest tests against an elasticsearch cluster.'
]
RestIntegTestTask integTest = project.tasks.create(integTestOptions)
integTest.configure(BuildPlugin.commonTestConfig(project))
integTest.configure {
include '**/*IT.class'
systemProperty 'tests.rest.load_packaged', 'false'
}
RandomizedTestingTask test = project.tasks.findByName('test')
if (test != null) {
integTest.classpath = test.classpath
integTest.testClassesDir = test.testClassesDir
integTest.mustRunAfter(test)
}
integTest.mustRunAfter(project.precommit)
project.check.dependsOn(integTest)
public RestIntegTestTask() {
description = 'Runs rest tests against an elasticsearch cluster.'
group = JavaBasePlugin.VERIFICATION_GROUP
dependsOn(project.testClasses)
classpath = project.sourceSets.test.runtimeClasspath
testClassesDir = project.sourceSets.test.output.classesDir
// start with the common test configuration
configure(BuildPlugin.commonTestConfig(project))
// override/add more for rest tests
parallelism = '1'
include('**/*IT.class')
systemProperty('tests.rest.load_packaged', 'false')
// copy the rest spec/tests into the test resources
RestSpecHack.configureDependencies(project)
project.afterEvaluate {
integTest.dependsOn(RestSpecHack.configureTask(project, integTest.includePackaged))
dependsOn(RestSpecHack.configureTask(project, includePackaged))
systemProperty('tests.cluster', "localhost:${clusterConfig.baseTransportPort}")
}
return integTest
}
RestIntegTestTask() {
// this must run after all projects have been configured, so we know any project
// references can be accessed as fully configured
project.gradle.projectsEvaluated {
Task test = project.tasks.findByName('test')
if (test != null) {
mustRunAfter(test)
}
ClusterFormationTasks.setup(project, this, clusterConfig)
configure {
parallelism '1'
systemProperty 'tests.cluster', "localhost:${clusterConfig.baseTransportPort}"
}
}
}
@ -92,11 +75,11 @@ class RestIntegTestTask extends RandomizedTestingTask {
}
@Input
void cluster(Closure closure) {
public void cluster(Closure closure) {
ConfigureUtil.configure(closure, clusterConfig)
}
ClusterConfiguration getCluster() {
public ClusterConfiguration getCluster() {
return clusterConfig
}
}
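Since the task now wires up the rest spec copy and cluster formation itself at afterEvaluate/projectsEvaluated time, a consuming build only needs to configure the existing integTest task, for example (a sketch):

integTest {
    includePackaged = true   // also run the tests packaged with the rest spec
    cluster {
        numNodes = 1
    }
}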

View File

@ -28,12 +28,12 @@ import org.gradle.api.tasks.Copy
* currently must be available on the local filesystem. This class encapsulates
* setting up tasks to copy the rest spec api to test resources.
*/
class RestSpecHack {
public class RestSpecHack {
/**
* Sets dependencies needed to copy the rest spec.
* @param project The project to add rest spec dependency to
*/
static void configureDependencies(Project project) {
public static void configureDependencies(Project project) {
project.configurations {
restSpec
}
@ -48,7 +48,7 @@ class RestSpecHack {
* @param project The project to add the copy task to
* @param includePackagedTests true if the packaged tests should be copied, false otherwise
*/
static Task configureTask(Project project, boolean includePackagedTests) {
public static Task configureTask(Project project, boolean includePackagedTests) {
Map copyRestSpecProps = [
name : 'copyRestSpec',
type : Copy,
@ -65,7 +65,6 @@ class RestSpecHack {
project.idea {
module {
if (scopes.TEST != null) {
// TODO: need to add the TEST scope somehow for rest test plugin...
scopes.TEST.plus.add(project.configurations.restSpec)
}
}

View File

@ -18,22 +18,19 @@
*/
package org.elasticsearch.gradle.test
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.gradle.api.Plugin
import org.gradle.api.Project
/** Configures the build to have a rest integration test. */
class RestTestPlugin implements Plugin<Project> {
/** A plugin to add rest integration tests. Used for qa projects. */
public class RestTestPlugin implements Plugin<Project> {
@Override
void apply(Project project) {
public void apply(Project project) {
project.pluginManager.apply(StandaloneTestBasePlugin)
RandomizedTestingTask integTest = RestIntegTestTask.configure(project)
RestSpecHack.configureDependencies(project)
integTest.configure {
classpath = project.sourceSets.test.runtimeClasspath
testClassesDir project.sourceSets.test.output.classesDir
}
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.cluster.distribution = 'zip' // rest tests should run with the real zip
integTest.mustRunAfter(project.precommit)
project.check.dependsOn(integTest)
}
}
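In other words, a qa project that only runs rest tests now boils down to roughly the following, assuming the plugin id elasticsearch.rest-test used by the qa builds:

apply plugin: 'elasticsearch.rest-test'

integTest {
    cluster {
        numNodes = 1
    }
}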

View File

@ -2,13 +2,17 @@ package org.elasticsearch.gradle.test
import org.gradle.api.DefaultTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.internal.tasks.options.Option
import org.gradle.util.ConfigureUtil
class RunTask extends DefaultTask {
public class RunTask extends DefaultTask {
ClusterConfiguration clusterConfig = new ClusterConfiguration(baseHttpPort: 9200, baseTransportPort: 9300, daemonize: false)
RunTask() {
public RunTask() {
description = "Runs elasticsearch with '${project.path}'"
group = 'Verification'
project.afterEvaluate {
ClusterFormationTasks.setup(project, this, clusterConfig)
}
@ -22,11 +26,10 @@ class RunTask extends DefaultTask {
clusterConfig.debug = enabled;
}
static void configure(Project project) {
RunTask task = project.tasks.create(
name: 'run',
type: RunTask,
description: "Runs elasticsearch with '${project.path}'",
group: 'Verification')
/** Configure the cluster that will be run. */
@Override
public Task configure(Closure closure) {
ConfigureUtil.configure(closure, clusterConfig)
return this
}
}
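With configure(Closure) delegating to the cluster configuration, tuning and launching the task looks roughly like this; the debug flag is assumed to be exposed as --debug-jvm via the @Option imported above:

run {
    numNodes = 1
}

// gradle run             -> starts elasticsearch in the foreground on ports 9200/9300
// gradle run --debug-jvm -> same, but configured so a debugger can attach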

View File

@ -27,35 +27,26 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.plugins.ide.eclipse.model.EclipseClasspath
/** Configures the build to have a rest integration test. */
class StandaloneTestBasePlugin implements Plugin<Project> {
public class StandaloneTestBasePlugin implements Plugin<Project> {
@Override
void apply(Project project) {
public void apply(Project project) {
project.pluginManager.apply(JavaBasePlugin)
project.pluginManager.apply(RandomizedTestingPlugin)
BuildPlugin.globalBuildInfo(project)
BuildPlugin.configureRepositories(project)
// remove some unnecessary tasks for a qa test
project.tasks.removeAll { it.name in ['assemble', 'buildDependents'] }
// only setup tests to build
project.sourceSets {
test
}
project.dependencies {
testCompile "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}"
}
project.sourceSets.create('test')
project.dependencies.add('testCompile', "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}")
project.eclipse.classpath.sourceSets = [project.sourceSets.test]
project.eclipse.classpath.plusConfigurations = [project.configurations.testRuntime]
project.eclipse {
classpath {
sourceSets = [project.sourceSets.test]
plusConfigurations = [project.configurations.testRuntime]
}
}
PrecommitTasks.create(project, false)
project.check.dependsOn(project.precommit)
}

View File

@ -25,11 +25,11 @@ import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
/** Configures the build to have only unit tests. */
class StandaloneTestPlugin implements Plugin<Project> {
/** A plugin to add tests only. Used for QA tests that run arbitrary unit tests. */
public class StandaloneTestPlugin implements Plugin<Project> {
@Override
void apply(Project project) {
public void apply(Project project) {
project.pluginManager.apply(StandaloneTestBasePlugin)
Map testOptions = [
@ -41,10 +41,8 @@ class StandaloneTestPlugin implements Plugin<Project> {
]
RandomizedTestingTask test = project.tasks.create(testOptions)
test.configure(BuildPlugin.commonTestConfig(project))
test.configure {
classpath = project.sourceSets.test.runtimeClasspath
testClassesDir project.sourceSets.test.output.classesDir
}
test.classpath = project.sourceSets.test.runtimeClasspath
test.testClassesDir project.sourceSets.test.output.classesDir
test.mustRunAfter(project.precommit)
project.check.dependsOn(test)
}
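A qa project that only needs plain unit tests against the test framework then amounts to something like this (assuming the plugin id elasticsearch.standalone-test; the extra system property is purely illustrative):

apply plugin: 'elasticsearch.standalone-test'

test {
    systemProperty 'tests.security.manager', 'false'
}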

View File

@ -129,8 +129,4 @@ if (isEclipse == false || project.path == ":core-tests") {
}
check.dependsOn integTest
integTest.mustRunAfter test
RestSpecHack.configureDependencies(project)
Task copyRestSpec = RestSpecHack.configureTask(project, true)
integTest.dependsOn copyRestSpec
}

View File

@ -72,14 +72,14 @@ public class NodeInfo extends BaseNodeResponse {
private HttpInfo http;
@Nullable
private PluginsInfo plugins;
private PluginsAndModules plugins;
NodeInfo() {
}
public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings,
@Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsInfo plugins) {
@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) {
super(node);
this.version = version;
this.build = build;
@ -172,7 +172,7 @@ public class NodeInfo extends BaseNodeResponse {
}
@Nullable
public PluginsInfo getPlugins() {
public PluginsAndModules getPlugins() {
return this.plugins;
}
@ -217,7 +217,8 @@ public class NodeInfo extends BaseNodeResponse {
http = HttpInfo.readHttpInfo(in);
}
if (in.readBoolean()) {
plugins = PluginsInfo.readPluginsInfo(in);
plugins = new PluginsAndModules();
plugins.readFrom(in);
}
}

View File

@ -0,0 +1,115 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.node.info;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.PluginInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Information about plugins and modules
*/
public class PluginsAndModules implements Streamable, ToXContent {
private List<PluginInfo> plugins;
private List<PluginInfo> modules;
public PluginsAndModules() {
plugins = new ArrayList<>();
modules = new ArrayList<>();
}
/**
* Returns an ordered list based on plugin name
*/
public List<PluginInfo> getPluginInfos() {
List<PluginInfo> plugins = new ArrayList<>(this.plugins);
Collections.sort(plugins, (p1, p2) -> p1.getName().compareTo(p2.getName()));
return plugins;
}
/**
* Returns an ordered list based on module name
*/
public List<PluginInfo> getModuleInfos() {
List<PluginInfo> modules = new ArrayList<>(this.modules);
Collections.sort(modules, (p1, p2) -> p1.getName().compareTo(p2.getName()));
return modules;
}
public void addPlugin(PluginInfo info) {
plugins.add(info);
}
public void addModule(PluginInfo info) {
modules.add(info);
}
@Override
public void readFrom(StreamInput in) throws IOException {
if (plugins.isEmpty() == false || modules.isEmpty() == false) {
throw new IllegalStateException("instance is already populated");
}
int plugins_size = in.readInt();
for (int i = 0; i < plugins_size; i++) {
plugins.add(PluginInfo.readFromStream(in));
}
int modules_size = in.readInt();
for (int i = 0; i < modules_size; i++) {
modules.add(PluginInfo.readFromStream(in));
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeInt(plugins.size());
for (PluginInfo plugin : getPluginInfos()) {
plugin.writeTo(out);
}
out.writeInt(modules.size());
for (PluginInfo module : getModuleInfos()) {
module.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray("plugins");
for (PluginInfo pluginInfo : getPluginInfos()) {
pluginInfo.toXContent(builder, params);
}
builder.endArray();
// TODO: not ideal, make a better api for this (e.g. with jar metadata, and so on)
builder.startArray("modules");
for (PluginInfo moduleInfo : getModuleInfos()) {
moduleInfo.toXContent(builder, params);
}
builder.endArray();
return builder;
}
}
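A minimal sketch of the contract above, in Groovy for brevity; the PluginInfo instances are assumed to exist (their construction is not part of this diff):

def info = new PluginsAndModules()
info.addPlugin(pluginB)    // a PluginInfo named 'b-plugin'
info.addPlugin(pluginA)    // a PluginInfo named 'a-plugin'
info.addModule(moduleInfo) // modules are tracked separately from plugins
// the getters return name-sorted copies, so wire output and x-content rendering stay stable
assert info.getPluginInfos()*.getName() == ['a-plugin', 'b-plugin']
assert info.getModuleInfos()*.getName() == [moduleInfo.getName()]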

View File

@ -1,101 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.node.info;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.plugins.PluginInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class PluginsInfo implements Streamable, ToXContent {
static final class Fields {
static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins");
}
private List<PluginInfo> infos;
public PluginsInfo() {
infos = new ArrayList<>();
}
public PluginsInfo(int size) {
infos = new ArrayList<>(size);
}
/**
* @return an ordered list based on plugins name
*/
public List<PluginInfo> getInfos() {
Collections.sort(infos, new Comparator<PluginInfo>() {
@Override
public int compare(final PluginInfo o1, final PluginInfo o2) {
return o1.getName().compareTo(o2.getName());
}
});
return infos;
}
public void add(PluginInfo info) {
infos.add(info);
}
public static PluginsInfo readPluginsInfo(StreamInput in) throws IOException {
PluginsInfo infos = new PluginsInfo();
infos.readFrom(in);
return infos;
}
@Override
public void readFrom(StreamInput in) throws IOException {
int plugins_size = in.readInt();
for (int i = 0; i < plugins_size; i++) {
infos.add(PluginInfo.readFromStream(in));
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeInt(infos.size());
for (PluginInfo plugin : getInfos()) {
plugin.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray(Fields.PLUGINS);
for (PluginInfo pluginInfo : getInfos()) {
pluginInfo.toXContent(builder, params);
}
builder.endArray();
return builder;
}
}

View File

@ -74,7 +74,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
versions.add(nodeResponse.nodeInfo().getVersion());
process.addNodeStats(nodeResponse.nodeStats());
jvm.addNodeInfoStats(nodeResponse.nodeInfo(), nodeResponse.nodeStats());
plugins.addAll(nodeResponse.nodeInfo().getPlugins().getInfos());
plugins.addAll(nodeResponse.nodeInfo().getPlugins().getPluginInfos());
// now do the stats that should be deduped by hardware (implemented by ip deduping)
TransportAddress publishAddress = nodeResponse.nodeInfo().getTransport().address().publishAddress();

View File

@ -131,34 +131,48 @@ final class Security {
@SuppressForbidden(reason = "proper use of URL")
static Map<String,Policy> getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException {
Map<String,Policy> map = new HashMap<>();
// collect up lists of plugins and modules
List<Path> pluginsAndModules = new ArrayList<>();
if (Files.exists(environment.pluginsFile())) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
for (Path plugin : stream) {
Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
if (Files.exists(policyFile)) {
// first get a list of URLs for the plugins' jars:
// we resolve symlinks so the map is keyed on the normalized codebase name
List<URL> codebases = new ArrayList<>();
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
for (Path jar : jarStream) {
codebases.add(jar.toRealPath().toUri().toURL());
}
}
// parse the plugin's policy file into a set of permissions
Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));
// consult this policy for each of the plugin's jars:
for (URL url : codebases) {
if (map.put(url.getFile(), policy) != null) {
// just be paranoid ok?
throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
}
}
pluginsAndModules.add(plugin);
}
}
}
if (Files.exists(environment.modulesFile())) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.modulesFile())) {
for (Path plugin : stream) {
pluginsAndModules.add(plugin);
}
}
}
// now process each one
for (Path plugin : pluginsAndModules) {
Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
if (Files.exists(policyFile)) {
// first get a list of URLs for the plugins' jars:
// we resolve symlinks so the map is keyed on the normalized codebase name
List<URL> codebases = new ArrayList<>();
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
for (Path jar : jarStream) {
codebases.add(jar.toRealPath().toUri().toURL());
}
}
// parse the plugin's policy file into a set of permissions
Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));
// consult this policy for each of the plugin's jars:
for (URL url : codebases) {
if (map.put(url.getFile(), policy) != null) {
// just be paranoid ok?
throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
}
}
}
}
return Collections.unmodifiableMap(map);
}
@ -228,6 +242,7 @@ final class Security {
// read-only dirs
addPath(policy, "path.home", environment.binFile(), "read,readlink");
addPath(policy, "path.home", environment.libFile(), "read,readlink");
addPath(policy, "path.home", environment.modulesFile(), "read,readlink");
addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink");
addPath(policy, "path.conf", environment.configFile(), "read,readlink");
addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink");

View File

@ -125,7 +125,7 @@ public class TransportClient extends AbstractClient {
.put(CLIENT_TYPE_SETTING, CLIENT_TYPE)
.build();
PluginsService pluginsService = new PluginsService(settings, null, pluginClasses);
PluginsService pluginsService = new PluginsService(settings, null, null, pluginClasses);
this.settings = pluginsService.updatedSettings();
Version version = Version.CURRENT;

View File

@ -65,8 +65,8 @@ public class MacAddressProvider {
byte[] address = null;
try {
address = getMacAddress();
} catch( SocketException se ) {
logger.warn("Unable to get mac address, will use a dummy address", se);
} catch (Throwable t) {
logger.warn("Unable to get mac address, will use a dummy address", t);
// address will be set below
}

View File

@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
@ -41,7 +42,7 @@ import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.new
public class NodesFaultDetection extends FaultDetection {
public static final String PING_ACTION_NAME = "internal:discovery/zen/fd/ping";
public abstract static class Listener {
public void onNodeFailure(DiscoveryNode node, String reason) {}
@ -145,14 +146,18 @@ public class NodesFaultDetection extends FaultDetection {
}
private void notifyNodeFailure(final DiscoveryNode node, final String reason) {
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
for (Listener listener : listeners) {
listener.onNodeFailure(node, reason);
try {
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
for (Listener listener : listeners) {
listener.onNodeFailure(node, reason);
}
}
}
});
});
} catch (EsRejectedExecutionException ex) {
logger.trace("[node ] [{}] ignoring node failure (reason [{}]). Local node is shutting down", ex, node, reason);
}
}
private void notifyPingReceived(final PingRequest pingRequest) {

View File

@ -58,6 +58,8 @@ public class Environment {
private final Path pluginsFile;
private final Path modulesFile;
private final Path sharedDataFile;
/** location of bin/, used by plugin manager */
@ -157,6 +159,7 @@ public class Environment {
binFile = homeFile.resolve("bin");
libFile = homeFile.resolve("lib");
modulesFile = homeFile.resolve("modules");
}
/**
@ -275,6 +278,10 @@ public class Environment {
return libFile;
}
public Path modulesFile() {
return modulesFile;
}
public Path logsFile() {
return logsFile;
}

View File

@ -336,8 +336,6 @@ public class DocumentMapper implements ToXContent {
private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
assert mappingLock.isWriteLockedByCurrentThread();
// first ensure we don't have any incompatible new fields
mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, updateAllTypes);
// update mappers for this document type
Map<String, ObjectMapper> builder = new HashMap<>(this.objectMappers);
@ -356,6 +354,7 @@ public class DocumentMapper implements ToXContent {
public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
try (ReleasableLock lock = mappingWriteLock.acquire()) {
mapperService.checkMappersCompatibility(type, mapping, updateAllTypes);
final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes);
this.mapping.merge(mapping, mergeResult);
if (simulate == false) {

View File

@ -28,8 +28,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
@ -47,7 +45,6 @@ import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/** A parser for documents, given mappings from a DocumentMapper */
@ -716,37 +713,64 @@ class DocumentParser implements Closeable {
// The path of the dest field might be completely different from the current one so we need to reset it
context = context.overridePath(new ContentPath(0));
String[] paths = Strings.splitStringToArray(field, '.');
String fieldName = paths[paths.length-1];
ObjectMapper mapper = context.root();
String objectPath = "";
String fieldPath = field;
int posDot = field.lastIndexOf('.');
if (posDot > 0) {
objectPath = field.substring(0, posDot);
context.path().add(objectPath);
mapper = context.docMapper().objectMappers().get(objectPath);
fieldPath = field.substring(posDot + 1);
ObjectMapper[] mappers = new ObjectMapper[paths.length-1];
if (paths.length > 1) {
ObjectMapper parent = context.root();
for (int i = 0; i < paths.length-1; i++) {
mapper = context.docMapper().objectMappers().get(context.path().fullPathAsText(paths[i]));
if (mapper == null) {
// One mapping is missing, check if we are allowed to create a dynamic one.
ObjectMapper.Dynamic dynamic = parent.dynamic();
if (dynamic == null) {
dynamic = dynamicOrDefault(context.root().dynamic());
}
switch (dynamic) {
case STRICT:
throw new StrictDynamicMappingException(parent.fullPath(), paths[i]);
case TRUE:
Mapper.Builder builder = context.root().findTemplateBuilder(context, paths[i], "object");
if (builder == null) {
// if this is a non root object, then explicitly set the dynamic behavior if set
if (!(parent instanceof RootObjectMapper) && parent.dynamic() != ObjectMapper.Defaults.DYNAMIC) {
((ObjectMapper.Builder) builder).dynamic(parent.dynamic());
}
builder = MapperBuilders.object(paths[i]).enabled(true).pathType(parent.pathType());
}
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
mapper = (ObjectMapper) builder.build(builderContext);
if (mapper.nested() != ObjectMapper.Nested.NO) {
throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().fullPathAsText(paths[i]) + "]) through `copy_to`");
}
break;
case FALSE:
// Maybe we should log something to tell the user that the copy_to is ignored in this case.
break;
default:
throw new AssertionError("Unexpected dynamic type " + dynamic);
}
}
context.path().add(paths[i]);
mappers[i] = mapper;
parent = mapper;
}
}
if (mapper == null) {
//TODO: Create an object dynamically?
throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]");
}
ObjectMapper update = parseDynamicValue(context, mapper, fieldPath, context.parser().currentToken());
ObjectMapper update = parseDynamicValue(context, mapper, fieldName, context.parser().currentToken());
assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping
// propagate the update to the root
while (objectPath.length() > 0) {
String parentPath = "";
ObjectMapper parent = context.root();
posDot = objectPath.lastIndexOf('.');
if (posDot > 0) {
parentPath = objectPath.substring(0, posDot);
parent = context.docMapper().objectMappers().get(parentPath);
if (paths.length > 1) {
for (int i = paths.length - 2; i >= 0; i--) {
ObjectMapper parent = context.root();
if (i > 0) {
parent = mappers[i-1];
}
assert parent != null;
update = parent.mappingUpdate(update);
}
if (parent == null) {
throw new IllegalStateException("[" + objectPath + "] has no parent for path [" + parentPath + "]");
}
update = parent.mappingUpdate(update);
objectPath = parentPath;
}
context.addDynamicMappingsUpdate(update);
}

View File

@ -307,7 +307,6 @@ public abstract class FieldMapper extends Mapper {
if (ref.get().equals(fieldType()) == false) {
throw new IllegalStateException("Cannot overwrite field type reference to unequal reference");
}
ref.incrementAssociatedMappers();
this.fieldTypeRef = ref;
}
@ -380,11 +379,6 @@ public abstract class FieldMapper extends Mapper {
return;
}
boolean strict = this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false;
fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts, strict);
for (String conflict : subConflicts) {
mergeResult.addConflict(conflict);
}
multiFields.merge(mergeWith, mergeResult);
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.regex.Regex;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@ -38,18 +39,49 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
/** Full field name to field type */
private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;
/** Full field name to types containing a mapping for this full name. */
private final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
/** Index field name to field type */
private final CopyOnWriteHashMap<String, MappedFieldTypeReference> indexNameToFieldType;
/** Index field name to types containing a mapping for this index name. */
private final CopyOnWriteHashMap<String, Set<String>> indexNameToTypes;
/** Create a new empty instance. */
public FieldTypeLookup() {
fullNameToFieldType = new CopyOnWriteHashMap<>();
fullNameToTypes = new CopyOnWriteHashMap<>();
indexNameToFieldType = new CopyOnWriteHashMap<>();
indexNameToTypes = new CopyOnWriteHashMap<>();
}
private FieldTypeLookup(CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName, CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName) {
fullNameToFieldType = fullName;
indexNameToFieldType = indexName;
private FieldTypeLookup(
CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName,
CopyOnWriteHashMap<String, Set<String>> fullNameToTypes,
CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName,
CopyOnWriteHashMap<String, Set<String>> indexNameToTypes) {
this.fullNameToFieldType = fullName;
this.fullNameToTypes = fullNameToTypes;
this.indexNameToFieldType = indexName;
this.indexNameToTypes = indexNameToTypes;
}
private static CopyOnWriteHashMap<String, Set<String>> addType(CopyOnWriteHashMap<String, Set<String>> map, String key, String type) {
Set<String> types = map.get(key);
if (types == null) {
return map.copyAndPut(key, Collections.singleton(type));
} else if (types.contains(type)) {
// nothing to do
return map;
} else {
Set<String> newTypes = new HashSet<>(types.size() + 1);
newTypes.addAll(types);
newTypes.add(type);
assert newTypes.size() == types.size() + 1;
newTypes = Collections.unmodifiableSet(newTypes);
return map.copyAndPut(key, newTypes);
}
}
/**
@ -63,7 +95,9 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
throw new IllegalArgumentException("Default mappings should not be added to the lookup");
}
CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName = this.fullNameToFieldType;
CopyOnWriteHashMap<String, Set<String>> fullNameToTypes = this.fullNameToTypes;
CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName = this.indexNameToFieldType;
CopyOnWriteHashMap<String, Set<String>> indexNameToTypes = this.indexNameToTypes;
for (FieldMapper fieldMapper : newFieldMappers) {
MappedFieldType fieldType = fieldMapper.fieldType();
@ -91,8 +125,23 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
// this new field bridges between two existing field names (a full and index name), which we cannot support
throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName());
}
fullNameToTypes = addType(fullNameToTypes, fieldType.names().fullName(), type);
indexNameToTypes = addType(indexNameToTypes, fieldType.names().indexName(), type);
}
return new FieldTypeLookup(fullName, fullNameToTypes, indexName, indexNameToTypes);
}
private static boolean beStrict(String type, Set<String> types, boolean updateAllTypes) {
assert types.size() >= 1;
if (updateAllTypes) {
return false;
} else if (types.size() == 1 && types.contains(type)) {
// we are implicitly updating all types
return false;
} else {
return true;
}
return new FieldTypeLookup(fullName, indexName);
}
/**
@ -100,14 +149,15 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
* If any are not compatible, an IllegalArgumentException is thrown.
* If updateAllTypes is true, only basic compatibility is checked.
*/
public void checkCompatibility(Collection<FieldMapper> newFieldMappers, boolean updateAllTypes) {
for (FieldMapper fieldMapper : newFieldMappers) {
public void checkCompatibility(String type, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
for (FieldMapper fieldMapper : fieldMappers) {
MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
if (ref != null) {
List<String> conflicts = new ArrayList<>();
ref.get().checkTypeName(fieldMapper.fieldType(), conflicts);
if (conflicts.isEmpty()) { // only check compat if they are the same type
boolean strict = updateAllTypes == false;
final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
boolean strict = beStrict(type, types, updateAllTypes);
ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
}
if (conflicts.isEmpty() == false) {
@ -121,7 +171,8 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
List<String> conflicts = new ArrayList<>();
indexNameRef.get().checkTypeName(fieldMapper.fieldType(), conflicts);
if (conflicts.isEmpty()) { // only check compat if they are the same type
boolean strict = updateAllTypes == false;
final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
boolean strict = beStrict(type, types, updateAllTypes);
indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
}
if (conflicts.isEmpty() == false) {
@ -138,6 +189,15 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
return ref.get();
}
/** Get the set of types that have a mapping for the given field. */
public Set<String> getTypes(String field) {
Set<String> types = fullNameToTypes.get(field);
if (types == null) {
types = Collections.emptySet();
}
return types;
}
/** Returns the field type for the given index name */
public MappedFieldType getByIndexName(String field) {
MappedFieldTypeReference ref = indexNameToFieldType.get(field);
@ -145,6 +205,15 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
return ref.get();
}
/** Get the set of types that have a mapping for the given field. */
public Set<String> getTypesByIndexName(String field) {
Set<String> types = indexNameToTypes.get(field);
if (types == null) {
types = Collections.emptySet();
}
return types;
}
/**
* Returns a list of the index names of a simple match regex like pattern against full name and index name.
*/

View File

@ -23,12 +23,10 @@ package org.elasticsearch.index.mapper;
*/
public class MappedFieldTypeReference {
private MappedFieldType fieldType; // the current field type this reference points to
private int numAssociatedMappers;
public MappedFieldTypeReference(MappedFieldType fieldType) {
fieldType.freeze(); // ensure frozen
this.fieldType = fieldType;
this.numAssociatedMappers = 1;
}
public MappedFieldType get() {
@ -40,11 +38,4 @@ public class MappedFieldTypeReference {
this.fieldType = fieldType;
}
public int getNumAssociatedMappers() {
return numAssociatedMappers;
}
public void incrementAssociatedMappers() {
++numAssociatedMappers;
}
}

View File

@ -33,6 +33,7 @@ import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
@ -260,13 +261,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
assert result.hasConflicts() == false; // we already simulated
return oldMapper;
} else {
List<ObjectMapper> newObjectMappers = new ArrayList<>();
List<FieldMapper> newFieldMappers = new ArrayList<>();
for (MetadataFieldMapper metadataMapper : mapper.mapping().metadataMappers) {
newFieldMappers.add(metadataMapper);
}
MapperUtils.collect(mapper.mapping().root, newObjectMappers, newFieldMappers);
checkNewMappersCompatibility(newObjectMappers, newFieldMappers, updateAllTypes);
Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> newMappers = checkMappersCompatibility(
mapper.type(), mapper.mapping(), updateAllTypes);
Collection<ObjectMapper> newObjectMappers = newMappers.v1();
Collection<FieldMapper> newFieldMappers = newMappers.v2();
addMappers(mapper.type(), newObjectMappers, newFieldMappers);
for (DocumentTypeListener typeListener : typeListeners) {
@ -302,9 +300,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return true;
}
protected void checkNewMappersCompatibility(Collection<ObjectMapper> newObjectMappers, Collection<FieldMapper> newFieldMappers, boolean updateAllTypes) {
protected void checkMappersCompatibility(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
assert mappingLock.isWriteLockedByCurrentThread();
for (ObjectMapper newObjectMapper : newObjectMappers) {
for (ObjectMapper newObjectMapper : objectMappers) {
ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath());
if (existingObjectMapper != null) {
MergeResult result = new MergeResult(true, updateAllTypes);
@ -315,7 +313,19 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
}
fieldTypes.checkCompatibility(newFieldMappers, updateAllTypes);
fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes);
}
protected Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> checkMappersCompatibility(
String type, Mapping mapping, boolean updateAllTypes) {
List<ObjectMapper> objectMappers = new ArrayList<>();
List<FieldMapper> fieldMappers = new ArrayList<>();
for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
fieldMappers.add(metadataMapper);
}
MapperUtils.collect(mapping.root, objectMappers, fieldMappers);
checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes);
return new Tuple<>(objectMappers, fieldMappers);
}
protected void addMappers(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {

View File

@ -135,6 +135,15 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
super(ref);
}
@Override
public void checkCompatibility(MappedFieldType other,
List<String> conflicts, boolean strict) {
super.checkCompatibility(other, conflicts, strict);
if (numericPrecisionStep() != other.numericPrecisionStep()) {
conflicts.add("mapper [" + names().fullName() + "] has different [precision_step] values");
}
}
public abstract NumberFieldType clone();
@Override
@ -251,11 +260,6 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
return;
}
NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
if (this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false) {
if (fieldType().numericPrecisionStep() != nfmMergeWith.fieldType().numericPrecisionStep()) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] is used by multiple types. Set update_all_types to true to update precision_step across all types.");
}
}
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
this.includeInAll = nfmMergeWith.includeInAll;

View File

@ -1034,7 +1034,7 @@ public class IndexShard extends AbstractIndexShardComponent {
boolean wasActive = active.getAndSet(false);
if (wasActive) {
updateBufferSize(IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
logger.debug("shard is now inactive");
logger.debug("marking shard as inactive (inactive_time=[{}]) indexing wise", inactiveTime);
indexEventListener.onShardInactive(this);
}
}

View File

@ -19,7 +19,6 @@
package org.elasticsearch.indices.memory;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -33,7 +32,6 @@ import org.elasticsearch.index.engine.FlushNotAllowedEngineException;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.threadpool.ThreadPool;
@ -200,159 +198,57 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
return translogBuffer;
}
protected List<ShardId> availableShards() {
ArrayList<ShardId> list = new ArrayList<>();
protected List<IndexShard> availableShards() {
List<IndexShard> availableShards = new ArrayList<>();
for (IndexService indexService : indicesService) {
for (IndexShard indexShard : indexService) {
if (shardAvailable(indexShard)) {
list.add(indexShard.shardId());
for (IndexShard shard : indexService) {
if (shardAvailable(shard)) {
availableShards.add(shard);
}
}
}
return list;
return availableShards;
}
/** returns true if shard exists and is available for updates */
protected boolean shardAvailable(ShardId shardId) {
return shardAvailable(getShard(shardId));
}
/** returns true if shard exists and is available for updates */
protected boolean shardAvailable(@Nullable IndexShard shard) {
protected boolean shardAvailable(IndexShard shard) {
// shadow replica doesn't have an indexing buffer
return shard != null && shard.canIndex() && CAN_UPDATE_INDEX_BUFFER_STATES.contains(shard.state());
}
/** gets an {@link IndexShard} instance for the given shard. returns null if the shard doesn't exist */
protected IndexShard getShard(ShardId shardId) {
IndexService indexService = indicesService.indexService(shardId.index().name());
if (indexService != null) {
IndexShard indexShard = indexService.getShardOrNull(shardId.id());
return indexShard;
}
return null;
return shard.canIndex() && CAN_UPDATE_INDEX_BUFFER_STATES.contains(shard.state());
}
/** set new indexing and translog buffers on this shard. this may cause the shard to refresh to free up heap. */
protected void updateShardBuffers(ShardId shardId, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
final IndexShard shard = getShard(shardId);
if (shard != null) {
try {
shard.updateBufferSize(shardIndexingBufferSize, shardTranslogBufferSize);
} catch (EngineClosedException e) {
// ignore
} catch (FlushNotAllowedEngineException e) {
// ignore
} catch (Exception e) {
logger.warn("failed to set shard {} index buffer to [{}]", e, shardId, shardIndexingBufferSize);
}
protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
try {
shard.updateBufferSize(shardIndexingBufferSize, shardTranslogBufferSize);
} catch (EngineClosedException | FlushNotAllowedEngineException e) {
// ignore
} catch (Exception e) {
logger.warn("failed to set shard {} index buffer to [{}]", e, shard.shardId(), shardIndexingBufferSize);
}
}
/** returns {@link IndexShard#getActive} if the shard exists, else null */
protected Boolean getShardActive(ShardId shardId) {
final IndexShard indexShard = getShard(shardId);
if (indexShard == null) {
return null;
}
return indexShard.getActive();
}
/** check if any shards active status changed, now. */
public void forceCheck() {
statusChecker.run();
}
class ShardsIndicesStatusChecker implements Runnable {
// True if the shard was active last time we checked
private final Map<ShardId,Boolean> shardWasActive = new HashMap<>();
@Override
public synchronized void run() {
EnumSet<ShardStatusChangeType> changes = purgeDeletedAndClosedShards();
updateShardStatuses(changes);
if (changes.isEmpty() == false) {
// Something changed: recompute indexing buffers:
calcAndSetShardBuffers("[" + changes + "]");
}
}
/**
* goes through all existing shards and check whether there are changes in their active status
*/
private void updateShardStatuses(EnumSet<ShardStatusChangeType> changes) {
for (ShardId shardId : availableShards()) {
// Is the shard active now?
Boolean isActive = getShardActive(shardId);
if (isActive == null) {
// shard was closed..
continue;
}
// Was the shard active last time we checked?
Boolean wasActive = shardWasActive.get(shardId);
if (wasActive == null) {
// First time we are seeing this shard
shardWasActive.put(shardId, isActive);
changes.add(ShardStatusChangeType.ADDED);
} else if (isActive) {
// Shard is active now
if (wasActive == false) {
// Shard became active itself, since we last checked (due to new indexing op arriving)
changes.add(ShardStatusChangeType.BECAME_ACTIVE);
logger.debug("marking shard {} as active indexing wise", shardId);
shardWasActive.put(shardId, true);
} else if (checkIdle(shardId) == Boolean.TRUE) {
// Make shard inactive now
changes.add(ShardStatusChangeType.BECAME_INACTIVE);
shardWasActive.put(shardId, false);
}
}
}
}
/**
* purge any existing statuses that are no longer updated
*
* @return the changes applied
*/
private EnumSet<ShardStatusChangeType> purgeDeletedAndClosedShards() {
EnumSet<ShardStatusChangeType> changes = EnumSet.noneOf(ShardStatusChangeType.class);
Iterator<ShardId> statusShardIdIterator = shardWasActive.keySet().iterator();
while (statusShardIdIterator.hasNext()) {
ShardId shardId = statusShardIdIterator.next();
if (shardAvailable(shardId) == false) {
changes.add(ShardStatusChangeType.DELETED);
statusShardIdIterator.remove();
}
}
return changes;
}
private void calcAndSetShardBuffers(String reason) {
// Count how many shards are now active:
int activeShardCount = 0;
for (Map.Entry<ShardId,Boolean> ent : shardWasActive.entrySet()) {
if (ent.getValue()) {
activeShardCount++;
List<IndexShard> availableShards = availableShards();
List<IndexShard> activeShards = new ArrayList<>();
for (IndexShard shard : availableShards) {
if (!checkIdle(shard)) {
activeShards.add(shard);
}
}
int activeShardCount = activeShards.size();
// TODO: we could be smarter here by taking into account how RAM the IndexWriter on each shard
// is actually using (using IW.ramBytesUsed), so that small indices (e.g. Marvel) would not
// get the same indexing buffer as large indices. But it quickly gets tricky...
if (activeShardCount == 0) {
logger.debug("no active shards (reason={})", reason);
return;
}
@ -372,13 +268,10 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
shardTranslogBufferSize = maxShardTranslogBufferSize;
}
logger.debug("recalculating shard indexing buffer (reason={}), total is [{}] with [{}] active shards, each shard set to indexing=[{}], translog=[{}]", reason, indexingBuffer, activeShardCount, shardIndexingBufferSize, shardTranslogBufferSize);
logger.debug("recalculating shard indexing buffer, total is [{}] with [{}] active shards, each shard set to indexing=[{}], translog=[{}]", indexingBuffer, activeShardCount, shardIndexingBufferSize, shardTranslogBufferSize);
for (Map.Entry<ShardId,Boolean> ent : shardWasActive.entrySet()) {
if (ent.getValue()) {
// This shard is active
updateShardBuffers(ent.getKey(), shardIndexingBufferSize, shardTranslogBufferSize);
}
for (IndexShard shard : activeShards) {
updateShardBuffers(shard, shardIndexingBufferSize, shardTranslogBufferSize);
}
}
}
@ -387,38 +280,17 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
return System.nanoTime();
}
/** ask this shard to check now whether it is inactive, and reduce its indexing and translog buffers if so. returns Boolean.TRUE if
* it did deactivate, Boolean.FALSE if it did not, and null if the shard is unknown */
protected Boolean checkIdle(ShardId shardId) {
String ignoreReason; // eclipse compiler does not know it is really final
final IndexShard shard = getShard(shardId);
if (shard != null) {
try {
if (shard.checkIdle()) {
logger.debug("marking shard {} as inactive (inactive_time[{}]) indexing wise",
shardId,
shard.getInactiveTime());
return Boolean.TRUE;
}
return Boolean.FALSE;
} catch (EngineClosedException e) {
// ignore
ignoreReason = "EngineClosedException";
} catch (FlushNotAllowedEngineException e) {
// ignore
ignoreReason = "FlushNotAllowedEngineException";
}
} else {
ignoreReason = "shard not found";
/**
* ask this shard to check now whether it is inactive, and reduce its indexing and translog buffers if so.
* returns false if the shard is not idle, otherwise true
*/
protected boolean checkIdle(IndexShard shard) {
try {
return shard.checkIdle();
} catch (EngineClosedException | FlushNotAllowedEngineException e) {
logger.trace("ignore [{}] while marking shard {} as inactive", e.getClass().getSimpleName(), shard.shardId());
return true;
}
if (ignoreReason != null) {
logger.trace("ignore [{}] while marking shard {} as inactive", ignoreReason, shardId);
}
return null;
}
private static enum ShardStatusChangeType {
ADDED, DELETED, BECAME_ACTIVE, BECAME_INACTIVE
}
@Override

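The hunk above elides how the per-shard sizes are derived from the node-wide budget. A minimal sketch of that budgeting step, assuming the min/max limits come from the controller's settings (the helper name and parameters here are illustrative, not the exact fields of the class):

    import org.elasticsearch.common.unit.ByteSizeValue;

    // Hedged sketch: split the node-wide indexing budget evenly across active shards
    // and clamp each slice to per-shard min/max limits (limit values assumed to come from settings).
    static ByteSizeValue perShardBuffer(ByteSizeValue total, int activeShardCount,
                                        ByteSizeValue min, ByteSizeValue max) {
        long slice = total.bytes() / activeShardCount;
        slice = Math.max(slice, min.bytes());
        slice = Math.min(slice, max.bytes());
        return new ByteSizeValue(slice);
    }

updateShardBuffers is then invoked once per active shard with the resulting indexing and translog slices, as the loop at the end of calcAndSetShardBuffers shows.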
View File

@ -147,7 +147,7 @@ public class Node implements Releasable {
tmpEnv.configFile(), Arrays.toString(tmpEnv.dataFiles()), tmpEnv.logsFile(), tmpEnv.pluginsFile());
}
this.pluginsService = new PluginsService(tmpSettings, tmpEnv.pluginsFile(), classpathPlugins);
this.pluginsService = new PluginsService(tmpSettings, tmpEnv.modulesFile(), tmpEnv.pluginsFile(), classpathPlugins);
this.settings = pluginsService.updatedSettings();
// create the environment based on the finalized (processed) view of the settings
this.environment = new Environment(this.settings());

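Callers that load plugins purely from the classpath (for example tests or the transport client) would pass null for the new modules directory, just as they already do for the plugins directory; a minimal sketch, with settings and classpathPlugins assumed to be in scope:

    // Hedged sketch: classpath-only plugin loading, no modules/ or plugins/ directory scanned.
    PluginsService pluginsService = new PluginsService(settings, null, null, classpathPlugins);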
View File

@ -71,7 +71,7 @@ public abstract class Plugin {
}
/**
* Called before a new index is created on a node. The given module can be used to regsiter index-leve
* Called before a new index is created on a node. The given module can be used to register index-level
* extensions.
*/
public void onIndexModule(IndexModule indexModule) {}
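For illustration, a hedged sketch of a plugin overriding the new hook; the plugin name, description, and the comment about what to register are placeholders rather than a prescribed pattern:

    // Hypothetical plugin; only the onIndexModule override is the point here.
    public class ExamplePlugin extends Plugin {
        @Override
        public String name() {
            return "example-plugin";
        }

        @Override
        public String description() {
            return "registers index-level extensions when an index module is created";
        }

        @Override
        public void onIndexModule(IndexModule indexModule) {
            // register index-level extensions here, e.g. an IndexEventListener
        }
    }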

View File

@ -66,6 +66,10 @@ public class PluginManager {
"plugin",
"plugin.bat",
"service.bat"));
static final Set<String> MODULES = unmodifiableSet(newHashSet(
"lang-expression",
"lang-groovy"));
static final Set<String> OFFICIAL_PLUGINS = unmodifiableSet(newHashSet(
"analysis-icu",
@ -78,8 +82,6 @@ public class PluginManager {
"discovery-ec2",
"discovery-gce",
"discovery-multicast",
"lang-expression",
"lang-groovy",
"lang-javascript",
"lang-python",
"mapper-attachments",
@ -221,6 +223,12 @@ public class PluginManager {
PluginInfo info = PluginInfo.readFromProperties(root);
terminal.println(VERBOSE, "%s", info);
// don't let the user install a module as if it were a plugin...
// (modules may unavoidably be in maven central and are packaged up the same way)
if (MODULES.contains(info.getName())) {
throw new IOException("plugin '" + info.getName() + "' cannot be installed like this, it is a system module");
}
// update name in handle based on 'name' property found in descriptor file
pluginHandle = new PluginHandle(info.getName(), pluginHandle.version, pluginHandle.user);
final Path extractLocation = pluginHandle.extractedDir(environment);

View File

@ -25,9 +25,8 @@ import org.apache.lucene.analysis.util.TokenizerFactory;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
@ -39,10 +38,7 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexEventListener;
import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@ -50,6 +46,7 @@ import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
@ -69,10 +66,10 @@ import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
public class PluginsService extends AbstractComponent {
/**
* We keep around a list of plugins
* We keep around a list of plugins and modules
*/
private final List<Tuple<PluginInfo, Plugin>> plugins;
private final PluginsInfo info;
private final PluginsAndModules info;
private final Map<Plugin, List<OnModuleReference>> onModuleReferences;
@ -89,13 +86,15 @@ public class PluginsService extends AbstractComponent {
/**
* Constructs a new PluginService
* @param settings The settings of the system
* @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem
* @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem
* @param classpathPlugins Plugins that exist in the classpath which should be loaded
*/
public PluginsService(Settings settings, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
public PluginsService(Settings settings, Path modulesDirectory, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
super(settings);
info = new PluginsAndModules();
List<Tuple<PluginInfo, Plugin>> tupleBuilder = new ArrayList<>();
List<Tuple<PluginInfo, Plugin>> pluginsLoaded = new ArrayList<>();
// first we load plugins that are on the classpath. this is for tests and transport clients
for (Class<? extends Plugin> pluginClass : classpathPlugins) {
@ -104,24 +103,39 @@ public class PluginsService extends AbstractComponent {
if (logger.isTraceEnabled()) {
logger.trace("plugin loaded from classpath [{}]", pluginInfo);
}
tupleBuilder.add(new Tuple<>(pluginInfo, plugin));
pluginsLoaded.add(new Tuple<>(pluginInfo, plugin));
info.addPlugin(pluginInfo);
}
// load modules
if (modulesDirectory != null) {
try {
List<Bundle> bundles = getModuleBundles(modulesDirectory);
List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
pluginsLoaded.addAll(loaded);
for (Tuple<PluginInfo, Plugin> module : loaded) {
info.addModule(module.v1());
}
} catch (IOException ex) {
throw new IllegalStateException("Unable to initialize modules", ex);
}
}
// now, find all the ones that are in plugins/
if (pluginsDirectory != null) {
try {
List<Bundle> bundles = getPluginBundles(pluginsDirectory);
tupleBuilder.addAll(loadBundles(bundles));
List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
pluginsLoaded.addAll(loaded);
for (Tuple<PluginInfo, Plugin> plugin : loaded) {
info.addPlugin(plugin.v1());
}
} catch (IOException ex) {
throw new IllegalStateException("Unable to initialize plugins", ex);
}
}
plugins = Collections.unmodifiableList(tupleBuilder);
info = new PluginsInfo();
for (Tuple<PluginInfo, Plugin> tuple : plugins) {
info.add(tuple.v1());
}
plugins = Collections.unmodifiableList(pluginsLoaded);
// We need to build a List of jvm and site plugins for checking mandatory plugins
Map<String, Plugin> jvmPlugins = new HashMap<>();
@ -151,7 +165,18 @@ public class PluginsService extends AbstractComponent {
}
}
logger.info("loaded {}, sites {}", jvmPlugins.keySet(), sitePlugins);
// we don't log jars in lib/; we really shouldn't log modules either,
// but for now: just be transparent so we can debug any potential issues
Set<String> moduleNames = new HashSet<>();
Set<String> jvmPluginNames = new HashSet<>();
for (PluginInfo moduleInfo : info.getModuleInfos()) {
moduleNames.add(moduleInfo.getName());
}
for (PluginInfo pluginInfo : info.getPluginInfos()) {
jvmPluginNames.add(pluginInfo.getName());
}
logger.info("modules {}, plugins {}, sites {}", moduleNames, jvmPluginNames, sitePlugins);
Map<Plugin, List<OnModuleReference>> onModuleReferences = new HashMap<>();
for (Plugin plugin : jvmPlugins.values()) {
@ -160,6 +185,10 @@ public class PluginsService extends AbstractComponent {
if (!method.getName().equals("onModule")) {
continue;
}
// this is a deprecated final method, so all Plugin subclasses have it
if (method.getParameterTypes().length == 1 && method.getParameterTypes()[0].equals(IndexModule.class)) {
continue;
}
if (method.getParameterTypes().length == 0 || method.getParameterTypes().length > 1) {
logger.warn("Plugin: {} implementing onModule with no parameters or more than one parameter", plugin.name());
continue;
@ -178,7 +207,7 @@ public class PluginsService extends AbstractComponent {
this.onModuleReferences = Collections.unmodifiableMap(onModuleReferences);
}
public List<Tuple<PluginInfo, Plugin>> plugins() {
private List<Tuple<PluginInfo, Plugin>> plugins() {
return plugins;
}
@ -249,12 +278,12 @@ public class PluginsService extends AbstractComponent {
}
}
/**
* Get information about plugins (jvm and site plugins).
* Get information about plugins and modules
*/
public PluginsInfo info() {
public PluginsAndModules info() {
return info;
}
// a "bundle" is a group of plugins in a single classloader
// really should be 1-1, but we are not so fortunate
static class Bundle {
@ -262,6 +291,40 @@ public class PluginsService extends AbstractComponent {
List<URL> urls = new ArrayList<>();
}
// similar in impl to getPluginBundles, but DO NOT try to make them share code.
// we don't need to inherit all the leniency, and things are different enough.
static List<Bundle> getModuleBundles(Path modulesDirectory) throws IOException {
// damn leniency
if (Files.notExists(modulesDirectory)) {
return Collections.emptyList();
}
List<Bundle> bundles = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(modulesDirectory)) {
for (Path module : stream) {
if (FileSystemUtils.isHidden(module)) {
continue; // skip over .DS_Store etc
}
PluginInfo info = PluginInfo.readFromProperties(module);
if (!info.isJvm()) {
throw new IllegalStateException("modules must be jvm plugins: " + info);
}
if (!info.isIsolated()) {
throw new IllegalStateException("modules must be isolated: " + info);
}
Bundle bundle = new Bundle();
bundle.plugins.add(info);
// gather urls for jar files
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(module, "*.jar")) {
for (Path jar : jarStream) {
bundle.urls.add(jar.toUri().toURL());
}
}
bundles.add(bundle);
}
}
return bundles;
}
static List<Bundle> getPluginBundles(Path pluginsDirectory) throws IOException {
ESLogger logger = Loggers.getLogger(PluginsService.class);
@ -269,7 +332,7 @@ public class PluginsService extends AbstractComponent {
if (!isAccessibleDirectory(pluginsDirectory, logger)) {
return Collections.emptyList();
}
List<Bundle> bundles = new ArrayList<>();
// a special purgatory for plugins that directly depend on each other
bundles.add(new Bundle());
@ -281,7 +344,14 @@ public class PluginsService extends AbstractComponent {
continue;
}
logger.trace("--- adding plugin [{}]", plugin.toAbsolutePath());
PluginInfo info = PluginInfo.readFromProperties(plugin);
final PluginInfo info;
try {
info = PluginInfo.readFromProperties(plugin);
} catch (IOException e) {
throw new IllegalStateException("Could not load plugin descriptor for existing plugin ["
+ plugin.getFileName() + "]. Was the plugin built before 2.0?", e);
}
List<URL> urls = new ArrayList<>();
if (info.isJvm()) {
// a jvm plugin: gather urls for jar files
@ -302,7 +372,7 @@ public class PluginsService extends AbstractComponent {
bundle.urls.addAll(urls);
}
}
return bundles;
}
@ -320,7 +390,7 @@ public class PluginsService extends AbstractComponent {
} catch (Exception e) {
throw new IllegalStateException("failed to load bundle " + bundle.urls + " due to jar hell", e);
}
// create a child to load the plugins in this bundle
ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader());
for (PluginInfo pluginInfo : bundle.plugins) {

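To make the new loading path concrete, this is roughly the on-disk layout getModuleBundles expects under the distribution's modules directory; the file names below follow the standard plugin descriptor conventions and should be read as an assumed example:

    modules/
      lang-groovy/
        plugin-descriptor.properties   # must describe a jvm (jvm=true), isolated (isolated=true) plugin
        lang-groovy-<version>.jar      # every *.jar in the module directory is added to its classloader

Unlike the plugins path, there is no shared "purgatory" bundle here: each module gets its own Bundle and therefore its own classloader.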
View File

@ -45,9 +45,6 @@ public class RestForceMergeAction extends BaseRestHandler {
super(settings, controller, client);
controller.registerHandler(POST, "/_forcemerge", this);
controller.registerHandler(POST, "/{index}/_forcemerge", this);
controller.registerHandler(GET, "/_forcemerge", this);
controller.registerHandler(GET, "/{index}/_forcemerge", this);
}
@Override

View File

@ -95,7 +95,7 @@ public class RestPluginsAction extends AbstractCatAction {
for (DiscoveryNode node : nodes) {
NodeInfo info = nodesInfo.getNodesMap().get(node.id());
for (PluginInfo pluginInfo : info.getPlugins().getInfos()) {
for (PluginInfo pluginInfo : info.getPlugins().getPluginInfos()) {
table.startRow();
table.addCell(node.id());
table.addCell(node.name());

View File

@ -0,0 +1,171 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script;
import java.security.BasicPermission;
import java.security.Permission;
import java.security.PermissionCollection;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
/**
* Checked by scripting engines to allow loading a java class.
* <p>
* Examples:
* <p>
* Allow permission to {@code java.util.List}
* <pre>permission org.elasticsearch.script.ClassPermission "java.util.List";</pre>
* Allow permission to classes underneath {@code java.util} (and its subpackages such as {@code java.util.zip})
* <pre>permission org.elasticsearch.script.ClassPermission "java.util.*";</pre>
* Allow permission to standard predefined list of basic classes (see list below)
* <pre>permission org.elasticsearch.script.ClassPermission "&lt;&lt;STANDARD&gt;&gt;";</pre>
* Allow permission to all classes
* <pre>permission org.elasticsearch.script.ClassPermission "*";</pre>
* <p>
* Set of classes (allowed by special value <code>&lt;&lt;STANDARD&gt;&gt;</code>):
* <ul>
* <li>{@link java.lang.Boolean}</li>
* <li>{@link java.lang.Byte}</li>
* <li>{@link java.lang.Character}</li>
* <li>{@link java.lang.Double}</li>
* <li>{@link java.lang.Integer}</li>
* <li>{@link java.lang.Long}</li>
* <li>{@link java.lang.Math}</li>
* <li>{@link java.lang.Object}</li>
* <li>{@link java.lang.Short}</li>
* <li>{@link java.lang.String}</li>
* <li>{@link java.math.BigDecimal}</li>
* <li>{@link java.util.ArrayList}</li>
* <li>{@link java.util.Arrays}</li>
* <li>{@link java.util.Date}</li>
* <li>{@link java.util.HashMap}</li>
* <li>{@link java.util.HashSet}</li>
* <li>{@link java.util.Iterator}</li>
* <li>{@link java.util.List}</li>
* <li>{@link java.util.Map}</li>
* <li>{@link java.util.Set}</li>
* <li>{@link java.util.UUID}</li>
* <li>{@link org.joda.time.DateTime}</li>
* <li>{@link org.joda.time.DateTimeUtils}</li>
* <li>{@link org.joda.time.DateTimeZone}</li>
* <li>{@link org.joda.time.Instant}</li>
* </ul>
*/
public final class ClassPermission extends BasicPermission {
private static final long serialVersionUID = 3530711429252193884L;
public static final String STANDARD = "<<STANDARD>>";
/** Typical set of classes for scripting: basic data types, math, dates, and simple collections */
// this is the list from the old groovy sandbox impl (+ some things like String, Iterator, etc that were missing)
public static final Set<String> STANDARD_CLASSES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
// jdk classes
java.lang.Boolean.class.getName(),
java.lang.Byte.class.getName(),
java.lang.Character.class.getName(),
java.lang.Double.class.getName(),
java.lang.Integer.class.getName(),
java.lang.Long.class.getName(),
java.lang.Math.class.getName(),
java.lang.Object.class.getName(),
java.lang.Short.class.getName(),
java.lang.String.class.getName(),
java.math.BigDecimal.class.getName(),
java.util.ArrayList.class.getName(),
java.util.Arrays.class.getName(),
java.util.Date.class.getName(),
java.util.HashMap.class.getName(),
java.util.HashSet.class.getName(),
java.util.Iterator.class.getName(),
java.util.List.class.getName(),
java.util.Map.class.getName(),
java.util.Set.class.getName(),
java.util.UUID.class.getName(),
// joda-time
org.joda.time.DateTime.class.getName(),
org.joda.time.DateTimeUtils.class.getName(),
org.joda.time.DateTimeZone.class.getName(),
org.joda.time.Instant.class.getName()
)));
/**
* Creates a new ClassPermission object.
*
* @param name class to grant permission to
*/
public ClassPermission(String name) {
super(name);
}
/**
* Creates a new ClassPermission object.
* This constructor exists for use by the {@code Policy} object to instantiate new Permission objects.
*
* @param name class to grant permission to
* @param actions ignored
*/
public ClassPermission(String name, String actions) {
this(name);
}
@Override
public boolean implies(Permission p) {
// check for a special value of STANDARD to imply the basic set
if (p != null && p.getClass() == getClass()) {
ClassPermission other = (ClassPermission) p;
if (STANDARD.equals(getName()) && STANDARD_CLASSES.contains(other.getName())) {
return true;
}
}
return super.implies(p);
}
@Override
public PermissionCollection newPermissionCollection() {
// BasicPermissionCollection only handles wildcards, we expand <<STANDARD>> here
PermissionCollection impl = super.newPermissionCollection();
return new PermissionCollection() {
private static final long serialVersionUID = 6792220143549780002L;
@Override
public void add(Permission permission) {
if (permission instanceof ClassPermission && STANDARD.equals(permission.getName())) {
for (String clazz : STANDARD_CLASSES) {
impl.add(new ClassPermission(clazz));
}
} else {
impl.add(permission);
}
}
@Override
public boolean implies(Permission permission) {
return impl.implies(permission);
}
@Override
public Enumeration<Permission> elements() {
return impl.elements();
}
};
}
}
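A hedged sketch of how a script engine could consult this permission before letting a compiled script touch a class; the helper name and the decision to resolve via Class.forName are illustrative, not the API of any particular engine:

    // Hypothetical helper: throws SecurityException unless the policy grants
    // ClassPermission for the requested class (directly, via a package wildcard,
    // or through the expanded <<STANDARD>> set).
    static Class<?> loadAllowedClass(String className) throws ClassNotFoundException {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new ClassPermission(className));
        }
        return Class.forName(className);
    }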

View File

@ -84,9 +84,6 @@ grant {
// the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely!
permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write";
// needed by lucene SPI currently
permission java.lang.RuntimePermission "getClassLoader";
// needed by Settings
permission java.lang.RuntimePermission "getenv.*";

View File

@ -34,5 +34,6 @@ grant {
permission java.util.PropertyPermission "rhino.stack.style", "read";
// needed IndyInterface selectMethod (setCallSiteTarget)
// TODO: clean this up / only give it to engines that really must have it
permission java.lang.RuntimePermission "getClassLoader";
};
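With ClassPermission available, a scripting plugin's policy can whitelist classes explicitly instead of relying on broad runtime permissions; an assumed example of such a grant:

    grant {
      // standard whitelist plus one extra class, as an illustration
      permission org.elasticsearch.script.ClassPermission "<<STANDARD>>";
      permission org.elasticsearch.script.ClassPermission "java.util.Random";
    };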

View File

@ -43,8 +43,6 @@ OFFICIAL PLUGINS
- discovery-ec2
- discovery-gce
- discovery-multicast
- lang-expression
- lang-groovy
- lang-javascript
- lang-python
- mapper-attachments

View File

@ -766,11 +766,11 @@ public class ClusterServiceIT extends ESIntegTestCase {
return false;
}
}
int numberOfThreads = randomIntBetween(2, 256);
int numberOfThreads = randomIntBetween(2, 8);
int tasksSubmittedPerThread = randomIntBetween(1, 1024);
ConcurrentMap<String, AtomicInteger> counters = new ConcurrentHashMap<>();
CountDownLatch latch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread);
CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread);
ClusterStateTaskListener listener = new ClusterStateTaskListener() {
@Override
public void onFailure(String source, Throwable t) {
@ -780,7 +780,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
counters.computeIfAbsent(source, key -> new AtomicInteger()).incrementAndGet();
latch.countDown();
updateLatch.countDown();
}
};
@ -814,7 +814,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
clusterService.submitStateUpdateTask(
Thread.currentThread().getName(),
new Task(),
ClusterStateTaskConfig.build(Priority.NORMAL),
ClusterStateTaskConfig.build(randomFrom(Priority.values())),
executor,
listener);
}
@ -829,14 +829,16 @@ public class ClusterServiceIT extends ESIntegTestCase {
}
// wait until all the cluster state updates have been processed
latch.await();
updateLatch.await();
// assert the number of executed tasks is correct
assertEquals(numberOfThreads * tasksSubmittedPerThread, counter.get());
// assert each executor executed the correct number of tasks
for (TaskExecutor executor : executors) {
assertEquals((int)counts.get(executor), executor.counter.get());
if (counts.containsKey(executor)) {
assertEquals((int) counts.get(executor), executor.counter.get());
}
}
// assert the correct number of clusterStateProcessed events were triggered

View File

@ -37,6 +37,8 @@ public class FieldTypeLookupTests extends ESTestCase {
FieldTypeLookup lookup = new FieldTypeLookup();
assertNull(lookup.get("foo"));
assertNull(lookup.getByIndexName("foo"));
assertEquals(Collections.emptySet(), lookup.getTypes("foo"));
assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo"));
Collection<String> names = lookup.simpleMatchToFullName("foo");
assertNotNull(names);
assertTrue(names.isEmpty());
@ -70,6 +72,14 @@ public class FieldTypeLookupTests extends ESTestCase {
assertNull(lookup.get("bar"));
assertEquals(f.fieldType(), lookup2.getByIndexName("bar"));
assertNull(lookup.getByIndexName("foo"));
assertEquals(Collections.emptySet(), lookup.getTypes("foo"));
assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo"));
assertEquals(Collections.emptySet(), lookup.getTypes("bar"));
assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("bar"));
assertEquals(Collections.singleton("type"), lookup2.getTypes("foo"));
assertEquals(Collections.emptySet(), lookup2.getTypesByIndexName("foo"));
assertEquals(Collections.emptySet(), lookup2.getTypes("bar"));
assertEquals(Collections.singleton("type"), lookup2.getTypesByIndexName("bar"));
assertEquals(1, size(lookup2.iterator()));
}
@ -144,7 +154,7 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testCheckCompatibilityNewField() {
FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
FieldTypeLookup lookup = new FieldTypeLookup();
lookup.checkCompatibility(newList(f1), false);
lookup.checkCompatibility("type", newList(f1), false);
}
public void testCheckCompatibilityMismatchedTypes() {
@ -155,14 +165,14 @@ public class FieldTypeLookupTests extends ESTestCase {
MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo", "foo");
FieldMapper f2 = new FakeFieldMapper("foo", ft2);
try {
lookup.checkCompatibility(newList(f2), false);
lookup.checkCompatibility("type2", newList(f2), false);
fail("expected type mismatch");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
}
// fails even if updateAllTypes == true
try {
lookup.checkCompatibility(newList(f2), true);
lookup.checkCompatibility("type2", newList(f2), true);
fail("expected type mismatch");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]"));
@ -178,25 +188,27 @@ public class FieldTypeLookupTests extends ESTestCase {
ft2.setBoost(2.0f);
FieldMapper f2 = new FakeFieldMapper("foo", ft2);
try {
lookup.checkCompatibility(newList(f2), false);
// different type
lookup.checkCompatibility("type2", newList(f2), false);
fail("expected conflict");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("to update [boost] across all types"));
}
lookup.checkCompatibility(newList(f2), true); // boost is updateable, so ok if forcing
lookup.checkCompatibility("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
lookup.checkCompatibility("type2", newList(f2), true); // boost is updateable, so ok if forcing
// now with a non changeable setting
MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo", "bar");
ft3.setStored(true);
FieldMapper f3 = new FakeFieldMapper("foo", ft3);
try {
lookup.checkCompatibility(newList(f3), false);
lookup.checkCompatibility("type2", newList(f3), false);
fail("expected conflict");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("has different [store] values"));
}
// even with updateAllTypes == true, incompatible
try {
lookup.checkCompatibility(newList(f3), true);
lookup.checkCompatibility("type2", newList(f3), true);
fail("expected conflict");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("has different [store] values"));

View File

@ -30,6 +30,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
@ -68,6 +69,25 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
}
public void testDynamicObjectCopyTo() throws Exception {
String mapping = jsonBuilder().startObject().startObject("doc").startObject("properties")
.startObject("foo")
.field("type", "string")
.field("copy_to", "root.top.child")
.endObject()
.endObject().endObject().endObject().string();
assertAcked(
client().admin().indices().prepareCreate("test-idx")
.addMapping("doc", mapping)
);
client().prepareIndex("test-idx", "doc", "1")
.setSource("foo", "bar")
.get();
client().admin().indices().prepareRefresh("test-idx").execute().actionGet();
SearchResponse response = client().prepareSearch("test-idx")
.setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get();
assertThat(response.getHits().totalHits(), equalTo(1L));
}
private XContentBuilder createDynamicTemplateMapping() throws IOException {
return XContentFactory.jsonBuilder().startObject().startObject("doc")

View File

@ -167,27 +167,126 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
}
public void testCopyToFieldsNonExistingInnerObjectParsing() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
public void testCopyToDynamicInnerObjectParsing() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("copy_to", "very.inner.field")
.field("type", "string")
.field("copy_to", "very.inner.field")
.endObject()
.endObject().endObject().endObject().string();
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
.field("new_field", "bar")
.endObject().bytes();
ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc();
assertThat(doc.getFields("copy_test").length, equalTo(1));
assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
assertThat(doc.getFields("very.inner.field").length, equalTo(1));
assertThat(doc.getFields("very.inner.field")[0].stringValue(), equalTo("foo"));
assertThat(doc.getFields("new_field").length, equalTo(1));
assertThat(doc.getFields("new_field")[0].stringValue(), equalTo("bar"));
}
public void testCopyToDynamicInnerInnerObjectParsing() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("copy_to", "very.far.inner.field")
.endObject()
.startObject("very")
.field("type", "object")
.startObject("properties")
.startObject("far")
.field("type", "object")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
.field("new_field", "bar")
.endObject().bytes();
ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc();
assertThat(doc.getFields("copy_test").length, equalTo(1));
assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
assertThat(doc.getFields("very.far.inner.field").length, equalTo(1));
assertThat(doc.getFields("very.far.inner.field")[0].stringValue(), equalTo("foo"));
assertThat(doc.getFields("new_field").length, equalTo(1));
assertThat(doc.getFields("new_field")[0].stringValue(), equalTo("bar"));
}
public void testCopyToStrictDynamicInnerObjectParsing() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type1")
.field("dynamic", "strict")
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("copy_to", "very.inner.field")
.endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
.endObject().bytes();
try {
docMapper.parse("test", "type1", "1", json).rootDoc();
fail();
} catch (MapperParsingException ex) {
assertThat(ex.getMessage(), startsWith("attempt to copy value to non-existing object"));
assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [type1] is not allowed"));
}
}
public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("copy_to", "very.far.field")
.endObject()
.startObject("very")
.field("type", "object")
.startObject("properties")
.startObject("far")
.field("type", "object")
.field("dynamic", "strict")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
.endObject().bytes();
try {
docMapper.parse("test", "type1", "1", json).rootDoc();
fail();
} catch (MapperParsingException ex) {
assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));
}
}
@ -337,6 +436,41 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
}
}
public void testCopyToDynamicNestedObjectParsing() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type1")
.startArray("dynamic_templates")
.startObject()
.startObject("objects")
.field("match_mapping_type", "object")
.startObject("mapping")
.field("type", "nested")
.endObject()
.endObject()
.endObject()
.endArray()
.startObject("properties")
.startObject("copy_test")
.field("type", "string")
.field("copy_to", "very.inner.field")
.endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
BytesReference json = jsonBuilder().startObject()
.field("copy_test", "foo")
.field("new_field", "bar")
.endObject().bytes();
try {
docMapper.parse("test", "type1", "1", json).rootDoc();
fail();
} catch (MapperParsingException ex) {
assertThat(ex.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`"));
}
}
private void assertFieldValue(Document doc, String field, Number... expected) {
IndexableField[] values = doc.getFields(field);
if (values == null) {

View File

@ -25,12 +25,14 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.search.SearchHitField;
@ -715,28 +717,25 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
DocumentMapper stage1 = parser.parse(stage1Mapping);
MapperService mapperService = createIndex("test", settings).mapperService();
DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false);
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
.field("geohash", false).endObject().endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts().length, equalTo(3));
// todo better way of checking conflict?
assertThat("mapper [point] has different [lat_lon]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
assertThat("mapper [point] has different [geohash]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
assertThat("mapper [point] has different [geohash_precision]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
try {
mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]"));
assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]"));
assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]"));
}
// correct mapping and ensure no failures
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
}
public void testGeoHashSearch() throws Exception {

View File

@ -22,12 +22,14 @@ import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -35,6 +37,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.isIn;
@ -376,23 +379,21 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
.startObject("shape").field("type", "geo_shape").field("tree", "geohash").field("strategy", "recursive")
.field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw")
.endObject().endObject().endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper stage1 = parser.parse(stage1Mapping);
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false);
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree")
.field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26)
.field("orientation", "cw").endObject().endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
// check correct conflicts
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts().length, equalTo(4));
ArrayList<String> conflicts = new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()));
assertThat("mapper [shape] has different [strategy]", isIn(conflicts));
assertThat("mapper [shape] has different [tree]", isIn(conflicts));
assertThat("mapper [shape] has different [tree_levels]", isIn(conflicts));
assertThat("mapper [shape] has different [precision]", isIn(conflicts));
try {
mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]"));
assertThat(e.getMessage(), containsString("mapper [shape] has different [tree]"));
assertThat(e.getMessage(), containsString("mapper [shape] has different [tree_levels]"));
assertThat(e.getMessage(), containsString("mapper [shape] has different [precision]"));
}
// verify nothing changed
FieldMapper fieldMapper = stage1.mappers().getMapper("shape");
@ -411,11 +412,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
.field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2.mapping(), false, false);
// verify mapping changes, and ensure no failures
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false);
fieldMapper = stage1.mappers().getMapper("shape");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));

View File

@ -22,9 +22,11 @@ package org.elasticsearch.index.mapper.multifield.merge;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -32,6 +34,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.Arrays;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@ -113,9 +116,9 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json");
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue());
@ -129,12 +132,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json");
DocumentMapper docMapper2 = parser.parse(mapping);
MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper2.mapping(), false, false);
mapperService.merge("person", new CompressedXContent(mapping), false, false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -151,12 +149,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
assertThat(f, notNullValue());
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json");
DocumentMapper docMapper3 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper3.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper3.mapping(), false, false);
mapperService.merge("person", new CompressedXContent(mapping), false, false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -168,24 +161,19 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json");
DocumentMapper docMapper4 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper4.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different [index] values"));
assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different [store] values"));
try {
mapperService.merge("person", new CompressedXContent(mapping), false, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [name] has different [index] values"));
assertThat(e.getMessage(), containsString("mapper [name] has different [store] values"));
}
mergeResult = docMapper.merge(docMapper4.mapping(), false, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different [index] values"));
assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different [store] values"));
// There are conflicts, but the `name.not_indexed3` has been added, b/c that field has no conflicts
// There are conflicts, so the `name.not_indexed3` has not been added
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue());
assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue());
assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue());
assertThat(docMapper.mappers().getMapper("name.not_indexed3"), notNullValue());
assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue());
}
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -478,7 +479,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("field").field("type", "string").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse(mapping);
DocumentMapper defaultMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -507,10 +508,12 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
.endObject().endObject().endObject().endObject().string();
mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false);
assertTrue(mergeResult.hasConflicts());
assertEquals(1, mergeResult.buildConflicts().length);
assertTrue(mergeResult.buildConflicts()[0].contains("different [omit_norms]"));
try {
defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("different [omit_norms]"));
}
}
/**

View File

@ -41,6 +41,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
@ -557,7 +558,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
public void testMergingConflicts() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true)
.startObject("fielddata").field("format", "doc_values").endObject()
.field("store", "yes")
.field("index", "analyzed")
.field("path", "foo")
@ -565,9 +565,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser();
MapperService mapperService = createIndex("test", indexSettings).mapperService();
DocumentMapper docMapper = parser.parse(mapping);
DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false);
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", false)
@ -579,20 +579,32 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject().string();
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
List<String> expectedConflicts = new ArrayList<>(Arrays.asList(
"mapper [_timestamp] has different [index] values",
"mapper [_timestamp] has different [store] values",
"Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02",
"Cannot update path in _timestamp value. Value is foo path in merged mapping is bar"));
for (String conflict : mergeResult.buildConflicts()) {
assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict));
try {
mapperService.merge("type", new CompressedXContent(mapping), false, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values"));
assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values"));
}
assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty());
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
assertTrue(docMapper.timestampFieldMapper().enabled());
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true)
.field("store", "yes")
.field("index", "analyzed")
.field("path", "bar")
.field("default", "1970-01-02")
.endObject()
.endObject().endObject().string();
try {
mapperService.merge("type", new CompressedXContent(mapping), false, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02"));
assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value. Value is foo path in merged mapping is bar"));
}
}
public void testBackcompatMergingConflictsForIndexValues() throws Exception {

View File

@ -48,7 +48,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
public void testAllConflicts() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_create_index.json");
String mappingUpdate = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json");
String[] errorMessage = {"[_all] enabled is true now encountering false",
String[] errorMessage = {
"[_all] has different [omit_norms] values",
"[_all] has different [store] values",
"[_all] has different [store_term_vector] values",
@ -61,6 +61,13 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
testConflict(mapping, mappingUpdate, errorMessage);
}
public void testAllDisabled() throws Exception {
XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", true).endObject().endObject().endObject().endObject();
XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
String errorMessage = "[_all] enabled is true now encountering false";
testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
}
public void testAllWithDefault() throws Exception {
String defaultMapping = jsonBuilder().startObject().startObject("_default_")
.startObject("_all")

View File

@ -123,14 +123,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
mapperService.merge("type", new CompressedXContent(update.string()), false, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("Merge failed"));
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
}
try {
mapperService.merge("type", new CompressedXContent(update.string()), false, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("Merge failed"));
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
}
assertTrue(mapperService.documentMapper("type").mapping().root().getMapper("foo") instanceof LongFieldMapper);
@ -167,7 +167,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
}
// same as the testConflictNewType except that the mapping update is on an existing type
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/15049")
public void testConflictNewTypeUpdate() throws Exception {
XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("foo").field("type", "long").endObject()

View File

@ -140,7 +140,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}").execute().actionGet();
fail("Expected MergeMappingException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [body] of different type"));
assertThat(e.getMessage(), containsString("mapper [body] cannot be changed from type [string] to [int]"));
}
}

View File

@ -22,54 +22,51 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
public class IndexingMemoryControllerTests extends ESTestCase {
public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {
static class MockController extends IndexingMemoryController {
final static ByteSizeValue INACTIVE = new ByteSizeValue(-1);
final Map<ShardId, ByteSizeValue> indexingBuffers = new HashMap<>();
final Map<ShardId, ByteSizeValue> translogBuffers = new HashMap<>();
final Map<IndexShard, ByteSizeValue> indexingBuffers = new HashMap<>();
final Map<IndexShard, ByteSizeValue> translogBuffers = new HashMap<>();
final Map<ShardId, Long> lastIndexTimeNanos = new HashMap<>();
final Set<ShardId> activeShards = new HashSet<>();
final Map<IndexShard, Long> lastIndexTimeNanos = new HashMap<>();
final Set<IndexShard> activeShards = new HashSet<>();
long currentTimeSec = TimeValue.timeValueNanos(System.nanoTime()).seconds();
public MockController(Settings settings) {
super(Settings.builder()
.put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
.put(settings)
.build(),
null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb
.put(SHARD_INACTIVE_INTERVAL_TIME_SETTING, "200h") // disable it
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "1ms") // nearly immediate
.put(settings)
.build(),
null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb
}
public void deleteShard(ShardId id) {
public void deleteShard(IndexShard id) {
indexingBuffers.remove(id);
translogBuffers.remove(id);
}
public void assertBuffers(ShardId id, ByteSizeValue indexing, ByteSizeValue translog) {
public void assertBuffers(IndexShard id, ByteSizeValue indexing, ByteSizeValue translog) {
assertThat(indexingBuffers.get(id), equalTo(indexing));
assertThat(translogBuffers.get(id), equalTo(translog));
}
public void assertInActive(ShardId id) {
public void assertInactive(IndexShard id) {
assertThat(indexingBuffers.get(id), equalTo(INACTIVE));
assertThat(translogBuffers.get(id), equalTo(INACTIVE));
}
@ -80,36 +77,31 @@ public class IndexingMemoryControllerTests extends ESTestCase {
}
@Override
protected List<ShardId> availableShards() {
protected List<IndexShard> availableShards() {
return new ArrayList<>(indexingBuffers.keySet());
}
@Override
protected boolean shardAvailable(ShardId shardId) {
return indexingBuffers.containsKey(shardId);
protected boolean shardAvailable(IndexShard shard) {
return indexingBuffers.containsKey(shard);
}
@Override
protected Boolean getShardActive(ShardId shardId) {
return activeShards.contains(shardId);
protected void updateShardBuffers(IndexShard shard, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
indexingBuffers.put(shard, shardIndexingBufferSize);
translogBuffers.put(shard, shardTranslogBufferSize);
}
@Override
protected void updateShardBuffers(ShardId shardId, ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
indexingBuffers.put(shardId, shardIndexingBufferSize);
translogBuffers.put(shardId, shardTranslogBufferSize);
}
@Override
protected Boolean checkIdle(ShardId shardId) {
protected boolean checkIdle(IndexShard shard) {
final TimeValue inactiveTime = settings.getAsTime(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5));
Long ns = lastIndexTimeNanos.get(shardId);
Long ns = lastIndexTimeNanos.get(shard);
if (ns == null) {
return null;
return true;
} else if (currentTimeInNanos() - ns >= inactiveTime.nanos()) {
indexingBuffers.put(shardId, INACTIVE);
translogBuffers.put(shardId, INACTIVE);
activeShards.remove(shardId);
indexingBuffers.put(shard, INACTIVE);
translogBuffers.put(shard, INACTIVE);
activeShards.remove(shard);
return true;
} else {
return false;
@ -120,118 +112,126 @@ public class IndexingMemoryControllerTests extends ESTestCase {
currentTimeSec += sec;
}
public void simulateIndexing(ShardId shardId) {
lastIndexTimeNanos.put(shardId, currentTimeInNanos());
if (indexingBuffers.containsKey(shardId) == false) {
public void simulateIndexing(IndexShard shard) {
lastIndexTimeNanos.put(shard, currentTimeInNanos());
if (indexingBuffers.containsKey(shard) == false) {
// First time we are seeing this shard; start it off with inactive buffers as IndexShard does:
indexingBuffers.put(shardId, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
translogBuffers.put(shardId, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
indexingBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER);
translogBuffers.put(shard, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER);
}
activeShards.add(shardId);
activeShards.add(shard);
forceCheck();
}
}
public void testShardAdditionAndRemoval() {
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 3).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build());
final ShardId shard1 = new ShardId("test", 1);
controller.simulateIndexing(shard1);
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build());
IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
// add another shard
final ShardId shard2 = new ShardId("test", 2);
controller.simulateIndexing(shard2);
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard2, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
// remove first shard
controller.deleteShard(shard1);
controller.deleteShard(shard0);
controller.forceCheck();
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
// remove second shard
controller.deleteShard(shard2);
controller.deleteShard(shard1);
controller.forceCheck();
// add a new one
final ShardId shard3 = new ShardId("test", 3);
controller.simulateIndexing(shard3);
controller.assertBuffers(shard3, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
IndexShard shard2 = test.getShard(2);
controller.simulateIndexing(shard2);
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
}
public void testActiveInactive() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb")
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s")
.build());
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");
final ShardId shard1 = new ShardId("test", 1);
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb")
.put(IndexShard.INDEX_SHARD_INACTIVE_TIME_SETTING, "5s")
.build());
IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
final ShardId shard2 = new ShardId("test", 2);
controller.simulateIndexing(shard2);
controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard2, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
// index into both shards, move the clock and see that they are still active
controller.simulateIndexing(shard0);
controller.simulateIndexing(shard1);
controller.simulateIndexing(shard2);
controller.incrementTimeSec(10);
controller.forceCheck();
// both shards now inactive
controller.assertInActive(shard1);
controller.assertInActive(shard2);
controller.assertInactive(shard0);
controller.assertInactive(shard1);
// index into one shard only, see it becomes active
controller.simulateIndexing(shard1);
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.assertInActive(shard2);
controller.simulateIndexing(shard0);
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.assertInactive(shard1);
controller.incrementTimeSec(3); // increment but not enough to become inactive
controller.forceCheck();
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.assertInActive(shard2);
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.assertInactive(shard1);
controller.incrementTimeSec(3); // increment some more
controller.forceCheck();
controller.assertInActive(shard1);
controller.assertInActive(shard2);
controller.assertInactive(shard0);
controller.assertInactive(shard1);
// index some and shard becomes immediately active
controller.simulateIndexing(shard2);
controller.assertInActive(shard1);
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
controller.simulateIndexing(shard1);
controller.assertInactive(shard0);
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB));
}
public void testMinShardBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
.put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
.put(IndexingMemoryController.MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "40kb").build());
assertTwoActiveShards(controller, new ByteSizeValue(6, ByteSizeUnit.MB), new ByteSizeValue(40, ByteSizeUnit.KB));
}
public void testMaxShardBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
.put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb")
.put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "50kb")
.put(IndexingMemoryController.MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, "3mb")
.put(IndexingMemoryController.MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, "10kb").build());
assertTwoActiveShards(controller, new ByteSizeValue(3, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.KB));
}
public void testRelativeBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%")
.build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "50%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.5%")
.build());
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(50, ByteSizeUnit.MB)));
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
@ -240,10 +240,10 @@ public class IndexingMemoryControllerTests extends ESTestCase {
public void testMinBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
@ -251,23 +251,24 @@ public class IndexingMemoryControllerTests extends ESTestCase {
public void testMaxBufferSizes() {
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%")
.put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "90%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "90%")
.put(IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MAX_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
assertThat(controller.indexingBufferSize(), equalTo(new ByteSizeValue(6, ByteSizeUnit.MB)));
assertThat(controller.translogBufferSize(), equalTo(new ByteSizeValue(512, ByteSizeUnit.KB)));
}
protected void assertTwoActiveShards(MockController controller, ByteSizeValue indexBufferSize, ByteSizeValue translogBufferSize) {
final ShardId shard1 = new ShardId("test", 1);
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");
IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
final ShardId shard2 = new ShardId("test", 2);
controller.simulateIndexing(shard2);
controller.assertBuffers(shard0, indexBufferSize, translogBufferSize);
controller.assertBuffers(shard1, indexBufferSize, translogBufferSize);
controller.assertBuffers(shard2, indexBufferSize, translogBufferSize);
}
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.plugins;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -259,14 +259,14 @@ public class PluginInfoTests extends ESTestCase {
}
public void testPluginListSorted() {
PluginsInfo pluginsInfo = new PluginsInfo(5);
pluginsInfo.add(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));
PluginsAndModules pluginsInfo = new PluginsAndModules();
pluginsInfo.addPlugin(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));
final List<PluginInfo> infos = pluginsInfo.getInfos();
final List<PluginInfo> infos = pluginsInfo.getPluginInfos();
List<String> names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList());
assertThat(names, contains("a", "b", "c", "d", "e"));
}

View File

@ -26,6 +26,8 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.test.ESTestCase;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
public class PluginsServiceTests extends ESTestCase {
@ -81,7 +83,7 @@ public class PluginsServiceTests extends ESTestCase {
}
static PluginsService newPluginsService(Settings settings, Class<? extends Plugin>... classpathPlugins) {
return new PluginsService(settings, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
return new PluginsService(settings, null, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
}
public void testAdditionalSettings() {
@ -123,4 +125,15 @@ public class PluginsServiceTests extends ESTestCase {
assertEquals("boom", ex.getCause().getCause().getMessage());
}
}
public void testExistingPluginMissingDescriptor() throws Exception {
Path pluginsDir = createTempDir();
Files.createDirectory(pluginsDir.resolve("plugin-missing-descriptor"));
try {
PluginsService.getPluginBundles(pluginsDir);
fail();
} catch (IllegalStateException e) {
assertTrue(e.getMessage(), e.getMessage().contains("Could not load plugin descriptor for existing plugin"));
}
}
}

View File

@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script;
import org.elasticsearch.test.ESTestCase;
import java.security.AllPermission;
import java.security.PermissionCollection;
/** Very simple sanity checks for {@link ClassPermission} */
public class ClassPermissionTests extends ESTestCase {
public void testEquals() {
assertEquals(new ClassPermission("pkg.MyClass"), new ClassPermission("pkg.MyClass"));
assertFalse(new ClassPermission("pkg.MyClass").equals(new AllPermission()));
}
public void testImplies() {
assertTrue(new ClassPermission("pkg.MyClass").implies(new ClassPermission("pkg.MyClass")));
assertFalse(new ClassPermission("pkg.MyClass").implies(new ClassPermission("pkg.MyOtherClass")));
assertFalse(new ClassPermission("pkg.MyClass").implies(null));
assertFalse(new ClassPermission("pkg.MyClass").implies(new AllPermission()));
}
public void testStandard() {
assertTrue(new ClassPermission("<<STANDARD>>").implies(new ClassPermission("java.lang.Math")));
assertFalse(new ClassPermission("<<STANDARD>>").implies(new ClassPermission("pkg.MyClass")));
}
public void testPermissionCollection() {
ClassPermission math = new ClassPermission("java.lang.Math");
PermissionCollection collection = math.newPermissionCollection();
collection.add(math);
assertTrue(collection.implies(new ClassPermission("java.lang.Math")));
assertFalse(collection.implies(new ClassPermission("pkg.MyClass")));
}
public void testPermissionCollectionStandard() {
ClassPermission standard = new ClassPermission("<<STANDARD>>");
PermissionCollection collection = standard.newPermissionCollection();
collection.add(standard);
assertTrue(collection.implies(new ClassPermission("java.lang.Math")));
assertFalse(collection.implies(new ClassPermission("pkg.MyClass")));
}
/** not recommended but we test anyway */
public void testWildcards() {
assertTrue(new ClassPermission("*").implies(new ClassPermission("pkg.MyClass")));
assertTrue(new ClassPermission("pkg.*").implies(new ClassPermission("pkg.MyClass")));
assertTrue(new ClassPermission("pkg.*").implies(new ClassPermission("pkg.sub.MyClass")));
assertFalse(new ClassPermission("pkg.My*").implies(new ClassPermission("pkg.MyClass")));
assertFalse(new ClassPermission("pkg*").implies(new ClassPermission("pkg.MyClass")));
}
public void testPermissionCollectionWildcards() {
ClassPermission lang = new ClassPermission("java.lang.*");
PermissionCollection collection = lang.newPermissionCollection();
collection.add(lang);
assertTrue(collection.implies(new ClassPermission("java.lang.Math")));
assertFalse(collection.implies(new ClassPermission("pkg.MyClass")));
}
}

View File

@ -39,20 +39,61 @@ buildscript {
}
}
allprojects {
project.ext {
// this is common configuration for distributions, but we also add it here for the license check to use
dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
// this is common configuration for distributions, but we also add it here for the license check to use
ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
/*****************************************************************************
* Modules *
*****************************************************************************/
task buildModules(type: Copy) {
into 'build/modules'
}
ext.restTestExpansions = [
'expected.modules.count': 0,
]
// we create the buildModules task above so the distribution subprojects can
// depend on it, but we don't actually configure it until projects are evaluated
// so it can depend on the bundling of plugins (ie modules must have been configured)
project.gradle.projectsEvaluated {
project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each { Project module ->
buildModules {
dependsOn module.bundlePlugin
into(module.name) {
from { zipTree(module.bundlePlugin.outputs.files.singleFile) }
}
}
configure(subprojects.findAll { it.name != 'integ-test-zip' }) { Project distribution ->
distribution.integTest.mustRunAfter(module.integTest)
}
restTestExpansions['expected.modules.count'] += 1
}
}
// make sure we have a clean task since we aren't a java project, but we have tasks that
// put stuff in the build dir
task clean(type: Delete) {
delete 'build'
}
subprojects {
/*****************************************************************************
* Rest test config *
*****************************************************************************/
apply plugin: 'elasticsearch.rest-test'
integTest {
includePackaged true
project.integTest {
dependsOn(project.assemble)
includePackaged project.name == 'integ-test-zip'
cluster {
distribution = project.name
}
}
processTestResources {
inputs.properties(project(':distribution').restTestExpansions)
MavenFilteringHack.filter(it, project(':distribution').restTestExpansions)
}
/*****************************************************************************
@ -81,7 +122,12 @@ subprojects {
libFiles = copySpec {
into 'lib'
from project(':core').jar
from dependencyFiles
from project(':distribution').dependencyFiles
}
modulesFiles = copySpec {
into 'modules'
from project(':distribution').buildModules
}
configFiles = copySpec {
@ -103,7 +149,7 @@ subprojects {
/*****************************************************************************
* Zip and tgz configuration *
*****************************************************************************/
configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
configure(subprojects.findAll { ['zip', 'tar', 'integ-test-zip'].contains(it.name) }) {
project.ext.archivesFiles = copySpec {
into("elasticsearch-${version}") {
with libFiles
@ -121,6 +167,9 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
from('../src/main/resources') {
include 'bin/*.exe'
}
if (project.name != 'integ-test-zip') {
with modulesFiles
}
}
}
}
@ -143,7 +192,7 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
* directly from the filesystem. It doesn't want to process them through
* MavenFilteringHack or any other copy-style action.
*/
configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) {
integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
File packagingFiles = new File(buildDir, 'packaging')
project.ext.packagingFiles = packagingFiles
@ -233,6 +282,7 @@ configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
user 'root'
permissionGroup 'root'
with libFiles
with modulesFiles
with copySpec {
with commonFiles
if (project.name == 'deb') {
@ -305,7 +355,7 @@ task updateShas(type: UpdateShasTask) {
parentTask = dependencyLicenses
}
RunTask.configure(project)
task run(type: RunTask) {}
/**
* Build some variables that are replaced in the packages. This includes both

View File

@ -18,7 +18,7 @@
*/
task buildDeb(type: Deb) {
dependsOn dependencyFiles, preparePackagingFiles
dependsOn preparePackagingFiles
baseName 'elasticsearch' // this is what pom generation uses for artifactId
// Follow elasticsearch's deb file naming convention
archiveName "${packageName}-${project.version}.deb"
@ -44,6 +44,4 @@ integTest {
skip the test if they aren't around. */
enabled = new File('/usr/bin/dpkg-deb').exists() || // Standard location
new File('/usr/local/bin/dpkg-deb').exists() // Homebrew location
dependsOn buildDeb
clusterConfig.distribution = 'deb'
}

View File

@ -0,0 +1,13 @@
# Integration tests for distributions with modules
#
"Correct Modules Count":
- do:
cluster.state: {}
# Get master node id
- set: { master_node: master }
- do:
nodes.info: {}
- length: { nodes.$master.plugins: ${expected.modules.count} }

View File

@ -17,24 +17,15 @@
* under the License.
*/
package org.elasticsearch.script.expression;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule;
public class ExpressionPlugin extends Plugin {
@Override
public String name() {
return "lang-expression";
}
@Override
public String description() {
return "Lucene expressions integration for Elasticsearch";
}
public void onModule(ScriptModule module) {
module.addScriptEngine(ExpressionScriptEngineService.class);
}
task buildZip(type: Zip) {
baseName = 'elasticsearch'
with archivesFiles
}
artifacts {
'default' buildZip
archives buildZip
}
integTest.dependsOn buildZip

View File

@ -19,20 +19,20 @@
package org.elasticsearch.test.rest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import java.io.IOException;
/** Rest API tests subset 3 */
public class Rest3IT extends ESRestTestCase {
public Rest3IT(@Name("yaml") RestTestCandidate testCandidate) {
/** Rest integration test. runs against external cluster in 'mvn verify' */
public class RestIT extends ESRestTestCase {
public RestIT(RestTestCandidate testCandidate) {
super(testCandidate);
}
// we run them all sequentially: start simple!
@ParametersFactory
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
return createParameters(3, 8);
return createParameters(0, 1);
}
}

View File

@ -42,6 +42,4 @@ integTest {
enabled = new File('/bin/rpm').exists() || // Standard location
new File('/usr/bin/rpm').exists() || // Debian location
new File('/usr/local/bin/rpm').exists() // Homebrew location
dependsOn buildRpm
clusterConfig.distribution = 'rpm'
}

View File

@ -0,0 +1,13 @@
# Integration tests for distributions with modules
#
"Correct Modules Count":
- do:
cluster.state: {}
# Get master node id
- set: { master_node: master }
- do:
nodes.info: {}
- length: { nodes.$master.plugins: ${expected.modules.count} }

View File

@ -17,7 +17,7 @@
* under the License.
*/
task buildTar(type: Tar, dependsOn: dependencyFiles) {
task buildTar(type: Tar) {
baseName = 'elasticsearch'
extension = 'tar.gz'
with archivesFiles
@ -28,8 +28,3 @@ artifacts {
'default' buildTar
archives buildTar
}
integTest {
dependsOn buildTar
clusterConfig.distribution = 'tar'
}

View File

@ -0,0 +1,13 @@
# Integration tests for distributions with modules
#
"Correct Modules Count":
- do:
cluster.state: {}
# Get master node id
- set: { master_node: master }
- do:
nodes.info: {}
- length: { nodes.$master.plugins: ${expected.modules.count} }

View File

@ -17,7 +17,7 @@
* under the License.
*/
task buildZip(type: Zip, dependsOn: dependencyFiles) {
task buildZip(type: Zip) {
baseName = 'elasticsearch'
with archivesFiles
}

View File

@ -0,0 +1,13 @@
# Integration tests for distributions with modules
#
"Correct Modules Count":
- do:
cluster.state: {}
# Get master node id
- set: { master_node: master }
- do:
nodes.info: {}
- length: { nodes.$master.plugins: ${expected.modules.count} }

View File

@ -90,6 +90,9 @@ The search exists api has been removed in favour of using the search api with
The deprecated `/_optimize` endpoint has been removed. The `/_forcemerge`
endpoint should be used in lieu of optimize.
The `GET` HTTP verb for `/_forcemerge` is no longer supported; use the
`POST` HTTP verb instead.
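For illustration only (not part of this diff): a minimal sketch of the new requirement, assuming a node listening on `localhost:9200` and a hypothetical index named `my-index`; the `max_num_segments` parameter is optional.
[source,sh]
--------------------------------------------------
# GET is no longer accepted for this endpoint; issue the request with POST
curl -XPOST 'http://localhost:9200/my-index/_forcemerge?max_num_segments=1'
--------------------------------------------------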
==== Deprecated queries removed
The following deprecated queries have been removed:

View File

@ -60,7 +60,7 @@ default.
It is a good idea to place these directories in a different location so that
there is no chance of deleting them when upgrading Elasticsearch. These
custom paths can be <<paths,configured>> with the `path.config` and
custom paths can be <<paths,configured>> with the `path.conf` and
`path.data` settings.
The Debian and RPM packages place these directories in the
@ -80,7 +80,7 @@ To upgrade using a zip or compressed tarball:
overwrite the `config` or `data` directories.
* Either copy the files in the `config` directory from your old installation
to your new installation, or use the `--path.config` option on the command
to your new installation, or use the `--path.conf` option on the command
line to point to an external config directory.
* Either copy the files in the `data` directory from your old installation
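For illustration only (not part of this diff): a minimal sketch of the `--path.conf` option mentioned above, assuming the old `config` directory was copied to a hypothetical `/etc/elasticsearch`.
[source,sh]
--------------------------------------------------
# start the new installation against an external config directory
./bin/elasticsearch --path.conf=/etc/elasticsearch
--------------------------------------------------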

View File

@ -21,12 +21,10 @@ consult this table:
[cols="1<m,1<m,3",options="header",]
|=======================================================================
|Upgrade From |Upgrade To |Supported Upgrade Type
|0.90.x |1.x, 2.x |<<restart-upgrade,Full cluster restart>>
|< 0.90.7 |0.90.x |<<restart-upgrade,Full cluster restart>>
|>= 0.90.7 |0.90.x |<<rolling-upgrades,Rolling upgrade>>
|1.0.0 - 1.3.1 |1.x |<<rolling-upgrades,Rolling upgrade>> (if <<recovery,`indices.recovery.compress`>> set to `false`)
|>= 1.3.2 |1.x |<<rolling-upgrades,Rolling upgrade>>
|0.90.x |2.x |<<restart-upgrade,Full cluster restart>>
|1.x |2.x |<<restart-upgrade,Full cluster restart>>
|2.x |2.y |<<rolling-upgrades,Rolling upgrade>> (where `y > x`)
|2.x |3.x |<<restart-upgrade,Full cluster restart>>
|=======================================================================
TIP: Take plugins into consideration as well when upgrading. Most plugins will have to be upgraded alongside Elasticsearch, although some plugins accessed primarily through the browser (`_site` plugins) may continue to work given that API changes are compatible.

modules/build.gradle Normal file
View File

@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
subprojects {
apply plugin: 'elasticsearch.esplugin'
esplugin {
// for local ES plugins, the name of the plugin is the same as the directory
name project.name
}
if (project.file('src/main/packaging').exists()) {
throw new InvalidModelException("Modules cannot contain packaging files")
}
if (project.file('src/main/bin').exists()) {
throw new InvalidModelException("Modules cannot contain bin files")
}
if (project.file('src/main/config').exists()) {
throw new InvalidModelException("Modules cannot contain config files")
}
project.afterEvaluate {
if (esplugin.isolated == false) {
throw new InvalidModelException("Modules cannot disable isolation")
}
if (esplugin.jvm == false) {
throw new InvalidModelException("Modules must be jvm plugins")
}
}
}

View File

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script.expression;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.text.ParseException;
public class ExpressionPlugin extends Plugin {
// lucene expressions has crazy checks in its clinit for the functions map
// it violates rules of classloaders to detect accessibility
// TODO: clean that up
static {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(new SpecialPermission());
}
AccessController.doPrivileged(new PrivilegedAction<Void>() {
@Override
public Void run() {
try {
JavascriptCompiler.compile("0");
} catch (ParseException e) {
throw new RuntimeException(e);
}
return null;
}
});
}
@Override
public String name() {
return "lang-expression";
}
@Override
public String description() {
return "Lucene expressions integration for Elasticsearch";
}
public void onModule(ScriptModule module) {
module.addScriptEngine(ExpressionScriptEngineService.class);
}
}

View File

@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.script.ClassPermission;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
@ -44,6 +45,7 @@ import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.lookup.SearchLookup;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.text.ParseException;
@ -95,7 +97,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
@Override
public Object compile(String script) {
// classloader created here
SecurityManager sm = System.getSecurityManager();
final SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(new SpecialPermission());
}
@ -103,8 +105,24 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
@Override
public Expression run() {
try {
// snapshot our context here, we check on behalf of the expression
AccessControlContext engineContext = AccessController.getContext();
ClassLoader loader = getClass().getClassLoader();
if (sm != null) {
loader = new ClassLoader(loader) {
@Override
protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
try {
engineContext.checkPermission(new ClassPermission(name));
} catch (SecurityException e) {
throw new ClassNotFoundException(name, e);
}
return super.loadClass(name, resolve);
}
};
}
// NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here
return JavascriptCompiler.compile(script);
return JavascriptCompiler.compile(script, JavascriptCompiler.DEFAULT_FUNCTIONS, loader);
} catch (ParseException e) {
throw new ScriptException("Failed to parse expression: " + script, e);
}

View File

@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
grant {
// needed to generate runtime classes
permission java.lang.RuntimePermission "createClassLoader";
// needed because of security problems in JavascriptCompiler
permission java.lang.RuntimePermission "getClassLoader";
// expression runtime
permission org.elasticsearch.script.ClassPermission "java.lang.String";
permission org.elasticsearch.script.ClassPermission "org.apache.lucene.expressions.Expression";
permission org.elasticsearch.script.ClassPermission "org.apache.lucene.queries.function.FunctionValues";
// available functions
permission org.elasticsearch.script.ClassPermission "java.lang.Math";
permission org.elasticsearch.script.ClassPermission "org.apache.lucene.util.MathUtil";
permission org.elasticsearch.script.ClassPermission "org.apache.lucene.util.SloppyMath";
};

Some files were not shown because too many files have changed in this diff