Merge pull request #15233 from rjernst/jigsaw

Add modules to distributions, and move lang-expression and lang-groovy to them
Ryan Ernst · 2015-12-04 11:44:27 -08:00 · commit 70107c5c3c
136 changed files with 513 additions and 236 deletions


@@ -109,6 +109,7 @@ subprojects {
"org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
"org.elasticsearch:elasticsearch:${version}": ':core',
"org.elasticsearch:test-framework:${version}": ':test-framework',
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip',
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
"org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar',
"org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm',


@@ -41,10 +41,16 @@ public class PluginBuildPlugin extends BuildPlugin {
String name = project.pluginProperties.extension.name
project.jar.baseName = name
project.bundlePlugin.baseName = name
project.integTest.dependsOn(project.bundlePlugin)
project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
project.tasks.run.dependsOn(project.bundlePlugin)
project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
if (project.path.startsWith(':modules:')) {
project.integTest.clusterConfig.module(project)
project.tasks.run.clusterConfig.module(project)
} else {
project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
}
}
createIntegTestTask(project)
createBundleTask(project)


@@ -27,7 +27,7 @@ import org.gradle.api.tasks.Input
class ClusterConfiguration {
@Input
String distribution = 'zip'
String distribution = 'integ-test-zip'
@Input
int numNodes = 1
@@ -71,6 +71,8 @@ class ClusterConfiguration {
LinkedHashMap<String, Object> plugins = new LinkedHashMap<>()
List<Project> modules = new ArrayList<>()
LinkedHashMap<String, Object[]> setupCommands = new LinkedHashMap<>()
@Input
@@ -93,6 +95,12 @@ class ClusterConfiguration {
plugins.put(name, pluginProject)
}
/** Add a module to the cluster. The project must be an esplugin and have a single zip default artifact. */
@Input
void module(Project moduleProject) {
modules.add(moduleProject)
}
@Input
void setupCommand(String name, Object... args) {
setupCommands.put(name, args)
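
As a usage sketch (project path and plugin name hypothetical), a build script can now hand a test cluster a bundled module through the new DSL instead of installing it as a plugin:

integTest {
    cluster {
        distribution = 'integ-test-zip'                // the new default
        module project(':modules:lang-expression')     // unpacked into the node's modules/
        plugin 'my-plugin', bundlePlugin.outputs.files // ordinary plugins still use plugin()
    }
}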


@@ -60,7 +60,12 @@ class ClusterFormationTasks {
/** Adds a dependency on the given distribution */
static void configureDistributionDependency(Project project, String distro) {
String elasticsearchVersion = VersionProperties.elasticsearch
String packaging = distro == 'tar' ? 'tar.gz' : distro
String packaging = distro
if (distro == 'tar') {
packaging = 'tar.gz'
} else if (distro == 'integ-test-zip') {
packaging = 'zip'
}
project.configurations {
elasticsearchDistro
}
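
The mapping above pairs with the per-distribution group ids registered in the root build (first hunk of this diff); the dependency the method goes on to add presumably looks along these lines (a sketch, not the verbatim code):

// e.g. distro = 'integ-test-zip'  ->  packaging = 'zip'
project.dependencies.add('elasticsearchDistro',
        "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}")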
@@ -103,6 +108,12 @@ class ClusterFormationTasks {
setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)
// install modules
for (Project module : node.config.modules) {
String actionName = pluginTaskName('install', module.name, 'Module')
setup = configureInstallModuleTask(taskName(task, node, actionName), project, setup, node, module)
}
// install plugins
for (Map.Entry<String, Object> plugin : node.config.plugins.entrySet()) {
String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin')
@@ -138,6 +149,7 @@ class ClusterFormationTasks {
by the source tree. If it isn't then Bad Things(TM) will happen. */
Task extract
switch (node.config.distribution) {
case 'integ-test-zip':
case 'zip':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
@@ -286,6 +298,20 @@ class ClusterFormationTasks {
return copyPlugins
}
static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) {
if (node.config.distribution != 'integ-test-zip') {
throw new GradleException("Module ${module.path} not allowed be installed distributions other than integ-test-zip because they should already have all modules bundled!")
}
if (module.plugins.hasPlugin(PluginBuildPlugin) == false) {
throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin")
}
Copy installModule = project.tasks.create(name, Copy.class)
installModule.dependsOn(setup)
installModule.into(new File(node.homeDir, "modules/${module.name}"))
installModule.from({ project.zipTree(module.tasks.bundlePlugin.outputs.files.singleFile) })
return installModule
}
static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Object plugin) {
FileCollection pluginZip
if (plugin instanceof Project) {
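
Because configureInstallModuleTask simply unzips the module bundle into the node's home, a test node prepared this way ends up with roughly this layout (a sketch; the module name is illustrative):

elasticsearch-${version}/
    lib/
    modules/
        lang-expression/                    # copied in by the install<Name>Module task
            lang-expression-${version}.jar
            plugin-descriptor.properties
    plugins/                                # only plugins installed via plugin() land here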


@@ -173,6 +173,7 @@ class NodeInfo {
static File homeDir(File baseDir, String distro) {
String path
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'tar':
path = "elasticsearch-${VersionProperties.elasticsearch}"
@@ -188,8 +189,8 @@
}
static File confDir(File baseDir, String distro) {
String path
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'tar':
return new File(homeDir(baseDir, distro), 'config')


@@ -29,6 +29,7 @@ public class RestTestPlugin implements Plugin<Project> {
project.pluginManager.apply(StandaloneTestBasePlugin)
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.cluster.distribution = 'zip' // rest tests should run with the real zip
integTest.mustRunAfter(project.precommit)
project.check.dependsOn(integTest)
}
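
A consuming project just applies the plugin and gets an integTest task preconfigured against the real zip; a minimal sketch of such a build.gradle (the includePackaged switch is illustrative here, borrowed from the distribution projects later in this diff):

apply plugin: 'elasticsearch.rest-test'

integTest {
    includePackaged true    // opt in to the packaged REST test suite as well
}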


@@ -72,14 +72,14 @@ public class NodeInfo extends BaseNodeResponse {
private HttpInfo http;
@Nullable
private PluginsInfo plugins;
private PluginsAndModules plugins;
NodeInfo() {
}
public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings,
@Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsInfo plugins) {
@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) {
super(node);
this.version = version;
this.build = build;
@@ -172,7 +172,7 @@ public class NodeInfo extends BaseNodeResponse {
}
@Nullable
public PluginsInfo getPlugins() {
public PluginsAndModules getPlugins() {
return this.plugins;
}
@@ -217,7 +217,8 @@ public class NodeInfo extends BaseNodeResponse {
http = HttpInfo.readHttpInfo(in);
}
if (in.readBoolean()) {
plugins = PluginsInfo.readPluginsInfo(in);
plugins = new PluginsAndModules();
plugins.readFrom(in);
}
}


@@ -0,0 +1,115 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.node.info;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.PluginInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Information about plugins and modules
*/
public class PluginsAndModules implements Streamable, ToXContent {
private List<PluginInfo> plugins;
private List<PluginInfo> modules;
public PluginsAndModules() {
plugins = new ArrayList<>();
modules = new ArrayList<>();
}
/**
* Returns the list of plugins, ordered by name
*/
public List<PluginInfo> getPluginInfos() {
List<PluginInfo> plugins = new ArrayList<>(this.plugins);
Collections.sort(plugins, (p1, p2) -> p1.getName().compareTo(p2.getName()));
return plugins;
}
/**
* Returns the list of modules, ordered by name
*/
public List<PluginInfo> getModuleInfos() {
List<PluginInfo> modules = new ArrayList<>(this.modules);
Collections.sort(modules, (p1, p2) -> p1.getName().compareTo(p2.getName()));
return modules;
}
public void addPlugin(PluginInfo info) {
plugins.add(info);
}
public void addModule(PluginInfo info) {
modules.add(info);
}
@Override
public void readFrom(StreamInput in) throws IOException {
if (plugins.isEmpty() == false || modules.isEmpty() == false) {
throw new IllegalStateException("instance is already populated");
}
int plugins_size = in.readInt();
for (int i = 0; i < plugins_size; i++) {
plugins.add(PluginInfo.readFromStream(in));
}
int modules_size = in.readInt();
for (int i = 0; i < modules_size; i++) {
modules.add(PluginInfo.readFromStream(in));
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeInt(plugins.size());
for (PluginInfo plugin : getPluginInfos()) {
plugin.writeTo(out);
}
out.writeInt(modules.size());
for (PluginInfo module : getModuleInfos()) {
module.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray("plugins");
for (PluginInfo pluginInfo : getPluginInfos()) {
pluginInfo.toXContent(builder, params);
}
builder.endArray();
// TODO: not ideal, make a better api for this (e.g. with jar metadata, and so on)
builder.startArray("modules");
for (PluginInfo moduleInfo : getModuleInfos()) {
moduleInfo.toXContent(builder, params);
}
builder.endArray();
return builder;
}
}
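
Rendered into a nodes-info response, the two arrays this class emits sit side by side, shaped roughly as follows (per-entry fields follow PluginInfo.toXContent; values and elisions are illustrative):

"plugins" : [
    { "name" : "my-plugin", "version" : "1.0", "description" : "...", "jvm" : true, "site" : false }
],
"modules" : [
    { "name" : "lang-expression", ... },
    { "name" : "lang-groovy", ... }
]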


@@ -1,101 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.node.info;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.plugins.PluginInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class PluginsInfo implements Streamable, ToXContent {
static final class Fields {
static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins");
}
private List<PluginInfo> infos;
public PluginsInfo() {
infos = new ArrayList<>();
}
public PluginsInfo(int size) {
infos = new ArrayList<>(size);
}
/**
* @return an ordered list based on plugins name
*/
public List<PluginInfo> getInfos() {
Collections.sort(infos, new Comparator<PluginInfo>() {
@Override
public int compare(final PluginInfo o1, final PluginInfo o2) {
return o1.getName().compareTo(o2.getName());
}
});
return infos;
}
public void add(PluginInfo info) {
infos.add(info);
}
public static PluginsInfo readPluginsInfo(StreamInput in) throws IOException {
PluginsInfo infos = new PluginsInfo();
infos.readFrom(in);
return infos;
}
@Override
public void readFrom(StreamInput in) throws IOException {
int plugins_size = in.readInt();
for (int i = 0; i < plugins_size; i++) {
infos.add(PluginInfo.readFromStream(in));
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeInt(infos.size());
for (PluginInfo plugin : getInfos()) {
plugin.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray(Fields.PLUGINS);
for (PluginInfo pluginInfo : getInfos()) {
pluginInfo.toXContent(builder, params);
}
builder.endArray();
return builder;
}
}


@@ -74,7 +74,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
versions.add(nodeResponse.nodeInfo().getVersion());
process.addNodeStats(nodeResponse.nodeStats());
jvm.addNodeInfoStats(nodeResponse.nodeInfo(), nodeResponse.nodeStats());
plugins.addAll(nodeResponse.nodeInfo().getPlugins().getInfos());
plugins.addAll(nodeResponse.nodeInfo().getPlugins().getPluginInfos());
// now do the stats that should be deduped by hardware (implemented by ip deduping)
TransportAddress publishAddress = nodeResponse.nodeInfo().getTransport().address().publishAddress();


@@ -131,34 +131,48 @@ final class Security {
@SuppressForbidden(reason = "proper use of URL")
static Map<String,Policy> getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException {
Map<String,Policy> map = new HashMap<>();
// collect up lists of plugins and modules
List<Path> pluginsAndModules = new ArrayList<>();
if (Files.exists(environment.pluginsFile())) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
for (Path plugin : stream) {
Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
if (Files.exists(policyFile)) {
// first get a list of URLs for the plugins' jars:
// we resolve symlinks so map is keyed on the normalized codebase name
List<URL> codebases = new ArrayList<>();
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
for (Path jar : jarStream) {
codebases.add(jar.toRealPath().toUri().toURL());
}
}
// parse the plugin's policy file into a set of permissions
Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));
// consult this policy for each of the plugin's jars:
for (URL url : codebases) {
if (map.put(url.getFile(), policy) != null) {
// just be paranoid ok?
throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
}
}
pluginsAndModules.add(plugin);
}
}
}
if (Files.exists(environment.modulesFile())) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.modulesFile())) {
for (Path plugin : stream) {
pluginsAndModules.add(plugin);
}
}
}
// now process each one
for (Path plugin : pluginsAndModules) {
Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
if (Files.exists(policyFile)) {
// first get a list of URLs for the plugins' jars:
// we resolve symlinks so map is keyed on the normalized codebase name
List<URL> codebases = new ArrayList<>();
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
for (Path jar : jarStream) {
codebases.add(jar.toRealPath().toUri().toURL());
}
}
// parse the plugin's policy file into a set of permissions
Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));
// consult this policy for each of the plugin's jars:
for (URL url : codebases) {
if (map.put(url.getFile(), policy) != null) {
// just be paranoid ok?
throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
}
}
}
}
return Collections.unmodifiableMap(map);
}
@@ -228,6 +242,7 @@ final class Security {
// read-only dirs
addPath(policy, "path.home", environment.binFile(), "read,readlink");
addPath(policy, "path.home", environment.libFile(), "read,readlink");
addPath(policy, "path.home", environment.modulesFile(), "read,readlink");
addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink");
addPath(policy, "path.conf", environment.configFile(), "read,readlink");
addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink");


@@ -125,7 +125,7 @@ public class TransportClient extends AbstractClient {
.put(CLIENT_TYPE_SETTING, CLIENT_TYPE)
.build();
PluginsService pluginsService = new PluginsService(settings, null, pluginClasses);
PluginsService pluginsService = new PluginsService(settings, null, null, pluginClasses);
this.settings = pluginsService.updatedSettings();
Version version = Version.CURRENT;


@@ -58,6 +58,8 @@ public class Environment {
private final Path pluginsFile;
private final Path modulesFile;
private final Path sharedDataFile;
/** location of bin/, used by plugin manager */
@@ -157,6 +159,7 @@ public class Environment {
binFile = homeFile.resolve("bin");
libFile = homeFile.resolve("lib");
modulesFile = homeFile.resolve("modules");
}
/**
@@ -275,6 +278,10 @@ public class Environment {
return libFile;
}
public Path modulesFile() {
return modulesFile;
}
public Path logsFile() {
return logsFile;
}


@@ -147,7 +147,7 @@ public class Node implements Releasable {
tmpEnv.configFile(), Arrays.toString(tmpEnv.dataFiles()), tmpEnv.logsFile(), tmpEnv.pluginsFile());
}
this.pluginsService = new PluginsService(tmpSettings, tmpEnv.pluginsFile(), classpathPlugins);
this.pluginsService = new PluginsService(tmpSettings, tmpEnv.modulesFile(), tmpEnv.pluginsFile(), classpathPlugins);
this.settings = pluginsService.updatedSettings();
// create the environment based on the finalized (processed) view of the settings
this.environment = new Environment(this.settings());


@@ -71,7 +71,7 @@ public abstract class Plugin {
}
/**
* Called before a new index is created on a node. The given module can be used to regsiter index-leve
* Called before a new index is created on a node. The given module can be used to register index-level
* extensions.
*/
public void onIndexModule(IndexModule indexModule) {}


@@ -66,6 +66,10 @@ public class PluginManager {
"plugin",
"plugin.bat",
"service.bat"));
static final Set<String> MODULES = unmodifiableSet(newHashSet(
"lang-expression",
"lang-groovy"));
static final Set<String> OFFICIAL_PLUGINS = unmodifiableSet(newHashSet(
"analysis-icu",
@@ -78,8 +82,6 @@ public class PluginManager {
"discovery-ec2",
"discovery-gce",
"discovery-multicast",
"lang-expression",
"lang-groovy",
"lang-javascript",
"lang-python",
"mapper-attachments",
@@ -221,6 +223,12 @@ public class PluginManager {
PluginInfo info = PluginInfo.readFromProperties(root);
terminal.println(VERBOSE, "%s", info);
// don't let luser install plugin as a module...
// (they might be unavoidably in maven central and are packaged up the same way)
if (MODULES.contains(info.getName())) {
throw new IOException("plugin '" + info.getName() + "' cannot be installed like this, it is a system module");
}
// update name in handle based on 'name' property found in descriptor file
pluginHandle = new PluginHandle(info.getName(), pluginHandle.version, pluginHandle.user);
final Path extractLocation = pluginHandle.extractedDir(environment);


@@ -25,9 +25,8 @@ import org.apache.lucene.analysis.util.TokenizerFactory;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
@@ -39,10 +38,7 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexEventListener;
import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@@ -69,10 +65,10 @@ import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
public class PluginsService extends AbstractComponent {
/**
* We keep around a list of plugins
* We keep around a list of plugins and modules
*/
private final List<Tuple<PluginInfo, Plugin>> plugins;
private final PluginsInfo info;
private final PluginsAndModules info;
private final Map<Plugin, List<OnModuleReference>> onModuleReferences;
@@ -89,13 +85,15 @@ public class PluginsService extends AbstractComponent {
/**
* Constructs a new PluginService
* @param settings The settings of the system
* @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem
* @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem
* @param classpathPlugins Plugins that exist in the classpath which should be loaded
*/
public PluginsService(Settings settings, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
public PluginsService(Settings settings, Path modulesDirectory, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
super(settings);
info = new PluginsAndModules();
List<Tuple<PluginInfo, Plugin>> tupleBuilder = new ArrayList<>();
List<Tuple<PluginInfo, Plugin>> pluginsLoaded = new ArrayList<>();
// first we load plugins that are on the classpath. this is for tests and transport clients
for (Class<? extends Plugin> pluginClass : classpathPlugins) {
@@ -104,24 +102,39 @@ public class PluginsService extends AbstractComponent {
if (logger.isTraceEnabled()) {
logger.trace("plugin loaded from classpath [{}]", pluginInfo);
}
tupleBuilder.add(new Tuple<>(pluginInfo, plugin));
pluginsLoaded.add(new Tuple<>(pluginInfo, plugin));
info.addPlugin(pluginInfo);
}
// load modules
if (modulesDirectory != null) {
try {
List<Bundle> bundles = getModuleBundles(modulesDirectory);
List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
pluginsLoaded.addAll(loaded);
for (Tuple<PluginInfo, Plugin> module : loaded) {
info.addModule(module.v1());
}
} catch (IOException ex) {
throw new IllegalStateException("Unable to initialize modules", ex);
}
}
// now, find all the ones that are in plugins/
if (pluginsDirectory != null) {
try {
List<Bundle> bundles = getPluginBundles(pluginsDirectory);
tupleBuilder.addAll(loadBundles(bundles));
List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
pluginsLoaded.addAll(loaded);
for (Tuple<PluginInfo, Plugin> plugin : loaded) {
info.addPlugin(plugin.v1());
}
} catch (IOException ex) {
throw new IllegalStateException("Unable to initialize plugins", ex);
}
}
plugins = Collections.unmodifiableList(tupleBuilder);
info = new PluginsInfo();
for (Tuple<PluginInfo, Plugin> tuple : plugins) {
info.add(tuple.v1());
}
plugins = Collections.unmodifiableList(pluginsLoaded);
// We need to build a List of jvm and site plugins for checking mandatory plugins
Map<String, Plugin> jvmPlugins = new HashMap<>();
@@ -151,7 +164,18 @@ public class PluginsService extends AbstractComponent {
}
}
logger.info("loaded {}, sites {}", jvmPlugins.keySet(), sitePlugins);
// we don't log jars in lib/; we really shouldn't log modules,
// but for now: just be transparent so we can debug any potential issues
Set<String> moduleNames = new HashSet<>();
Set<String> jvmPluginNames = new HashSet<>();
for (PluginInfo moduleInfo : info.getModuleInfos()) {
moduleNames.add(moduleInfo.getName());
}
for (PluginInfo pluginInfo : info.getPluginInfos()) {
jvmPluginNames.add(pluginInfo.getName());
}
logger.info("modules {}, plugins {}, sites {}", moduleNames, jvmPluginNames, sitePlugins);
Map<Plugin, List<OnModuleReference>> onModuleReferences = new HashMap<>();
for (Plugin plugin : jvmPlugins.values()) {
@@ -160,6 +184,10 @@ public class PluginsService extends AbstractComponent {
if (!method.getName().equals("onModule")) {
continue;
}
// this is a deprecated final method, so all Plugin subclasses have it
if (method.getParameterTypes().length == 1 && method.getParameterTypes()[0].equals(IndexModule.class)) {
continue;
}
if (method.getParameterTypes().length == 0 || method.getParameterTypes().length > 1) {
logger.warn("Plugin: {} implementing onModule with no parameters or more than one parameter", plugin.name());
continue;
@@ -178,7 +206,7 @@ public class PluginsService extends AbstractComponent {
this.onModuleReferences = Collections.unmodifiableMap(onModuleReferences);
}
public List<Tuple<PluginInfo, Plugin>> plugins() {
private List<Tuple<PluginInfo, Plugin>> plugins() {
return plugins;
}
@@ -249,9 +277,9 @@ public class PluginsService extends AbstractComponent {
}
}
/**
* Get information about plugins (jvm and site plugins).
* Get information about plugins and modules
*/
public PluginsInfo info() {
public PluginsAndModules info() {
return info;
}
@@ -262,6 +290,40 @@ public class PluginsService extends AbstractComponent {
List<URL> urls = new ArrayList<>();
}
// similar in impl to getPluginBundles, but DO NOT try to make them share code.
// we don't need to inherit all the leniency, and things are different enough.
static List<Bundle> getModuleBundles(Path modulesDirectory) throws IOException {
// damn leniency
if (Files.notExists(modulesDirectory)) {
return Collections.emptyList();
}
List<Bundle> bundles = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(modulesDirectory)) {
for (Path module : stream) {
if (FileSystemUtils.isHidden(module)) {
continue; // skip over .DS_Store etc
}
PluginInfo info = PluginInfo.readFromProperties(module);
if (!info.isJvm()) {
throw new IllegalStateException("modules must be jvm plugins: " + info);
}
if (!info.isIsolated()) {
throw new IllegalStateException("modules must be isolated: " + info);
}
Bundle bundle = new Bundle();
bundle.plugins.add(info);
// gather urls for jar files
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(module, "*.jar")) {
for (Path jar : jarStream) {
bundle.urls.add(jar.toUri().toURL());
}
}
bundles.add(bundle);
}
}
return bundles;
}
static List<Bundle> getPluginBundles(Path pluginsDirectory) throws IOException {
ESLogger logger = Loggers.getLogger(PluginsService.class);
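
getModuleBundles expects every directory under modules/ to carry the same plugin-descriptor.properties a plugin ships with, and the jvm/isolated checks above enforce it; a descriptor passing them would look roughly like this (values illustrative):

name=lang-expression
description=Lucene expressions integration for Elasticsearch
version=3.0.0-SNAPSHOT
elasticsearch.version=3.0.0-SNAPSHOT
java.version=1.8
classname=org.elasticsearch.script.expression.ExpressionPlugin
jvm=true
isolated=true
site=false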


@@ -95,7 +95,7 @@ public class RestPluginsAction extends AbstractCatAction {
for (DiscoveryNode node : nodes) {
NodeInfo info = nodesInfo.getNodesMap().get(node.id());
for (PluginInfo pluginInfo : info.getPlugins().getInfos()) {
for (PluginInfo pluginInfo : info.getPlugins().getPluginInfos()) {
table.startRow();
table.addCell(node.id());
table.addCell(node.name());


@@ -43,8 +43,6 @@ OFFICIAL PLUGINS
- discovery-ec2
- discovery-gce
- discovery-multicast
- lang-expression
- lang-groovy
- lang-javascript
- lang-python
- mapper-attachments


@@ -20,7 +20,7 @@
package org.elasticsearch.plugins;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@@ -259,14 +259,14 @@ public class PluginInfoTests extends ESTestCase {
}
public void testPluginListSorted() {
PluginsInfo pluginsInfo = new PluginsInfo(5);
pluginsInfo.add(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.add(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));
PluginsAndModules pluginsInfo = new PluginsAndModules();
pluginsInfo.addPlugin(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
pluginsInfo.addPlugin(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));
final List<PluginInfo> infos = pluginsInfo.getInfos();
final List<PluginInfo> infos = pluginsInfo.getPluginInfos();
List<String> names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList());
assertThat(names, contains("a", "b", "c", "d", "e"));
}


@@ -81,7 +81,7 @@ public class PluginsServiceTests extends ESTestCase {
}
static PluginsService newPluginsService(Settings settings, Class<? extends Plugin>... classpathPlugins) {
return new PluginsService(settings, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
return new PluginsService(settings, null, new Environment(settings).pluginsFile(), Arrays.asList(classpathPlugins));
}
public void testAdditionalSettings() {


@@ -39,20 +39,51 @@ buildscript {
}
}
allprojects {
project.ext {
// this is common configuration for distributions, but we also add it here for the license check to use
dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
// this is common configuration for distributions, but we also add it here for the license check to use
ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
/*****************************************************************************
* Modules *
*****************************************************************************/
task buildModules(type: Copy) {
into 'build/modules'
}
// we create the buildModules task above so the distribution subprojects can
// depend on it, but we don't actually configure it until projects are evaluated
// so it can depend on the bundling of plugins (ie modules must have been configured)
project.gradle.projectsEvaluated {
project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each { Project module ->
buildModules {
dependsOn module.bundlePlugin
into(module.name) {
from { zipTree(module.bundlePlugin.outputs.files.singleFile) }
}
}
configure(subprojects.findAll { it.name != 'integ-test-zip' }) { Project distribution ->
distribution.integTest.mustRunAfter(module.integTest)
}
}
}
// make sure we have a clean task since we aren't a java project, but we have tasks that
// put stuff in the build dir
task clean(type: Delete) {
delete 'build'
}
subprojects {
/*****************************************************************************
* Rest test config *
*****************************************************************************/
apply plugin: 'elasticsearch.rest-test'
project.integTest {
dependsOn(project.assemble)
includePackaged true
cluster {
distribution = project.name
}
}
/*****************************************************************************
@@ -81,7 +112,12 @@ subprojects {
libFiles = copySpec {
into 'lib'
from project(':core').jar
from dependencyFiles
from project(':distribution').dependencyFiles
}
modulesFiles = copySpec {
into 'modules'
from project(':distribution').buildModules
}
configFiles = copySpec {
@@ -103,7 +139,7 @@ subprojects {
/*****************************************************************************
* Zip and tgz configuration *
*****************************************************************************/
configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
configure(subprojects.findAll { ['zip', 'tar', 'integ-test-zip'].contains(it.name) }) {
project.ext.archivesFiles = copySpec {
into("elasticsearch-${version}") {
with libFiles
@@ -121,6 +157,9 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
from('../src/main/resources') {
include 'bin/*.exe'
}
if (project.name != 'integ-test-zip') {
with modulesFiles
}
}
}
}
@@ -143,7 +182,7 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
* directly from the filesystem. It doesn't want to process them through
* MavenFilteringHack or any other copy-style action.
*/
configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) {
integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
File packagingFiles = new File(buildDir, 'packaging')
project.ext.packagingFiles = packagingFiles
@@ -233,6 +272,7 @@ configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
user 'root'
permissionGroup 'root'
with libFiles
with modulesFiles
with copySpec {
with commonFiles
if (project.name == 'deb') {


@@ -18,7 +18,7 @@
*/
task buildDeb(type: Deb) {
dependsOn dependencyFiles, preparePackagingFiles
dependsOn preparePackagingFiles
baseName 'elasticsearch' // this is what pom generation uses for artifactId
// Follow elasticsearch's deb file naming convention
archiveName "${packageName}-${project.version}.deb"
@@ -44,6 +44,4 @@ integTest {
skip the test if they aren't around. */
enabled = new File('/usr/bin/dpkg-deb').exists() || // Standard location
new File('/usr/local/bin/dpkg-deb').exists() // Homebrew location
dependsOn buildDeb
clusterConfig.distribution = 'deb'
}


@@ -0,0 +1,31 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
task buildZip(type: Zip) {
baseName = 'elasticsearch'
with archivesFiles
}
artifacts {
'default' buildZip
archives buildZip
}
integTest.dependsOn buildZip


@@ -0,0 +1,38 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import java.io.IOException;
/** Rest integration test. Runs against external cluster in 'mvn verify' */
public class RestIT extends ESRestTestCase {
public RestIT(RestTestCandidate testCandidate) {
super(testCandidate);
}
// we run them all sequentially: start simple!
@ParametersFactory
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
return createParameters(0, 1);
}
}


@@ -42,6 +42,4 @@ integTest {
enabled = new File('/bin/rpm').exists() || // Standard location
new File('/usr/bin/rpm').exists() || // Debian location
new File('/usr/local/bin/rpm').exists() // Homebrew location
dependsOn buildRpm
clusterConfig.distribution = 'rpm'
}


@@ -17,7 +17,7 @@
* under the License.
*/
task buildTar(type: Tar, dependsOn: dependencyFiles) {
task buildTar(type: Tar) {
baseName = 'elasticsearch'
extension = 'tar.gz'
with archivesFiles
@@ -28,8 +28,3 @@ artifacts {
'default' buildTar
archives buildTar
}
integTest {
dependsOn buildTar
clusterConfig.distribution = 'tar'
}


@@ -17,7 +17,7 @@
* under the License.
*/
task buildZip(type: Zip, dependsOn: dependencyFiles) {
task buildZip(type: Zip) {
baseName = 'elasticsearch'
with archivesFiles
}

modules/build.gradle (new file, 46 lines)

@@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
subprojects {
apply plugin: 'elasticsearch.esplugin'
esplugin {
// for local ES plugins, the name of the plugin is the same as the directory
name project.name
}
if (project.file('src/main/packaging').exists()) {
throw new InvalidModelException("Modules cannot contain packaging files")
}
if (project.file('src/main/bin').exists()) {
throw new InvalidModelException("Modules cannot contain bin files")
}
if (project.file('src/main/config').exists()) {
throw new InvalidModelException("Modules cannot contain config files")
}
project.afterEvaluate {
if (esplugin.isolated == false) {
throw new InvalidModelException("Modules cannot disable isolation")
}
if (esplugin.jvm == false) {
throw new InvalidModelException("Modules must be jvm plugins")
}
}
}
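
With those guard rails in place, an individual module's build file stays tiny; a hypothetical modules/lang-expression/build.gradle might hold no more than this (classname and dependency illustrative):

esplugin {
    description 'Lucene expressions integration for Elasticsearch'
    classname 'org.elasticsearch.script.expression.ExpressionPlugin'
}

dependencies {
    compile "org.apache.lucene:lucene-expressions:${versions.lucene}"
}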


@@ -10,5 +10,5 @@
- do:
nodes.info: {}
- match: { nodes.$master.plugins.0.name: lang-expression }
- match: { nodes.$master.plugins.0.jvm: true }
- match: { nodes.$master.modules.0.name: lang-expression }
- match: { nodes.$master.modules.0.jvm: true }

Some files were not shown because too many files have changed in this diff.