detect jar hell before installing a plugin

Robert Muir 2015-06-30 16:42:01 -04:00
parent 182c59f5b4
commit a839644826
2 changed files with 80 additions and 4 deletions
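Taken together, the two files make JarHell's duplicate-class check publicly callable on an arbitrary set of URLs, and have PluginManager run it over the current classpath plus the jars of the plugin it is about to install, so a conflict aborts the installation before anything is copied into place. A minimal sketch of that calling pattern follows; the sketch class and its collectPluginJars helper are hypothetical, and only JarHell.checkJarHell(URL[]) comes from this commit.

    import java.io.IOException;
    import java.net.URL;
    import java.net.URLClassLoader;
    import java.nio.file.FileVisitResult;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.SimpleFileVisitor;
    import java.nio.file.attribute.BasicFileAttributes;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    import org.elasticsearch.bootstrap.JarHell;

    // Hypothetical sketch of the pre-install check this commit introduces.
    class PreInstallJarHellSketch {

        // Collect every *.jar found under the unpacked plugin directory.
        static List<URL> collectPluginJars(Path pluginDir) throws IOException {
            final List<URL> jars = new ArrayList<>();
            Files.walkFileTree(pluginDir, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (file.toString().endsWith(".jar")) {
                        jars.add(file.toUri().toURL());
                    }
                    return FileVisitResult.CONTINUE;
                }
            });
            return jars;
        }

        static void preInstallCheck(Path unpackedPlugin) throws Exception {
            List<URL> urls = new ArrayList<>();
            ClassLoader loader = PreInstallJarHellSketch.class.getClassLoader();
            if (loader instanceof URLClassLoader) {
                // jars and classes directories already on the running JVM's classpath
                urls.addAll(Arrays.asList(((URLClassLoader) loader).getURLs()));
            }
            urls.addAll(collectPluginJars(unpackedPlugin));
            // throws IllegalStateException if any class is provided twice across the set
            JarHell.checkJarHell(urls.toArray(new URL[0]));
        }
    }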

org/elasticsearch/bootstrap/JarHell.java

@@ -41,21 +41,29 @@ import java.util.jar.JarFile;
import java.util.jar.Manifest;
/** Simple check for duplicate class files across the classpath */
class JarHell {
public class JarHell {
/**
* Checks the current classloader for duplicate classes
* @throws IllegalStateException if jar hell was found
*/
@SuppressForbidden(reason = "needs JarFile for speed, just reading entries")
static void checkJarHell() throws Exception {
public static void checkJarHell() throws Exception {
ClassLoader loader = JarHell.class.getClassLoader();
if (loader instanceof URLClassLoader == false) {
return;
}
checkJarHell(((URLClassLoader)loader).getURLs());
}
/**
* Checks the set of URLs for duplicate classes
* @throws IllegalStateException if jar hell was found
*/
@SuppressForbidden(reason = "needs JarFile for speed, just reading entries")
public static void checkJarHell(URL urls[]) throws Exception {
final Map<String,URL> clazzes = new HashMap<>(32768);
Set<String> seenJars = new HashSet<>();
for (final URL url : ((URLClassLoader)loader).getURLs()) {
for (final URL url : urls) {
String path = URLDecoder.decode(url.getPath(), "UTF-8");
if (path.endsWith(".jar")) {
if (!seenJars.add(path)) {
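The JarHell hunk is truncated here; the scan that follows (only its loop header changed in this commit, iterating the supplied urls instead of the classloader's URLs) records which URL provides each class file and fails as soon as two providers collide. Below is a simplified, self-contained sketch of that scan, assuming file: URLs and leaving out the classes-directory handling the real class also performs; the class name and messages are illustrative only.

    import java.net.URL;
    import java.net.URLDecoder;
    import java.util.Enumeration;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    // Simplified sketch of the duplicate-class scan: map each .class entry to the
    // jar that provides it and fail loudly on the first duplicate.
    class DuplicateClassScanSketch {
        static void scan(URL[] urls) throws Exception {
            Map<String, URL> clazzes = new HashMap<>();
            Set<String> seenJars = new HashSet<>();
            for (URL url : urls) {
                String path = URLDecoder.decode(url.getPath(), "UTF-8");
                if (path.endsWith(".jar")) {
                    if (!seenJars.add(path)) {
                        throw new IllegalStateException("jar hell! duplicate jar on classpath: " + path);
                    }
                    try (JarFile jar = new JarFile(path)) {
                        Enumeration<JarEntry> entries = jar.entries();
                        while (entries.hasMoreElements()) {
                            String entry = entries.nextElement().getName();
                            if (entry.endsWith(".class")) {
                                URL previous = clazzes.put(entry, url);
                                if (previous != null) {
                                    throw new IllegalStateException("jar hell! class " + entry
                                            + " found in both " + previous + " and " + url);
                                }
                            }
                        }
                    }
                }
                // the real JarHell also walks classes directories on the classpath; omitted here
            }
        }
    }

Because the check throws rather than returning a result, the caller only needs a try/catch around a single call, which is exactly what the PluginManager change below does.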

org/elasticsearch/plugins/PluginManager.java

@@ -22,8 +22,10 @@ package org.elasticsearch.plugins;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.*;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.collect.Tuple;
@@ -35,14 +37,18 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.PosixFileAttributeView;
import java.nio.file.attribute.PosixFilePermission;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import static org.elasticsearch.common.Strings.hasLength;
import static org.elasticsearch.common.io.FileSystemUtils.moveFilesWithoutOverwriting;
@@ -147,6 +153,41 @@ public class PluginManager {
if (!downloaded) {
throw new IOException("failed to download out of all possible locations..., use --verbose to get detailed information");
}
// unzip plugin to a temp dir
Path tmp = unzipToTemporary(pluginFile);
// create list of current jars in classpath
final List<URL> jars = new ArrayList<>();
ClassLoader loader = PluginManager.class.getClassLoader();
if (loader instanceof URLClassLoader) {
for (URL url : ((URLClassLoader) loader).getURLs()) {
jars.add(url);
}
}
// add any jars we find in the plugin to the list
Files.walkFileTree(tmp, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (file.toString().endsWith(".jar")) {
jars.add(file.toUri().toURL());
}
return FileVisitResult.CONTINUE;
}
});
// check combined (current classpath + new jars to-be-added)
try {
JarHell.checkJarHell(jars.toArray(new URL[0]));
} catch (Exception ex) {
throw new RuntimeException(ex);
}
// cleanup
IOUtils.rm(tmp);
// TODO: we have a tmpdir made above, so avoid zipfilesystem
try (FileSystem zipFile = FileSystems.newFileSystem(pluginFile, null)) {
for (final Path root : zipFile.getRootDirectories() ) {
final Path[] topLevelFiles = FileSystemUtils.files(root);
@@ -260,6 +301,33 @@ public class PluginManager {
}
}
private Path unzipToTemporary(Path zip) throws IOException {
Path tmp = Files.createTempDirectory(environment.tmpFile(), null);
try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) {
ZipEntry entry;
byte[] buffer = new byte[8192];
while ((entry = zipInput.getNextEntry()) != null) {
Path targetFile = tmp.resolve(entry.getName());
// be on the safe side: do not rely on directories always being extracted
// before their children (it seems sensible, but is it guaranteed?)
Files.createDirectories(targetFile.getParent());
if (!entry.isDirectory()) {
OutputStream out = Files.newOutputStream(targetFile);
int len;
while((len = zipInput.read(buffer)) >= 0) {
out.write(buffer, 0, len);
}
out.close();
}
zipInput.closeEntry();
}
}
return tmp;
}
public void removePlugin(String name) throws IOException {
if (name == null) {
throw new IllegalArgumentException("plugin name must be supplied with --remove [name].");