Remove unneeded class loading stuff from hdfs plugin

Ryan Ernst 2015-12-18 17:01:38 -08:00
parent 91fe99a7f6
commit c2c5081830
3 changed files with 5 additions and 123 deletions


@@ -40,6 +40,7 @@ import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.RepositoriesModule;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.hdfs.HdfsRepository;
//
// Note this plugin is somewhat special as Hadoop itself loads a number of libraries and thus requires a number of permissions to run even in client mode.
@@ -79,95 +80,6 @@ public class HdfsPlugin extends Plugin {
    @SuppressWarnings("unchecked")
    public void onModule(RepositoriesModule repositoriesModule) {
        String baseLib = Utils.detectLibFolder();
        List<URL> cp = getHadoopClassLoaderPath(baseLib);
        ClassLoader hadoopCL = URLClassLoader.newInstance(cp.toArray(new URL[cp.size()]), getClass().getClassLoader());
        Class<? extends Repository> repository = null;
        try {
            repository = (Class<? extends Repository>) hadoopCL.loadClass("org.elasticsearch.repositories.hdfs.HdfsRepository");
        } catch (ClassNotFoundException cnfe) {
            throw new IllegalStateException("Cannot load plugin class; is the plugin class setup correctly?", cnfe);
        }
        repositoriesModule.registerRepository("hdfs", repository, BlobStoreIndexShardRepository.class);
        Loggers.getLogger(HdfsPlugin.class).info("Loaded Hadoop [{}] libraries from {}", getHadoopVersion(hadoopCL), baseLib);
    }

    protected List<URL> getHadoopClassLoaderPath(String baseLib) {
        List<URL> cp = new ArrayList<>();
        // add plugin internal jar
        discoverJars(createURI(baseLib, "internal-libs"), cp, false);
        // add Hadoop jars
        discoverJars(createURI(baseLib, "hadoop-libs"), cp, true);
        return cp;
    }

    private String getHadoopVersion(ClassLoader hadoopCL) {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // unprivileged code such as scripts do not have SpecialPermission
            sm.checkPermission(new SpecialPermission());
        }
        return AccessController.doPrivileged(new PrivilegedAction<String>() {
            @Override
            public String run() {
                // Hadoop 2 relies on TCCL to determine the version
                ClassLoader tccl = Thread.currentThread().getContextClassLoader();
                try {
                    Thread.currentThread().setContextClassLoader(hadoopCL);
                    return doGetHadoopVersion(hadoopCL);
                } finally {
                    Thread.currentThread().setContextClassLoader(tccl);
                }
            }
        }, Utils.hadoopACC());
    }

    private String doGetHadoopVersion(ClassLoader hadoopCL) {
        String version = "Unknown";
        Class<?> clz = null;
        try {
            clz = hadoopCL.loadClass("org.apache.hadoop.util.VersionInfo");
        } catch (ClassNotFoundException cnfe) {
            // unknown
        }
        if (clz != null) {
            try {
                Method method = clz.getMethod("getVersion");
                version = method.invoke(null).toString();
            } catch (Exception ex) {
                // class has changed, ignore
            }
        }
        return version;
    }

    private URI createURI(String base, String suffix) {
        String location = base + suffix;
        try {
            return new URI(location);
        } catch (URISyntaxException ex) {
            throw new IllegalStateException(String.format(Locale.ROOT, "Cannot detect plugin folder; [%s] seems invalid", location), ex);
        }
    }

    @SuppressForbidden(reason = "discover nested jar")
    private void discoverJars(URI libPath, List<URL> cp, boolean optional) {
        try {
            Path[] jars = FileSystemUtils.files(PathUtils.get(libPath), "*.jar");
            for (Path path : jars) {
                cp.add(path.toUri().toURL());
            }
        } catch (IOException ex) {
            if (!optional) {
                throw new IllegalStateException("Cannot compute plugin classpath", ex);
            }
        }
        repositoriesModule.registerRepository("hdfs", HdfsRepository.class, BlobStoreIndexShardRepository.class);
    }
}
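With the classloader plumbing removed, the plugin body reduces to a direct registration of the repository type. Below is a minimal sketch of the resulting class, assuming the Elasticsearch 2.x plugin API visible in this diff; the name() and description() values are illustrative placeholders, not taken from this commit.

package org.elasticsearch.plugin.hadoop.hdfs;

import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.RepositoriesModule;
import org.elasticsearch.repositories.hdfs.HdfsRepository;

public class HdfsPlugin extends Plugin {

    // Placeholder metadata: the real name/description strings are not part of this diff.
    @Override
    public String name() {
        return "repository-hdfs";
    }

    @Override
    public String description() {
        return "HDFS snapshot/restore repository";
    }

    // Previously this method built a child URLClassLoader over the bundled Hadoop jars and
    // loaded HdfsRepository by name; now the class is referenced directly and registered
    // for the "hdfs" repository type.
    public void onModule(RepositoriesModule repositoriesModule) {
        repositoriesModule.registerRepository("hdfs", HdfsRepository.class, BlobStoreIndexShardRepository.class);
    }
}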


@@ -1,31 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugin.hadoop.hdfs;

import java.net.URL;
import java.util.Collections;
import java.util.List;

public class HdfsTestPlugin extends HdfsPlugin {

    @Override
    protected List<URL> getHadoopClassLoaderPath(String baseLib) {
        return Collections.emptyList();
    }
}


@@ -28,6 +28,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.RepositoryException;
import org.elasticsearch.repositories.RepositoryMissingException;
import org.elasticsearch.repositories.hdfs.TestingFs;
import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin;
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -74,7 +75,7 @@ public class HdfsTests extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(HdfsTestPlugin.class);
        return pluginList(HdfsPlugin.class);
    }

    private String path;
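Read as a before/after pair, the two return statements above show the test dropping the HdfsTestPlugin subclass in favor of the real plugin. A sketch of the resulting override inside HdfsTests, with the method shape taken from the surrounding context lines:

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // HdfsTestPlugin, which returned an empty Hadoop classloader path, is deleted;
        // the integration test now starts nodes with HdfsPlugin itself.
        return pluginList(HdfsPlugin.class);
    }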