HADOOP-10893. isolated classloader on the client side. Contributed by Sangjin Lee

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1619605 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jason Darrell Lowe 2014-08-21 21:41:24 +00:00
parent 572a158cf8
commit 23b4c0b6c1
18 changed files with 561 additions and 209 deletions

View File

@ -8,6 +8,9 @@ Release 2.6.0 - UNRELEASED
HADOOP-10433. Key Management Server based on KeyProvider API. (tucu) HADOOP-10433. Key Management Server based on KeyProvider API. (tucu)
HADOOP-10893. isolated classloader on the client side (Sangjin Lee via
jlowe)
IMPROVEMENTS IMPROVEMENTS
HADOOP-10808. Remove unused native code for munlock. (cnauroth) HADOOP-10808. Remove unused native code for munlock. (cnauroth)

View File

@ -111,6 +111,11 @@
<Method name="driver" /> <Method name="driver" />
<Bug pattern="DM_EXIT" /> <Bug pattern="DM_EXIT" />
</Match> </Match>
<Match>
<Class name="org.apache.hadoop.util.RunJar" />
<Method name="run" />
<Bug pattern="DM_EXIT" />
</Match>
<!-- <!--
We need to cast objects between old and new api objects We need to cast objects between old and new api objects
--> -->

View File

@ -282,10 +282,12 @@ if not "%HADOOP_MAPRED_HOME%\%MAPRED_DIR%" == "%HADOOP_YARN_HOME%\%YARN_DIR%" (
@rem @rem
if defined HADOOP_CLASSPATH ( if defined HADOOP_CLASSPATH (
if defined HADOOP_USER_CLASSPATH_FIRST ( if not defined HADOOP_USE_CLIENT_CLASSLOADER (
set CLASSPATH=%HADOOP_CLASSPATH%;%CLASSPATH%; if defined HADOOP_USER_CLASSPATH_FIRST (
) else ( set CLASSPATH=%HADOOP_CLASSPATH%;%CLASSPATH%;
set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%; ) else (
set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%;
)
) )
) )

View File

@ -29,6 +29,23 @@
# by doing # by doing
# export HADOOP_USER_CLASSPATH_FIRST=true # export HADOOP_USER_CLASSPATH_FIRST=true
# #
# HADOOP_USE_CLIENT_CLASSLOADER When defined, HADOOP_CLASSPATH and the jar
# given as the hadoop jar argument are handled
# by a separate isolated client classloader.
# If it is set, HADOOP_USER_CLASSPATH_FIRST
# is ignored. Can be defined by doing
# export HADOOP_USE_CLIENT_CLASSLOADER=true
#
# HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES
# When defined, it overrides the default
# definition of system classes for the client
# classloader when
# HADOOP_USE_CLIENT_CLASSLOADER is enabled.
# Names ending in '.' (period) are treated as
# package names, and names starting with a
# '-' are treated as negative matches.
# For example,
# export HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES="-org.apache.hadoop.UserClass,java.,javax.,org.apache.hadoop."
this="${BASH_SOURCE-$0}" this="${BASH_SOURCE-$0}"
common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P) common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
@ -282,7 +299,9 @@ fi
# Add the user-specified CLASSPATH via HADOOP_CLASSPATH # Add the user-specified CLASSPATH via HADOOP_CLASSPATH
# Add it first or last depending on if user has # Add it first or last depending on if user has
# set env-var HADOOP_USER_CLASSPATH_FIRST # set env-var HADOOP_USER_CLASSPATH_FIRST
if [ "$HADOOP_CLASSPATH" != "" ]; then # if the user set HADOOP_USE_CLIENT_CLASSLOADER, HADOOP_CLASSPATH is not added
# to the classpath
if [[ ( "$HADOOP_CLASSPATH" != "" ) && ( "$HADOOP_USE_CLIENT_CLASSLOADER" = "" ) ]]; then
# Prefix it if its to be preceded # Prefix it if its to be preceded
if [ "$HADOOP_USER_CLASSPATH_FIRST" != "" ]; then if [ "$HADOOP_USER_CLASSPATH_FIRST" != "" ]; then
CLASSPATH=${HADOOP_CLASSPATH}:${CLASSPATH} CLASSPATH=${HADOOP_CLASSPATH}:${CLASSPATH}

View File

@ -0,0 +1,219 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import java.io.File;
import java.io.FilenameFilter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/**
 * A {@link URLClassLoader} for application isolation. Classes from the
 * application JARs are loaded in preference to the parent loader, except for
 * names matching the configured "system classes", which are always delegated
 * to the parent.
 */
@Public
@Unstable
public class ApplicationClassLoader extends URLClassLoader {
/**
 * Default value of the system classes if the user did not override them.
 * JDK classes, hadoop classes and resources, and some select third-party
 * classes are considered system classes, and are not loaded by the
 * application classloader.
 */
public static final String DEFAULT_SYSTEM_CLASSES =
"java.," +
"javax.," +
"org.w3c.dom.," +
"org.xml.sax.," +
"org.apache.commons.logging.," +
"org.apache.log4j.," +
"org.apache.hadoop.," +
"core-default.xml," +
"hdfs-default.xml," +
"mapred-default.xml," +
"yarn-default.xml";
private static final Log LOG =
LogFactory.getLog(ApplicationClassLoader.class.getName());
// Accepts jar files by extension; only the two spellings ".jar" and ".JAR"
// are recognized.
private static final FilenameFilter JAR_FILENAME_FILTER =
new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".jar") || name.endsWith(".JAR");
}
};
// Kept in a field (in addition to super's parent) so getResource/loadClass
// can delegate to it explicitly.
private final ClassLoader parent;
// Filter entries consulted by isSystemClass; never null after construction.
private final List<String> systemClasses;
/**
 * Creates an isolating classloader.
 *
 * @param urls classpath entries searched before delegating to the parent
 * @param parent the parent classloader; must not be null
 * @param systemClasses names always delegated to the parent; a null or
 *        empty list falls back to {@link #DEFAULT_SYSTEM_CLASSES}
 */
public ApplicationClassLoader(URL[] urls, ClassLoader parent,
List<String> systemClasses) {
super(urls, parent);
if (LOG.isDebugEnabled()) {
LOG.debug("urls: " + Arrays.toString(urls));
LOG.debug("system classes: " + systemClasses);
}
this.parent = parent;
if (parent == null) {
throw new IllegalArgumentException("No parent classloader!");
}
// if the caller-specified system classes are null or empty, use the default
this.systemClasses = (systemClasses == null || systemClasses.isEmpty()) ?
Arrays.asList(StringUtils.getTrimmedStrings(DEFAULT_SYSTEM_CLASSES)) :
systemClasses;
LOG.info("system classes: " + this.systemClasses);
}
/**
 * Creates an isolating classloader from a classpath string; see
 * {@link #constructUrlsFromClasspath(String)} for how entries are expanded.
 */
public ApplicationClassLoader(String classpath, ClassLoader parent,
List<String> systemClasses) throws MalformedURLException {
this(constructUrlsFromClasspath(classpath), parent, systemClasses);
}
/**
 * Converts a classpath string into an array of URLs. An element ending in
 * "/*" is expanded to all jar files in that directory; any other element is
 * included only if the file or directory exists.
 */
static URL[] constructUrlsFromClasspath(String classpath)
throws MalformedURLException {
List<URL> urls = new ArrayList<URL>();
for (String element : classpath.split(File.pathSeparator)) {
if (element.endsWith("/*")) {
// drop only the '*' so the trailing '/' keeps the directory path intact
String dir = element.substring(0, element.length() - 1);
File[] files = new File(dir).listFiles(JAR_FILENAME_FILTER);
if (files != null) {
for (File file : files) {
urls.add(file.toURI().toURL());
}
}
} else {
File file = new File(element);
if (file.exists()) {
urls.add(new File(element).toURI().toURL());
}
}
}
return urls.toArray(new URL[urls.size()]);
}
/**
 * {@inheritDoc}
 *
 * Looks the resource up locally first, unless the name matches the system
 * classes, in which case only the parent is consulted. A leading '/' is
 * stripped and the local lookup retried, since URLClassLoader resource
 * names are not '/'-prefixed.
 */
@Override
public URL getResource(String name) {
URL url = null;
if (!isSystemClass(name, systemClasses)) {
url= findResource(name);
if (url == null && name.startsWith("/")) {
if (LOG.isDebugEnabled()) {
LOG.debug("Remove leading / off " + name);
}
url= findResource(name.substring(1));
}
}
// fall back to the parent for system classes or local misses
if (url == null) {
url= parent.getResource(name);
}
if (url != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("getResource("+name+")=" + url);
}
}
return url;
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
// delegate to the two-arg form without resolving the class
return this.loadClass(name, false);
}
/**
 * Loads a class child-first: this loader's URLs are tried before the
 * parent, unlike the usual Java 2 parent-first delegation. Names matching
 * the system classes are always loaded by the parent.
 */
@Override
protected synchronized Class<?> loadClass(String name, boolean resolve)
throws ClassNotFoundException {
if (LOG.isDebugEnabled()) {
LOG.debug("Loading class: " + name);
}
Class<?> c = findLoadedClass(name);
ClassNotFoundException ex = null;
if (c == null && !isSystemClass(name, systemClasses)) {
// Try to load class from this classloader's URLs. Note that this is like
// the servlet spec, not the usual Java 2 behaviour where we ask the
// parent to attempt to load first.
try {
c = findClass(name);
if (LOG.isDebugEnabled() && c != null) {
LOG.debug("Loaded class: " + name + " ");
}
} catch (ClassNotFoundException e) {
if (LOG.isDebugEnabled()) {
LOG.debug(e);
}
// remember the local failure so it can be rethrown if the parent also
// fails to find the class
ex = e;
}
}
if (c == null) { // try parent
c = parent.loadClass(name);
if (LOG.isDebugEnabled() && c != null) {
LOG.debug("Loaded class from parent: " + name + " ");
}
}
// defensive: parent.loadClass normally throws rather than returning null
if (c == null) {
throw ex != null ? ex : new ClassNotFoundException(name);
}
if (resolve) {
resolveClass(c);
}
return c;
}
/**
 * Checks whether a class or resource name matches the system-class list.
 * '/' separators are normalized to '.' and leading dots are stripped before
 * matching. A list entry ending in '.' is treated as a package prefix; any
 * other entry is an exact match. An entry prefixed with '-' is a negative
 * match. The first matching entry decides the result.
 *
 * @param name class or resource name to test
 * @param systemClasses the filter entries; a null list matches nothing
 * @return true if the name should be handled by the parent classloader
 */
public static boolean isSystemClass(String name, List<String> systemClasses) {
if (systemClasses != null) {
String canonicalName = name.replace('/', '.');
while (canonicalName.startsWith(".")) {
canonicalName=canonicalName.substring(1);
}
for (String c : systemClasses) {
boolean result = true;
if (c.startsWith("-")) {
c = c.substring(1);
result = false;
}
if (c.endsWith(".") && canonicalName.startsWith(c)) {
return result;
} else if (canonicalName.equals(c)) {
return result;
}
}
}
return false;
}
}

View File

@ -18,23 +18,25 @@
package org.apache.hadoop.util; package org.apache.hadoop.util;
import java.lang.reflect.Array; import java.io.File;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.net.URLClassLoader;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.File; import java.lang.reflect.Array;
import java.util.regex.Pattern; import java.lang.reflect.InvocationTargetException;
import java.util.Arrays; import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration; import java.util.Enumeration;
import java.util.jar.JarFile; import java.util.List;
import java.util.jar.JarEntry; import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest; import java.util.jar.Manifest;
import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
@ -55,6 +57,21 @@ public class RunJar {
*/ */
public static final int SHUTDOWN_HOOK_PRIORITY = 10; public static final int SHUTDOWN_HOOK_PRIORITY = 10;
/**
* Environment key for using the client classloader.
*/
public static final String HADOOP_USE_CLIENT_CLASSLOADER =
"HADOOP_USE_CLIENT_CLASSLOADER";
/**
* Environment key for the (user-provided) hadoop classpath.
*/
public static final String HADOOP_CLASSPATH = "HADOOP_CLASSPATH";
/**
* Environment key for the system classes.
*/
public static final String HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES =
"HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES";
/** /**
* Unpack a jar file into a directory. * Unpack a jar file into a directory.
* *
@ -116,6 +133,10 @@ private static void ensureDirectory(File dir) throws IOException {
/** Run a Hadoop job jar. If the main class is not in the jar's manifest, /** Run a Hadoop job jar. If the main class is not in the jar's manifest,
* then it must be provided on the command line. */ * then it must be provided on the command line. */
public static void main(String[] args) throws Throwable { public static void main(String[] args) throws Throwable {
new RunJar().run(args);
}
public void run(String[] args) throws Throwable {
String usage = "RunJar jarFile [mainClass] args..."; String usage = "RunJar jarFile [mainClass] args...";
if (args.length < 1) { if (args.length < 1) {
@ -187,19 +208,7 @@ public void run() {
unJar(file, workDir); unJar(file, workDir);
ArrayList<URL> classPath = new ArrayList<URL>(); ClassLoader loader = createClassLoader(file, workDir);
classPath.add(new File(workDir+"/").toURI().toURL());
classPath.add(file.toURI().toURL());
classPath.add(new File(workDir, "classes/").toURI().toURL());
File[] libs = new File(workDir, "lib").listFiles();
if (libs != null) {
for (int i = 0; i < libs.length; i++) {
classPath.add(libs[i].toURI().toURL());
}
}
ClassLoader loader =
new URLClassLoader(classPath.toArray(new URL[0]));
Thread.currentThread().setContextClassLoader(loader); Thread.currentThread().setContextClassLoader(loader);
Class<?> mainClass = Class.forName(mainClassName, true, loader); Class<?> mainClass = Class.forName(mainClassName, true, loader);
@ -214,5 +223,65 @@ public void run() {
throw e.getTargetException(); throw e.getTargetException();
} }
} }
/**
 * Creates a classloader based on the environment that was specified by the
 * user. If HADOOP_USE_CLIENT_CLASSLOADER is specified, it creates an
 * application classloader that provides the isolation of the user class space
 * from the hadoop classes and their dependencies. It forms a class space for
 * the user jar as well as the HADOOP_CLASSPATH. Otherwise, it creates a
 * classloader that simply adds the user jar to the classpath.
 *
 * @param file the job jar being run
 * @param workDir the directory into which the jar has been unpacked
 * @return the classloader to load the user's main class with
 * @throws MalformedURLException if a classpath element cannot be converted
 *         to a URL
 */
private ClassLoader createClassLoader(File file, final File workDir)
throws MalformedURLException {
ClassLoader loader;
// see if the client classloader is enabled
if (useClientClassLoader()) {
// build the client classpath string: unpacked dir, the jar itself,
// classes/, and the jars under lib/ (the "lib/*" wildcard is expanded by
// ApplicationClassLoader.constructUrlsFromClasspath)
StringBuilder sb = new StringBuilder();
sb.append(workDir+"/").
append(File.pathSeparator).append(file).
append(File.pathSeparator).append(workDir+"/classes/").
append(File.pathSeparator).append(workDir+"/lib/*");
// HADOOP_CLASSPATH is added to the client classpath
String hadoopClasspath = getHadoopClasspath();
if (hadoopClasspath != null && !hadoopClasspath.isEmpty()) {
sb.append(File.pathSeparator).append(hadoopClasspath);
}
String clientClasspath = sb.toString();
// get the system classes
String systemClasses = getSystemClasses();
List<String> systemClassesList = systemClasses == null ?
null :
Arrays.asList(StringUtils.getTrimmedStrings(systemClasses));
// create an application classloader that isolates the user classes
// (a null list makes ApplicationClassLoader use its default system classes)
loader = new ApplicationClassLoader(clientClasspath,
getClass().getClassLoader(), systemClassesList);
} else {
List<URL> classPath = new ArrayList<URL>();
classPath.add(new File(workDir+"/").toURI().toURL());
classPath.add(file.toURI().toURL());
classPath.add(new File(workDir, "classes/").toURI().toURL());
File[] libs = new File(workDir, "lib").listFiles();
if (libs != null) {
for (int i = 0; i < libs.length; i++) {
classPath.add(libs[i].toURI().toURL());
}
}
// create a normal parent-delegating classloader
loader = new URLClassLoader(classPath.toArray(new URL[0]));
}
return loader;
}
/**
 * Whether the isolated client classloader was requested via the
 * HADOOP_USE_CLIENT_CLASSLOADER environment variable. Package-private so
 * tests can stub the environment lookup.
 */
boolean useClientClassLoader() {
  // Boolean.parseBoolean(null) is false, so an unset variable disables it.
  final String flag = System.getenv(HADOOP_USE_CLIENT_CLASSLOADER);
  return Boolean.parseBoolean(flag);
}
/**
 * Returns the user-supplied HADOOP_CLASSPATH environment value, or null when
 * the variable is unset. Package-private so tests can stub it.
 */
String getHadoopClasspath() {
  final String hadoopClasspath = System.getenv(HADOOP_CLASSPATH);
  return hadoopClasspath;
}
/**
 * Returns the user's system-classes override for the client classloader
 * (HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES), or null when the variable is
 * unset. Package-private so tests can stub it.
 */
String getSystemClasses() {
  final String override =
      System.getenv(HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES);
  return override;
}
} }

View File

@ -0,0 +1,33 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
/**
 * Helper used by the client-classloader tests to verify which classloader
 * loaded a given class.
 */
public class ClassLoaderCheck {
  /**
   * Verifies the class is loaded by the right classloader.
   *
   * @param cls the class whose defining classloader is checked
   * @param shouldBeLoadedByAppClassLoader whether cls is expected to have
   *        been loaded by an {@link ApplicationClassLoader}
   * @throws RuntimeException if the expectation does not hold
   */
  public static void checkClassLoader(Class<?> cls,
      boolean shouldBeLoadedByAppClassLoader) {
    boolean loadedByAppClassLoader =
        cls.getClassLoader() instanceof ApplicationClassLoader;
    // fail when the actual loader does not match the expectation (XOR)
    if (shouldBeLoadedByAppClassLoader != loadedByAppClassLoader) {
      throw new RuntimeException("incorrect classloader used: " + cls
          + " was loaded by " + cls.getClassLoader());
    }
  }
}

View File

@ -0,0 +1,34 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
/**
 * Test class used by {@link TestRunJar} to verify that it is loaded by the
 * {@link ApplicationClassLoader}.
 */
public class ClassLoaderCheckMain {
  public static void main(String[] args) {
    // The main class and its "third" dependency must come from the
    // application classloader; the "second" dependency must not.
    expectAppLoader(ClassLoaderCheckMain.class, true);
    expectAppLoader(ClassLoaderCheckSecond.class, false);
    expectAppLoader(ClassLoaderCheckThird.class, true);
  }

  /** Delegates to {@link ClassLoaderCheck}, which throws on a mismatch. */
  private static void expectAppLoader(Class<?> cls, boolean fromAppLoader) {
    ClassLoaderCheck.checkClassLoader(cls, fromAppLoader);
  }
}

View File

@ -0,0 +1,24 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
/**
 * A class {@link ClassLoaderCheckMain} depends on that should be loaded by the
 * system classloader.
 */
// Deliberately empty: only its identity and defining classloader matter to
// the test.
public class ClassLoaderCheckSecond {}

View File

@ -0,0 +1,24 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
/**
 * A class {@link ClassLoaderCheckMain} depends on that should be loaded by the
 * application classloader.
 */
// Deliberately empty: only its identity and defining classloader matter to
// the test.
public class ClassLoaderCheckThird {}

View File

@ -16,18 +16,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.yarn.util; package org.apache.hadoop.util;
import static org.apache.hadoop.util.ApplicationClassLoader.constructUrlsFromClasspath;
import static org.apache.hadoop.util.ApplicationClassLoader.isSystemClass;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.apache.hadoop.yarn.util.ApplicationClassLoader.constructUrlsFromClasspath;
import static org.apache.hadoop.yarn.util.ApplicationClassLoader.isSystemClass;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import java.io.File; import java.io.File;
import java.io.FileOutputStream; import java.io.FileOutputStream;
@ -43,6 +40,9 @@
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
public class TestApplicationClassLoader { public class TestApplicationClassLoader {
private static File testDir = new File(System.getProperty("test.build.data", private static File testDir = new File(System.getProperty("test.build.data",

View File

@ -17,23 +17,30 @@
*/ */
package org.apache.hadoop.util; package org.apache.hadoop.util;
import junit.framework.TestCase; import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.BufferedInputStream;
import java.io.File; import java.io.File;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.jar.JarOutputStream; import java.util.jar.JarOutputStream;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
import junit.framework.TestCase;
import org.apache.hadoop.fs.FileUtil;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.apache.hadoop.fs.FileUtil;
public class TestRunJar extends TestCase { public class TestRunJar extends TestCase {
private File TEST_ROOT_DIR; private File TEST_ROOT_DIR;
private static final String TEST_JAR_NAME="test-runjar.jar"; private static final String TEST_JAR_NAME="test-runjar.jar";
private static final String TEST_JAR_2_NAME = "test-runjar2.jar";
@Override @Override
@Before @Before
@ -107,4 +114,59 @@ public void testUnJarWithPattern() throws Exception {
new File(unjarDir, "foobaz.txt").exists()); new File(unjarDir, "foobaz.txt").exists());
} }
/**
* Tests the client classloader to verify the main class and its dependent
* class are loaded correctly by the application classloader, and others are
* loaded by the system classloader.
*/
@Test
public void testClientClassLoader() throws Throwable {
RunJar runJar = spy(new RunJar());
// enable the client classloader
when(runJar.useClientClassLoader()).thenReturn(true);
// set the system classes and blacklist the test main class and the test
// third class so they can be loaded by the application classloader
String mainCls = ClassLoaderCheckMain.class.getName();
String thirdCls = ClassLoaderCheckThird.class.getName();
String systemClasses = "-" + mainCls + "," +
"-" + thirdCls + "," +
ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES;
when(runJar.getSystemClasses()).thenReturn(systemClasses);
// create the test jar
File testJar = makeClassLoaderTestJar(mainCls, thirdCls);
// form the args
String[] args = new String[3];
args[0] = testJar.getAbsolutePath();
args[1] = mainCls;
// run RunJar
runJar.run(args);
// it should not throw an exception
}
private File makeClassLoaderTestJar(String... clsNames) throws IOException {
File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_2_NAME);
JarOutputStream jstream =
new JarOutputStream(new FileOutputStream(jarFile));
for (String clsName: clsNames) {
String name = clsName.replace('.', '/') + ".class";
InputStream entryInputStream = this.getClass().getResourceAsStream(
"/" + name);
ZipEntry entry = new ZipEntry(name);
jstream.putNextEntry(entry);
BufferedInputStream bufInputStream = new BufferedInputStream(
entryInputStream, 2048);
int count;
byte[] data = new byte[2048];
while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
jstream.write(data, 0, count);
}
jstream.closeEntry();
}
jstream.close();
return jarFile;
}
} }

View File

@ -34,6 +34,7 @@
import java.util.regex.Pattern; import java.util.regex.Pattern;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Private;
@ -56,6 +57,7 @@
import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.util.ApplicationClassLoader;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.util.StringInterner;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
@ -67,7 +69,6 @@
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.util.ApplicationClassLoader;
import org.apache.hadoop.yarn.util.Apps; import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.RollingFileAppender; import org.apache.log4j.RollingFileAppender;

View File

@ -50,6 +50,7 @@
import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.util.ApplicationClassLoader;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
@ -59,7 +60,6 @@
import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.util.ApplicationClassLoader;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
@ -510,7 +510,8 @@ public void testTaskStateUI() {
@Test @Test
public void testSystemClasses() { public void testSystemClasses() {
final List<String> systemClasses = final List<String> systemClasses =
Arrays.asList(MRApps.getSystemClasses(new Configuration())); Arrays.asList(StringUtils.getTrimmedStrings(
ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES));
for (String defaultXml : DEFAULT_XMLS) { for (String defaultXml : DEFAULT_XMLS) {
assertTrue(defaultXml + " must be system resource", assertTrue(defaultXml + " must be system resource",
ApplicationClassLoader.isSystemClass(defaultXml, systemClasses)); ApplicationClassLoader.isSystemClass(defaultXml, systemClasses));

View File

@ -1660,13 +1660,13 @@
<property> <property>
<name>mapreduce.job.classloader.system.classes</name> <name>mapreduce.job.classloader.system.classes</name>
<value>java.,javax.,org.w3c.dom.,org.xml.sax.,org.apache.commons.logging., <value></value>
org.apache.log4j.,org.apache.hadoop.,core-default.xml, <description>Used to override the default definition of the system classes for
hdfs-default.xml,mapred-default.xml,yarn-default.xml</value> the job classloader. The system classes are a comma-separated list of
<description>A comma-separated list of classes that should be loaded from the classes that should be loaded from the system classpath, not the
system classpath, not the user-supplied JARs, when mapreduce.job.classloader user-supplied JARs, when mapreduce.job.classloader is enabled. Names ending
is enabled. Names ending in '.' (period) are treated as package names, in '.' (period) are treated as package names, and names starting with a '-'
and names starting with a '-' are treated as negative matches. are treated as negative matches.
</description> </description>
</property> </property>

View File

@ -84,13 +84,13 @@
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.ApplicationClassLoader;
import org.apache.hadoop.util.JarFinder; import org.apache.hadoop.util.JarFinder;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.util.ApplicationClassLoader;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.AfterClass; import org.junit.AfterClass;
@ -242,8 +242,7 @@ private void testJobClassloader(boolean useCustomClasses) throws IOException,
// to test AM loading user classes such as output format class, we want // to test AM loading user classes such as output format class, we want
// to blacklist them from the system classes (they need to be prepended // to blacklist them from the system classes (they need to be prepended
// as the first match wins) // as the first match wins)
String systemClasses = String systemClasses = ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES;
sleepConf.get(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER_SYSTEM_CLASSES);
// exclude the custom classes from system classes // exclude the custom classes from system classes
systemClasses = "-" + CustomOutputFormat.class.getName() + ",-" + systemClasses = "-" + CustomOutputFormat.class.getName() + ",-" +
CustomSpeculator.class.getName() + "," + CustomSpeculator.class.getName() + "," +

View File

@ -344,4 +344,11 @@
<Class name="org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider"/> <Class name="org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider"/>
<Bug pattern="DC_DOUBLECHECK" /> <Bug pattern="DC_DOUBLECHECK" />
</Match> </Match>
<!-- ApplicationClassLoader is deprecated and moved to hadoop-common; ignore
warning on the identical name as it should be removed later -->
<Match>
<Class name="org.apache.hadoop.yarn.util.ApplicationClassLoader"/>
<Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS"/>
</Match>
</FindBugsFilter> </FindBugsFilter>

View File

@ -18,180 +18,30 @@
package org.apache.hadoop.yarn.util; package org.apache.hadoop.yarn.util;
import java.io.File;
import java.io.FilenameFilter;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.classification.InterfaceStability.Unstable;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
/** /**
* A {@link URLClassLoader} for YARN application isolation. Classes from * This type has been deprecated in favor of
* the application JARs are loaded in preference to the parent loader. * {@link org.apache.hadoop.util.ApplicationClassLoader}. All new uses of
* ApplicationClassLoader should use that type instead.
*/ */
@Public @Public
@Unstable @Unstable
public class ApplicationClassLoader extends URLClassLoader { @Deprecated
public class ApplicationClassLoader extends
private static final Log LOG = org.apache.hadoop.util.ApplicationClassLoader {
LogFactory.getLog(ApplicationClassLoader.class.getName());
// Accepts jar files when expanding wildcard ("/*") classpath entries;
// both lower-case ".jar" and upper-case ".JAR" suffixes match.
private static final FilenameFilter JAR_FILENAME_FILTER =
new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".jar") || name.endsWith(".JAR");
}
};
// Parent classloader; system classes are delegated to it and it also
// serves as the fallback when a class/resource is not found locally.
private ClassLoader parent;
// Class/package name patterns treated as "system" and therefore always
// loaded by the parent; a leading '-' marks a negative (excluded) match
// (see isSystemClass).
private List<String> systemClasses;
public ApplicationClassLoader(URL[] urls, ClassLoader parent, public ApplicationClassLoader(URL[] urls, ClassLoader parent,
List<String> systemClasses) { List<String> systemClasses) {
super(urls, parent); super(urls, parent, systemClasses);
this.parent = parent;
if (parent == null) {
throw new IllegalArgumentException("No parent classloader!");
}
this.systemClasses = systemClasses;
} }
public ApplicationClassLoader(String classpath, ClassLoader parent, public ApplicationClassLoader(String classpath, ClassLoader parent,
List<String> systemClasses) throws MalformedURLException { List<String> systemClasses) throws MalformedURLException {
this(constructUrlsFromClasspath(classpath), parent, systemClasses); super(classpath, parent, systemClasses);
} }
}
@VisibleForTesting
// Converts a platform classpath string into the URL array expected by
// URLClassLoader. Entries ending in "/*" are expanded to every jar in
// that directory; plain entries are included only if they exist on disk.
static URL[] constructUrlsFromClasspath(String classpath)
throws MalformedURLException {
List<URL> result = new ArrayList<URL>();
for (String entry : Splitter.on(File.pathSeparator).split(classpath)) {
if (entry.endsWith("/*")) {
// Wildcard entry: keep the trailing '/' and list the jars inside.
String dirName = entry.substring(0, entry.length() - 1);
File[] jars = new File(dirName).listFiles(JAR_FILENAME_FILTER);
if (jars != null) {
for (File jar : jars) {
result.add(jar.toURI().toURL());
}
}
} else {
// Plain entry: silently skip paths that do not exist.
File candidate = new File(entry);
if (candidate.exists()) {
result.add(candidate.toURI().toURL());
}
}
}
return result.toArray(new URL[result.size()]);
}
@Override
// Application-first resource lookup: non-system resources are searched
// in this loader's URLs before the parent is consulted; system
// resources go straight to the parent. Returns null if neither finds it.
public URL getResource(String name) {
if (!isSystemClass(name, systemClasses)) {
URL local = findResource(name);
if (local == null && name.startsWith("/")) {
// findResource expects a relative name; retry without the
// leading slash for callers that pass absolute-style names.
if (LOG.isDebugEnabled()) {
LOG.debug("Remove leading / off " + name);
}
local = findResource(name.substring(1));
}
if (local != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("getResource("+name+")=" + local);
}
return local;
}
}
URL fromParent = parent.getResource(name);
if (fromParent != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("getResource("+name+")=" + fromParent);
}
return fromParent;
}
return null;
}
// Delegates to the two-argument form without resolving the class, so the
// application-first lookup order implemented there applies.
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
return this.loadClass(name, false);
}
// Application-first class loading: an already-loaded class is returned
// immediately; a non-system class is searched in this loader's URLs
// before delegating to the parent; a system class goes straight to the
// parent. A ClassNotFoundException from the local search is remembered
// and rethrown if the parent also fails.
@Override
protected synchronized Class<?> loadClass(String name, boolean resolve)
throws ClassNotFoundException {
if (LOG.isDebugEnabled()) {
LOG.debug("Loading class: " + name);
}
Class<?> c = findLoadedClass(name);
ClassNotFoundException ex = null;
if (c == null && !isSystemClass(name, systemClasses)) {
// Try to load class from this classloader's URLs. Note that this is like
// the servlet spec, not the usual Java 2 behaviour where we ask the
// parent to attempt to load first.
try {
c = findClass(name);
if (LOG.isDebugEnabled() && c != null) {
LOG.debug("Loaded class: " + name + " ");
}
} catch (ClassNotFoundException e) {
if (LOG.isDebugEnabled()) {
LOG.debug(e);
}
// Keep the local failure so it (rather than a generic one) is
// rethrown if the parent cannot load the class either.
ex = e;
}
}
if (c == null) { // try parent
// NOTE(review): ClassLoader.loadClass throws ClassNotFoundException
// rather than returning null, so the null checks below appear to be
// defensive only.
c = parent.loadClass(name);
if (LOG.isDebugEnabled() && c != null) {
LOG.debug("Loaded class from parent: " + name + " ");
}
}
if (c == null) {
throw ex != null ? ex : new ClassNotFoundException(name);
}
if (resolve) {
resolveClass(c);
}
return c;
}
@VisibleForTesting
// Decides whether a class/resource name belongs to the "system" set.
// Names are canonicalized ('/' -> '.', leading dots stripped); entries
// ending in "." match as package prefixes, others must match exactly;
// a leading '-' on an entry makes it a negative match. The first
// matching entry wins; no match (or a null list) means false.
public static boolean isSystemClass(String name, List<String> systemClasses) {
if (systemClasses == null) {
return false;
}
String canonical = name.replace('/', '.');
while (canonical.startsWith(".")) {
canonical = canonical.substring(1);
}
for (String entry : systemClasses) {
boolean positive = !entry.startsWith("-");
String pattern = positive ? entry : entry.substring(1);
boolean matched = pattern.endsWith(".")
? canonical.startsWith(pattern)
: canonical.equals(pattern);
if (matched) {
return positive;
}
}
return false;
}
}