diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 313ded2d3d0..3616c60f7bd 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -8,6 +8,9 @@ Release 2.6.0 - UNRELEASED
HADOOP-10433. Key Management Server based on KeyProvider API. (tucu)
+ HADOOP-10893. isolated classloader on the client side (Sangjin Lee via
+ jlowe)
+
IMPROVEMENTS
HADOOP-10808. Remove unused native code for munlock. (cnauroth)
diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
index d685496266c..eead0352034 100644
--- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
@@ -111,6 +111,11 @@
+
+
+
+
+
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd
index 3ea576cef20..d8da5b16aab 100644
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd
@@ -282,10 +282,12 @@ if not "%HADOOP_MAPRED_HOME%\%MAPRED_DIR%" == "%HADOOP_YARN_HOME%\%YARN_DIR%" (
@rem
if defined HADOOP_CLASSPATH (
- if defined HADOOP_USER_CLASSPATH_FIRST (
- set CLASSPATH=%HADOOP_CLASSPATH%;%CLASSPATH%;
- ) else (
- set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%;
+ if not defined HADOOP_USE_CLIENT_CLASSLOADER (
+ if defined HADOOP_USER_CLASSPATH_FIRST (
+ set CLASSPATH=%HADOOP_CLASSPATH%;%CLASSPATH%;
+ ) else (
+ set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%;
+ )
)
)
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
index f030b8321b0..8b8952fb73d 100644
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
@@ -29,6 +29,23 @@
# by doing
# export HADOOP_USER_CLASSPATH_FIRST=true
#
+# HADOOP_USE_CLIENT_CLASSLOADER When defined, HADOOP_CLASSPATH and the jar
+# given as the hadoop jar argument are handled
+# by a separate isolated client classloader.
+# If it is set, HADOOP_USER_CLASSPATH_FIRST
+# is ignored. Can be defined by doing
+# export HADOOP_USE_CLIENT_CLASSLOADER=true
+#
+# HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES
+# When defined, it overrides the default
+# definition of system classes for the client
+# classloader when
+# HADOOP_USE_CLIENT_CLASSLOADER is enabled.
+# Names ending in '.' (period) are treated as
+# package names, and names starting with a
+# '-' are treated as negative matches.
+# For example,
+# export HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES="-org.apache.hadoop.UserClass,java.,javax.,org.apache.hadoop."
this="${BASH_SOURCE-$0}"
common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
@@ -282,7 +299,9 @@ fi
# Add the user-specified CLASSPATH via HADOOP_CLASSPATH
# Add it first or last depending on if user has
# set env-var HADOOP_USER_CLASSPATH_FIRST
-if [ "$HADOOP_CLASSPATH" != "" ]; then
+# if the user set HADOOP_USE_CLIENT_CLASSLOADER, HADOOP_CLASSPATH is not added
+# to the classpath
+if [[ ( "$HADOOP_CLASSPATH" != "" ) && ( "$HADOOP_USE_CLIENT_CLASSLOADER" = "" ) ]]; then
# Prefix it if its to be preceded
if [ "$HADOOP_USER_CLASSPATH_FIRST" != "" ]; then
CLASSPATH=${HADOOP_CLASSPATH}:${CLASSPATH}
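
The two new environment variables above only take effect when a client is launched through the hadoop jar command, which is handled by RunJar (modified further down). As a quick way to observe the effect, a user class packaged in the job jar can report its own classloader; this is a minimal sketch, and the package, class, and jar names are placeholders rather than part of this patch.

    package com.example;

    import org.apache.hadoop.util.ApplicationClassLoader;

    // Run with:
    //   export HADOOP_USE_CLIENT_CLASSLOADER=true
    //   hadoop jar myapp.jar com.example.WhichLoader
    // With the client classloader enabled the class is expected to be loaded by
    // org.apache.hadoop.util.ApplicationClassLoader; without it, by the plain
    // URLClassLoader that RunJar creates.
    public class WhichLoader {
      public static void main(String[] args) {
        ClassLoader cl = WhichLoader.class.getClassLoader();
        System.out.println("Loaded by: " + cl.getClass().getName());
        System.out.println("Isolated: " + (cl instanceof ApplicationClassLoader));
      }
    }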
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java
new file mode 100644
index 00000000000..5dda10fc887
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+
+/**
+ * A {@link URLClassLoader} for application isolation. Classes from the
+ * application JARs are loaded in preference to the parent loader.
+ */
+@Public
+@Unstable
+public class ApplicationClassLoader extends URLClassLoader {
+ /**
+ * Default value of the system classes if the user did not override them.
+ * JDK classes, hadoop classes and resources, and some select third-party
+ * classes are considered system classes, and are not loaded by the
+ * application classloader.
+ */
+ public static final String DEFAULT_SYSTEM_CLASSES =
+ "java.," +
+ "javax.," +
+ "org.w3c.dom.," +
+ "org.xml.sax.," +
+ "org.apache.commons.logging.," +
+ "org.apache.log4j.," +
+ "org.apache.hadoop.," +
+ "core-default.xml," +
+ "hdfs-default.xml," +
+ "mapred-default.xml," +
+ "yarn-default.xml";
+
+ private static final Log LOG =
+ LogFactory.getLog(ApplicationClassLoader.class.getName());
+
+ private static final FilenameFilter JAR_FILENAME_FILTER =
+ new FilenameFilter() {
+ @Override
+ public boolean accept(File dir, String name) {
+ return name.endsWith(".jar") || name.endsWith(".JAR");
+ }
+ };
+
+ private final ClassLoader parent;
+ private final List<String> systemClasses;
+
+ public ApplicationClassLoader(URL[] urls, ClassLoader parent,
+ List<String> systemClasses) {
+ super(urls, parent);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("urls: " + Arrays.toString(urls));
+ LOG.debug("system classes: " + systemClasses);
+ }
+ this.parent = parent;
+ if (parent == null) {
+ throw new IllegalArgumentException("No parent classloader!");
+ }
+ // if the caller-specified system classes are null or empty, use the default
+ this.systemClasses = (systemClasses == null || systemClasses.isEmpty()) ?
+ Arrays.asList(StringUtils.getTrimmedStrings(DEFAULT_SYSTEM_CLASSES)) :
+ systemClasses;
+ LOG.info("system classes: " + this.systemClasses);
+ }
+
+ public ApplicationClassLoader(String classpath, ClassLoader parent,
+ List<String> systemClasses) throws MalformedURLException {
+ this(constructUrlsFromClasspath(classpath), parent, systemClasses);
+ }
+
+ static URL[] constructUrlsFromClasspath(String classpath)
+ throws MalformedURLException {
+ List<URL> urls = new ArrayList<URL>();
+ for (String element : classpath.split(File.pathSeparator)) {
+ if (element.endsWith("/*")) {
+ String dir = element.substring(0, element.length() - 1);
+ File[] files = new File(dir).listFiles(JAR_FILENAME_FILTER);
+ if (files != null) {
+ for (File file : files) {
+ urls.add(file.toURI().toURL());
+ }
+ }
+ } else {
+ File file = new File(element);
+ if (file.exists()) {
+ urls.add(new File(element).toURI().toURL());
+ }
+ }
+ }
+ return urls.toArray(new URL[urls.size()]);
+ }
+
+ @Override
+ public URL getResource(String name) {
+ URL url = null;
+
+ if (!isSystemClass(name, systemClasses)) {
+ url= findResource(name);
+ if (url == null && name.startsWith("/")) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Remove leading / off " + name);
+ }
+ url= findResource(name.substring(1));
+ }
+ }
+
+ if (url == null) {
+ url= parent.getResource(name);
+ }
+
+ if (url != null) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("getResource("+name+")=" + url);
+ }
+ }
+
+ return url;
+ }
+
+ @Override
+ public Class<?> loadClass(String name) throws ClassNotFoundException {
+ return this.loadClass(name, false);
+ }
+
+ @Override
+ protected synchronized Class<?> loadClass(String name, boolean resolve)
+ throws ClassNotFoundException {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Loading class: " + name);
+ }
+
+ Class<?> c = findLoadedClass(name);
+ ClassNotFoundException ex = null;
+
+ if (c == null && !isSystemClass(name, systemClasses)) {
+ // Try to load class from this classloader's URLs. Note that this is like
+ // the servlet spec, not the usual Java 2 behaviour where we ask the
+ // parent to attempt to load first.
+ try {
+ c = findClass(name);
+ if (LOG.isDebugEnabled() && c != null) {
+ LOG.debug("Loaded class: " + name + " ");
+ }
+ } catch (ClassNotFoundException e) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(e);
+ }
+ ex = e;
+ }
+ }
+
+ if (c == null) { // try parent
+ c = parent.loadClass(name);
+ if (LOG.isDebugEnabled() && c != null) {
+ LOG.debug("Loaded class from parent: " + name + " ");
+ }
+ }
+
+ if (c == null) {
+ throw ex != null ? ex : new ClassNotFoundException(name);
+ }
+
+ if (resolve) {
+ resolveClass(c);
+ }
+
+ return c;
+ }
+
+ public static boolean isSystemClass(String name, List<String> systemClasses) {
+ if (systemClasses != null) {
+ String canonicalName = name.replace('/', '.');
+ while (canonicalName.startsWith(".")) {
+ canonicalName=canonicalName.substring(1);
+ }
+ for (String c : systemClasses) {
+ boolean result = true;
+ if (c.startsWith("-")) {
+ c = c.substring(1);
+ result = false;
+ }
+ if (c.endsWith(".") && canonicalName.startsWith(c)) {
+ return result;
+ } else if (canonicalName.equals(c)) {
+ return result;
+ }
+ }
+ }
+ return false;
+ }
+}
\ No newline at end of file
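
The matching rules used by isSystemClass() above deserve a concrete illustration: an entry ending in '.' matches a package prefix, an entry starting with '-' is a negative match, and the first matching entry determines the result. A minimal, self-contained sketch (MyOutputFormat is a hypothetical user class, not part of this patch):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.util.ApplicationClassLoader;

    public class SystemClassMatching {
      public static void main(String[] args) {
        List<String> sys = Arrays.asList(
            "-org.apache.hadoop.examples.MyOutputFormat", // negative match, listed first so it wins
            "java.",                                       // package prefix
            "org.apache.hadoop.");                         // package prefix
        // true: matches the "java." package entry
        System.out.println(ApplicationClassLoader.isSystemClass("java.lang.String", sys));
        // false: the '-' entry excludes it before "org.apache.hadoop." is consulted
        System.out.println(ApplicationClassLoader.isSystemClass(
            "org.apache.hadoop.examples.MyOutputFormat", sys));
        // false: no entry matches, so it is treated as an application class
        System.out.println(ApplicationClassLoader.isSystemClass("com.example.UserCode", sys));
      }
    }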
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
index 08b4fd15d84..75b43b63fbd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
@@ -18,23 +18,25 @@
package org.apache.hadoop.util;
-import java.lang.reflect.Array;
-import java.lang.reflect.Method;
-import java.lang.reflect.InvocationTargetException;
-import java.net.URL;
-import java.net.URLClassLoader;
+import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.io.File;
-import java.util.regex.Pattern;
-import java.util.Arrays;
+import java.lang.reflect.Array;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Enumeration;
-import java.util.jar.JarFile;
+import java.util.List;
import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
import java.util.jar.Manifest;
+import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -55,6 +57,21 @@ public class RunJar {
*/
public static final int SHUTDOWN_HOOK_PRIORITY = 10;
+ /**
+ * Environment key for using the client classloader.
+ */
+ public static final String HADOOP_USE_CLIENT_CLASSLOADER =
+ "HADOOP_USE_CLIENT_CLASSLOADER";
+ /**
+ * Environment key for the (user-provided) hadoop classpath.
+ */
+ public static final String HADOOP_CLASSPATH = "HADOOP_CLASSPATH";
+ /**
+ * Environment key for the system classes.
+ */
+ public static final String HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES =
+ "HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES";
+
/**
* Unpack a jar file into a directory.
*
@@ -116,6 +133,10 @@ public class RunJar {
/** Run a Hadoop job jar. If the main class is not in the jar's manifest,
* then it must be provided on the command line. */
public static void main(String[] args) throws Throwable {
+ new RunJar().run(args);
+ }
+
+ public void run(String[] args) throws Throwable {
String usage = "RunJar jarFile [mainClass] args...";
if (args.length < 1) {
@@ -187,19 +208,7 @@ public class RunJar {
unJar(file, workDir);
- ArrayList<URL> classPath = new ArrayList<URL>();
- classPath.add(new File(workDir+"/").toURI().toURL());
- classPath.add(file.toURI().toURL());
- classPath.add(new File(workDir, "classes/").toURI().toURL());
- File[] libs = new File(workDir, "lib").listFiles();
- if (libs != null) {
- for (int i = 0; i < libs.length; i++) {
- classPath.add(libs[i].toURI().toURL());
- }
- }
-
- ClassLoader loader =
- new URLClassLoader(classPath.toArray(new URL[0]));
+ ClassLoader loader = createClassLoader(file, workDir);
Thread.currentThread().setContextClassLoader(loader);
Class<?> mainClass = Class.forName(mainClassName, true, loader);
@@ -214,5 +223,65 @@ public class RunJar {
throw e.getTargetException();
}
}
-
+
+ /**
+ * Creates a classloader based on the environment that was specified by the
+ * user. If HADOOP_USE_CLIENT_CLASSLOADER is specified, it creates an
+ * application classloader that provides the isolation of the user class space
+ * from the hadoop classes and their dependencies. It forms a class space for
+ * the user jar as well as the HADOOP_CLASSPATH. Otherwise, it creates a
+ * classloader that simply adds the user jar to the classpath.
+ */
+ private ClassLoader createClassLoader(File file, final File workDir)
+ throws MalformedURLException {
+ ClassLoader loader;
+ // see if the client classloader is enabled
+ if (useClientClassLoader()) {
+ StringBuilder sb = new StringBuilder();
+ sb.append(workDir+"/").
+ append(File.pathSeparator).append(file).
+ append(File.pathSeparator).append(workDir+"/classes/").
+ append(File.pathSeparator).append(workDir+"/lib/*");
+ // HADOOP_CLASSPATH is added to the client classpath
+ String hadoopClasspath = getHadoopClasspath();
+ if (hadoopClasspath != null && !hadoopClasspath.isEmpty()) {
+ sb.append(File.pathSeparator).append(hadoopClasspath);
+ }
+ String clientClasspath = sb.toString();
+ // get the system classes
+ String systemClasses = getSystemClasses();
+ List<String> systemClassesList = systemClasses == null ?
+ null :
+ Arrays.asList(StringUtils.getTrimmedStrings(systemClasses));
+ // create an application classloader that isolates the user classes
+ loader = new ApplicationClassLoader(clientClasspath,
+ getClass().getClassLoader(), systemClassesList);
+ } else {
+ List<URL> classPath = new ArrayList<URL>();
+ classPath.add(new File(workDir+"/").toURI().toURL());
+ classPath.add(file.toURI().toURL());
+ classPath.add(new File(workDir, "classes/").toURI().toURL());
+ File[] libs = new File(workDir, "lib").listFiles();
+ if (libs != null) {
+ for (int i = 0; i < libs.length; i++) {
+ classPath.add(libs[i].toURI().toURL());
+ }
+ }
+ // create a normal parent-delegating classloader
+ loader = new URLClassLoader(classPath.toArray(new URL[0]));
+ }
+ return loader;
+ }
+
+ boolean useClientClassLoader() {
+ return Boolean.parseBoolean(System.getenv(HADOOP_USE_CLIENT_CLASSLOADER));
+ }
+
+ String getHadoopClasspath() {
+ return System.getenv(HADOOP_CLASSPATH);
+ }
+
+ String getSystemClasses() {
+ return System.getenv(HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES);
+ }
}
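
The createClassLoader() logic above can also be exercised outside of RunJar. The following sketch mirrors the client-classloader branch under stated assumptions: the work directory path is a placeholder, the jar file itself is omitted for brevity, and the default system classes are used unchanged.

    import java.io.File;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.util.ApplicationClassLoader;
    import org.apache.hadoop.util.StringUtils;

    public class ClientClassLoaderSketch {
      public static void main(String[] args) throws Exception {
        String workDir = "/tmp/unjar0";  // placeholder for RunJar's unpack directory
        // a trailing "/*" in a classpath element expands to the jars in that directory
        String classpath = workDir + "/" + File.pathSeparator
            + workDir + "/classes/" + File.pathSeparator
            + workDir + "/lib/*";
        List<String> systemClasses = Arrays.asList(StringUtils.getTrimmedStrings(
            ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES));
        ClassLoader loader = new ApplicationClassLoader(classpath,
            ClientClassLoaderSketch.class.getClassLoader(), systemClasses);
        // Classes found on the classpath are loaded by the isolated loader;
        // anything matching the system classes still comes from the parent.
        Thread.currentThread().setContextClassLoader(loader);
      }
    }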
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheck.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheck.java
new file mode 100644
index 00000000000..aa2cc0eee41
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheck.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+public class ClassLoaderCheck {
+ /**
+ * Verifies the class is loaded by the right classloader.
+ */
+ public static void checkClassLoader(Class cls,
+ boolean shouldBeLoadedByAppClassLoader) {
+ boolean loadedByAppClassLoader =
+ cls.getClassLoader() instanceof ApplicationClassLoader;
+ if ((shouldBeLoadedByAppClassLoader && !loadedByAppClassLoader) ||
+ (!shouldBeLoadedByAppClassLoader && loadedByAppClassLoader)) {
+ throw new RuntimeException("incorrect classloader used");
+ }
+ }
+}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckMain.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckMain.java
new file mode 100644
index 00000000000..bb14ac9594f
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckMain.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+/**
+ * Test class used by {@link TestRunJar} to verify that it is loaded by the
+ * {@link ApplicationClassLoader}.
+ */
+public class ClassLoaderCheckMain {
+ public static void main(String[] args) {
+ // ClassLoaderCheckMain should be loaded by the application classloader
+ ClassLoaderCheck.checkClassLoader(ClassLoaderCheckMain.class, true);
+ // ClassLoaderCheckSecond should NOT be loaded by the application
+ // classloader
+ ClassLoaderCheck.checkClassLoader(ClassLoaderCheckSecond.class, false);
+ // ClassLoaderCheckThird should be loaded by the application classloader
+ ClassLoaderCheck.checkClassLoader(ClassLoaderCheckThird.class, true);
+ }
+}
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckSecond.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckSecond.java
new file mode 100644
index 00000000000..45601bd07dc
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckSecond.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+/**
+ * A class {@link ClassLoaderCheckMain} depends on that should be loaded by the
+ * system classloader.
+ */
+public class ClassLoaderCheckSecond {}
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckThird.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckThird.java
new file mode 100644
index 00000000000..dd4c0c4a1fa
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckThird.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+/**
+ * A class {@link ClassLoaderCheckMain} depends on that should be loaded by the
+ * application classloader.
+ */
+public class ClassLoaderCheckThird {}
\ No newline at end of file
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestApplicationClassLoader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
similarity index 95%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestApplicationClassLoader.java
rename to hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
index bb4b28c616d..5d0e131bd6c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestApplicationClassLoader.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
@@ -16,18 +16,15 @@
* limitations under the License.
*/
-package org.apache.hadoop.yarn.util;
+package org.apache.hadoop.util;
+import static org.apache.hadoop.util.ApplicationClassLoader.constructUrlsFromClasspath;
+import static org.apache.hadoop.util.ApplicationClassLoader.isSystemClass;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
-import static org.apache.hadoop.yarn.util.ApplicationClassLoader.constructUrlsFromClasspath;
-import static org.apache.hadoop.yarn.util.ApplicationClassLoader.isSystemClass;
-
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
import java.io.File;
import java.io.FileOutputStream;
@@ -43,6 +40,9 @@ import org.apache.hadoop.fs.FileUtil;
import org.junit.Before;
import org.junit.Test;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Lists;
+
public class TestApplicationClassLoader {
private static File testDir = new File(System.getProperty("test.build.data",
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
index 8903fca52f8..9e279689a49 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
@@ -17,23 +17,30 @@
*/
package org.apache.hadoop.util;
-import junit.framework.TestCase;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.InputStream;
import java.util.jar.JarOutputStream;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.FileUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-import org.apache.hadoop.fs.FileUtil;
public class TestRunJar extends TestCase {
private File TEST_ROOT_DIR;
private static final String TEST_JAR_NAME="test-runjar.jar";
+ private static final String TEST_JAR_2_NAME = "test-runjar2.jar";
@Override
@Before
@@ -107,4 +114,59 @@ public class TestRunJar extends TestCase {
new File(unjarDir, "foobaz.txt").exists());
}
+
+ /**
+ * Tests the client classloader to verify the main class and its dependent
+ * class are loaded correctly by the application classloader, and others are
+ * loaded by the system classloader.
+ */
+ @Test
+ public void testClientClassLoader() throws Throwable {
+ RunJar runJar = spy(new RunJar());
+ // enable the client classloader
+ when(runJar.useClientClassLoader()).thenReturn(true);
+ // set the system classes and blacklist the test main class and the test
+ // third class so they can be loaded by the application classloader
+ String mainCls = ClassLoaderCheckMain.class.getName();
+ String thirdCls = ClassLoaderCheckThird.class.getName();
+ String systemClasses = "-" + mainCls + "," +
+ "-" + thirdCls + "," +
+ ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES;
+ when(runJar.getSystemClasses()).thenReturn(systemClasses);
+
+ // create the test jar
+ File testJar = makeClassLoaderTestJar(mainCls, thirdCls);
+ // form the args
+ String[] args = new String[3];
+ args[0] = testJar.getAbsolutePath();
+ args[1] = mainCls;
+
+ // run RunJar
+ runJar.run(args);
+ // it should not throw an exception
+ }
+
+ private File makeClassLoaderTestJar(String... clsNames) throws IOException {
+ File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_2_NAME);
+ JarOutputStream jstream =
+ new JarOutputStream(new FileOutputStream(jarFile));
+ for (String clsName: clsNames) {
+ String name = clsName.replace('.', '/') + ".class";
+ InputStream entryInputStream = this.getClass().getResourceAsStream(
+ "/" + name);
+ ZipEntry entry = new ZipEntry(name);
+ jstream.putNextEntry(entry);
+ BufferedInputStream bufInputStream = new BufferedInputStream(
+ entryInputStream, 2048);
+ int count;
+ byte[] data = new byte[2048];
+ while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+ jstream.write(data, 0, count);
+ }
+ jstream.closeEntry();
+ }
+ jstream.close();
+
+ return jarFile;
+ }
}
\ No newline at end of file
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
index 301cdd50be2..3e522347b2a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
@@ -34,6 +34,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.annotations.VisibleForTesting;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -56,6 +57,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.util.ApplicationClassLoader;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringInterner;
import org.apache.hadoop.util.StringUtils;
@@ -67,7 +69,6 @@ import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.apache.hadoop.yarn.util.ApplicationClassLoader;
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.RollingFileAppender;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
index 36a6a2a9c6f..3b20bf1ee37 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.util.ApplicationClassLoader;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.util.StringUtils;
@@ -59,7 +60,6 @@ import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.util.ApplicationClassLoader;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -510,7 +510,8 @@ public class TestMRApps {
@Test
public void testSystemClasses() {
final List<String> systemClasses =
- Arrays.asList(MRApps.getSystemClasses(new Configuration()));
+ Arrays.asList(StringUtils.getTrimmedStrings(
+ ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES));
for (String defaultXml : DEFAULT_XMLS) {
assertTrue(defaultXml + " must be system resource",
ApplicationClassLoader.isSystemClass(defaultXml, systemClasses));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
index 6ab8dce3a35..73bf196f57f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
@@ -1660,13 +1660,13 @@
  <name>mapreduce.job.classloader.system.classes</name>
-  <value>java.,javax.,org.w3c.dom.,org.xml.sax.,org.apache.commons.logging.,
-  org.apache.log4j.,org.apache.hadoop.,core-default.xml,
-  hdfs-default.xml,mapred-default.xml,yarn-default.xml</value>
-  <description>A comma-separated list of classes that should be loaded from the
-  system classpath, not the user-supplied JARs, when mapreduce.job.classloader
-  is enabled. Names ending in '.' (period) are treated as package names,
-  and names starting with a '-' are treated as negative matches.</description>
+  <value></value>
+  <description>Used to override the default definition of the system classes for
+  the job classloader. The system classes are a comma-separated list of
+  classes that should be loaded from the system classpath, not the
+  user-supplied JARs, when mapreduce.job.classloader is enabled. Names ending
+  in '.' (period) are treated as package names, and names starting with a '-'
+  are treated as negative matches.</description>
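
For the job classloader (as opposed to the client-side classloader introduced by this patch), the override is normally supplied through job configuration. A hedged example in the spirit of the TestMRJobs change below, where com.example.CustomOutputFormat is a placeholder for a user class that must be excluded from the system classes so the job classloader loads it:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRJobConfig;
    import org.apache.hadoop.util.ApplicationClassLoader;

    public class JobClassLoaderConfig {
      public static Configuration configure(Configuration conf) {
        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);
        // Prepend the negative match: the first matching entry wins, so the
        // user class is kept out of the system classes while the defaults
        // continue to apply.
        conf.set(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER_SYSTEM_CLASSES,
            "-com.example.CustomOutputFormat,"
                + ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES);
        return conf;
      }

      public static void main(String[] args) {
        Configuration conf = configure(new Configuration());
        System.out.println(
            conf.get(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER_SYSTEM_CLASSES));
      }
    }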
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
index 6b47554e8eb..32153996c8d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
@@ -84,13 +84,13 @@ import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.ApplicationClassLoader;
import org.apache.hadoop.util.JarFinder;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
-import org.apache.hadoop.yarn.util.ApplicationClassLoader;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.Level;
import org.junit.AfterClass;
@@ -242,8 +242,7 @@ public class TestMRJobs {
// to test AM loading user classes such as output format class, we want
// to blacklist them from the system classes (they need to be prepended
// as the first match wins)
- String systemClasses =
- sleepConf.get(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER_SYSTEM_CLASSES);
+ String systemClasses = ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES;
// exclude the custom classes from system classes
systemClasses = "-" + CustomOutputFormat.class.getName() + ",-" +
CustomSpeculator.class.getName() + "," +
diff --git a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
index 6609a260130..b1dfb1ec5ed 100644
--- a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
+++ b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
@@ -344,4 +344,11 @@
+
+
+
+
+
+
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ApplicationClassLoader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ApplicationClassLoader.java
index 63dc5b798c1..ee9ad4c8ddd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ApplicationClassLoader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ApplicationClassLoader.java
@@ -18,180 +18,30 @@
package org.apache.hadoop.yarn.util;
-import java.io.File;
-import java.io.FilenameFilter;
import java.net.MalformedURLException;
import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Splitter;
-
/**
- * A {@link URLClassLoader} for YARN application isolation. Classes from
- * the application JARs are loaded in preference to the parent loader.
+ * This type has been deprecated in favor of
+ * {@link org.apache.hadoop.util.ApplicationClassLoader}. All new uses of
+ * ApplicationClassLoader should use that type instead.
*/
@Public
@Unstable
-public class ApplicationClassLoader extends URLClassLoader {
-
- private static final Log LOG =
- LogFactory.getLog(ApplicationClassLoader.class.getName());
-
- private static final FilenameFilter JAR_FILENAME_FILTER =
- new FilenameFilter() {
- @Override
- public boolean accept(File dir, String name) {
- return name.endsWith(".jar") || name.endsWith(".JAR");
- }
- };
-
- private ClassLoader parent;
- private List<String> systemClasses;
-
+@Deprecated
+public class ApplicationClassLoader extends
+ org.apache.hadoop.util.ApplicationClassLoader {
public ApplicationClassLoader(URL[] urls, ClassLoader parent,
List<String> systemClasses) {
- super(urls, parent);
- this.parent = parent;
- if (parent == null) {
- throw new IllegalArgumentException("No parent classloader!");
- }
- this.systemClasses = systemClasses;
+ super(urls, parent, systemClasses);
}
-
+
public ApplicationClassLoader(String classpath, ClassLoader parent,
List<String> systemClasses) throws MalformedURLException {
- this(constructUrlsFromClasspath(classpath), parent, systemClasses);
+ super(classpath, parent, systemClasses);
}
-
- @VisibleForTesting
- static URL[] constructUrlsFromClasspath(String classpath)
- throws MalformedURLException {
- List<URL> urls = new ArrayList<URL>();
- for (String element : Splitter.on(File.pathSeparator).split(classpath)) {
- if (element.endsWith("/*")) {
- String dir = element.substring(0, element.length() - 1);
- File[] files = new File(dir).listFiles(JAR_FILENAME_FILTER);
- if (files != null) {
- for (File file : files) {
- urls.add(file.toURI().toURL());
- }
- }
- } else {
- File file = new File(element);
- if (file.exists()) {
- urls.add(new File(element).toURI().toURL());
- }
- }
- }
- return urls.toArray(new URL[urls.size()]);
- }
-
- @Override
- public URL getResource(String name) {
- URL url = null;
-
- if (!isSystemClass(name, systemClasses)) {
- url= findResource(name);
- if (url == null && name.startsWith("/")) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Remove leading / off " + name);
- }
- url= findResource(name.substring(1));
- }
- }
-
- if (url == null) {
- url= parent.getResource(name);
- }
-
- if (url != null) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("getResource("+name+")=" + url);
- }
- }
-
- return url;
- }
-
- @Override
- public Class<?> loadClass(String name) throws ClassNotFoundException {
- return this.loadClass(name, false);
- }
-
- @Override
- protected synchronized Class<?> loadClass(String name, boolean resolve)
- throws ClassNotFoundException {
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Loading class: " + name);
- }
-
- Class<?> c = findLoadedClass(name);
- ClassNotFoundException ex = null;
-
- if (c == null && !isSystemClass(name, systemClasses)) {
- // Try to load class from this classloader's URLs. Note that this is like
- // the servlet spec, not the usual Java 2 behaviour where we ask the
- // parent to attempt to load first.
- try {
- c = findClass(name);
- if (LOG.isDebugEnabled() && c != null) {
- LOG.debug("Loaded class: " + name + " ");
- }
- } catch (ClassNotFoundException e) {
- if (LOG.isDebugEnabled()) {
- LOG.debug(e);
- }
- ex = e;
- }
- }
-
- if (c == null) { // try parent
- c = parent.loadClass(name);
- if (LOG.isDebugEnabled() && c != null) {
- LOG.debug("Loaded class from parent: " + name + " ");
- }
- }
-
- if (c == null) {
- throw ex != null ? ex : new ClassNotFoundException(name);
- }
-
- if (resolve) {
- resolveClass(c);
- }
-
- return c;
- }
-
- @VisibleForTesting
- public static boolean isSystemClass(String name, List<String> systemClasses) {
- if (systemClasses != null) {
- String canonicalName = name.replace('/', '.');
- while (canonicalName.startsWith(".")) {
- canonicalName=canonicalName.substring(1);
- }
- for (String c : systemClasses) {
- boolean result = true;
- if (c.startsWith("-")) {
- c = c.substring(1);
- result = false;
- }
- if (c.endsWith(".") && canonicalName.startsWith(c)) {
- return result;
- } else if (canonicalName.equals(c)) {
- return result;
- }
- }
- }
- return false;
- }
-}
\ No newline at end of file
+}