diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 25c35f15835..e5931e43861 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -63,6 +63,9 @@ Release 2.0.4-beta - UNRELEASED
     HADOOP-9230. TestUniformSizeInputFormat fails intermittently. (kkambatl
     via tucu)
 
+    HADOOP-9349. Confusing output when running hadoop version from one hadoop
+    installation when HADOOP_HOME points to another. (sandyr via tucu)
+
 Release 2.0.3-alpha - 2013-02-06
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java
new file mode 100644
index 00000000000..53a5de17325
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import java.io.IOException;
+import java.net.URL;
+import java.net.URLDecoder;
+import java.util.Enumeration;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class ClassUtil {
+  /**
+   * Find a jar that contains a class of the same name, if any.
+   * It will return a jar file, even if that is not the first thing
+   * on the class path that has a class with the same name.
+   *
+   * @param clazz the class to find.
+   * @return a jar file that contains the class, or null.
+   * @throws IOException
+   */
+  public static String findContainingJar(Class clazz) {
+    ClassLoader loader = clazz.getClassLoader();
+    String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
+    try {
+      for (Enumeration itr = loader.getResources(classFile);
+          itr.hasMoreElements();) {
+        URL url = (URL) itr.nextElement();
+        if ("jar".equals(url.getProtocol())) {
+          String toReturn = url.getPath();
+          if (toReturn.startsWith("file:")) {
+            toReturn = toReturn.substring("file:".length());
+          }
+          toReturn = URLDecoder.decode(toReturn, "UTF-8");
+          return toReturn.replaceAll("!.*$", "");
+        }
+      }
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    return null;
+  }
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
index f2415590b0d..5d7614f1ebf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.util;
 
+import java.io.IOException;
+import java.net.URL;
+import java.net.URLDecoder;
+import java.util.Enumeration;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -155,5 +160,7 @@ public class VersionInfo {
     System.out.println("Subversion " + getUrl() + " -r " + getRevision());
     System.out.println("Compiled by " + getUser() + " on " + getDate());
     System.out.println("From source with checksum " + getSrcChecksum());
+    System.out.println("This command was run using " +
+        ClassUtil.findContainingJar(VersionInfo.class));
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClassUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClassUtil.java
new file mode 100644
index 00000000000..fe1284fd585
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClassUtil.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import java.io.File;
+
+import junit.framework.Assert;
+
+import org.apache.log4j.Logger;
+import org.junit.Test;
+
+public class TestClassUtil {
+  @Test(timeout=1000)
+  public void testFindContainingJar() {
+    String containingJar = ClassUtil.findContainingJar(Logger.class);
+    Assert.assertNotNull("Containing jar not found for Logger",
+        containingJar);
+    File jarFile = new File(containingJar);
+    Assert.assertTrue("Containing jar does not exist on file system",
+        jarFile.exists());
+    Assert.assertTrue("Incorrect jar file" + containingJar,
+        jarFile.getName().matches("log4j.+[.]jar"));
+  }
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
index e76f62856da..a6ae5e2da42 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.util.ClassUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.log4j.Level;
@@ -453,7 +454,7 @@ public class JobConf extends Configuration {
    * @param cls the example class.
    */
   public void setJarByClass(Class cls) {
-    String jar = findContainingJar(cls);
+    String jar = ClassUtil.findContainingJar(cls);
     if (jar != null) {
       setJar(jar);
     }
@@ -1811,7 +1812,7 @@ public class JobConf extends Configuration {
     return (int)(Math.ceil((float)getMemoryForReduceTask() /
                            (float)slotSizePerReduce));
   }
-  
+
   /**
   * Find a jar that contains a class of the same name, if any.
   * It will return a jar file, even if that is not the first thing
@@ -1822,35 +1823,9 @@ public class JobConf extends Configuration {
   * @throws IOException
   */
   public static String findContainingJar(Class my_class) {
-    ClassLoader loader = my_class.getClassLoader();
-    String class_file = my_class.getName().replaceAll("\\.", "/") + ".class";
-    try {
-      for(Enumeration itr = loader.getResources(class_file);
-          itr.hasMoreElements();) {
-        URL url = (URL) itr.nextElement();
-        if ("jar".equals(url.getProtocol())) {
-          String toReturn = url.getPath();
-          if (toReturn.startsWith("file:")) {
-            toReturn = toReturn.substring("file:".length());
-          }
-          // URLDecoder is a misnamed class, since it actually decodes
-          // x-www-form-urlencoded MIME type rather than actual
-          // URL encoding (which the file path has). Therefore it would
-          // decode +s to ' 's which is incorrect (spaces are actually
-          // either unencoded or encoded as "%20"). Replace +s first, so
-          // that they are kept sacred during the decoding process.
-          toReturn = toReturn.replaceAll("\\+", "%2B");
-          toReturn = URLDecoder.decode(toReturn, "UTF-8");
-          return toReturn.replaceAll("!.*$", "");
-        }
-      }
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-    return null;
+    return ClassUtil.findContainingJar(my_class);
   }
-
 
   /**
    * Get the memory required to run a task of this job, in bytes. See
    * {@link #MAPRED_TASK_MAXVMEM_PROPERTY}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
index 3bd2c7866c5..56eb752e1db 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.util.ClassUtil;
 
 import static org.junit.Assert.*;
 
@@ -79,7 +80,7 @@ public class TestJobConf {
     Class clazz = Class.forName(CLASSNAME, true, cl);
     assertNotNull(clazz);
 
-    String containingJar = JobConf.findContainingJar(clazz);
+    String containingJar = ClassUtil.findContainingJar(clazz);
     assertEquals(jar.getAbsolutePath(), containingJar);
   }
 }
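For illustration only, not part of the patch: a minimal sketch of how the new org.apache.hadoop.util.ClassUtil utility can be exercised directly. The class JarLocator and its package below are hypothetical; the lookup mirrors the call added to VersionInfo.main() above, and findContainingJar returns null when the class was loaded from a directory of .class files rather than from a jar.

package org.apache.hadoop.examples; // hypothetical location, not in this patch

import org.apache.hadoop.util.ClassUtil;
import org.apache.hadoop.util.VersionInfo;

public class JarLocator {
  public static void main(String[] args) {
    // Same lookup that "hadoop version" now performs: report the jar the
    // loaded VersionInfo class actually came from, which disambiguates the
    // installation being run when HADOOP_HOME points at a different one.
    String jar = ClassUtil.findContainingJar(VersionInfo.class);
    System.out.println("VersionInfo was loaded from: " + jar);
  }
}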