HADOOP-9349. Confusing output when running hadoop version from one hadoop installation when HADOOP_HOME points to another. (sandyr via tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1451448 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 2013-03-01 01:20:53 +00:00
parent 62d70af19a
commit 892e088773
6 changed files with 116 additions and 30 deletions


@@ -400,6 +400,9 @@ Release 2.0.4-beta - UNRELEASED
    HADOOP-9230. TestUniformSizeInputFormat fails intermittently.
    (kkambatl via tucu)

    HADOOP-9349. Confusing output when running hadoop version from one hadoop
    installation when HADOOP_HOME points to another. (sandyr via tucu)

Release 2.0.3-alpha - 2013-02-06

  INCOMPATIBLE CHANGES


@@ -0,0 +1,60 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;

import java.io.IOException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Enumeration;

import org.apache.hadoop.classification.InterfaceAudience;

@InterfaceAudience.Private
public class ClassUtil {
  /**
   * Find a jar that contains a class of the same name, if any.
   * It will return a jar file, even if that is not the first thing
   * on the class path that has a class with the same name.
   *
   * @param clazz the class to find.
   * @return a jar file that contains the class, or null.
   * @throws IOException
   */
  public static String findContainingJar(Class clazz) {
    ClassLoader loader = clazz.getClassLoader();
    String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
    try {
      for (Enumeration itr = loader.getResources(classFile);
          itr.hasMoreElements();) {
        URL url = (URL) itr.nextElement();
        if ("jar".equals(url.getProtocol())) {
          String toReturn = url.getPath();
          if (toReturn.startsWith("file:")) {
            toReturn = toReturn.substring("file:".length());
          }
          toReturn = URLDecoder.decode(toReturn, "UTF-8");
          return toReturn.replaceAll("!.*$", "");
        }
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return null;
  }
}
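
Editor's aside (not part of the commit): a minimal sketch of how the new helper can be called. The package and class names of the sketch itself are hypothetical; ClassUtil and VersionInfo are the real classes from this change.

package org.apache.hadoop.examples;  // hypothetical package for this sketch

import org.apache.hadoop.util.ClassUtil;
import org.apache.hadoop.util.VersionInfo;

public class FindJarExample {  // hypothetical class name
  public static void main(String[] args) {
    // Prints the jar that provides VersionInfo, or null if the class was
    // loaded from a plain directory on the classpath rather than a jar.
    System.out.println("VersionInfo was loaded from "
        + ClassUtil.findContainingJar(VersionInfo.class));
  }
}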


@@ -18,6 +18,11 @@
package org.apache.hadoop.util;
import java.io.IOException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Enumeration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -155,5 +160,7 @@ public class VersionInfo {
    System.out.println("Subversion " + getUrl() + " -r " + getRevision());
    System.out.println("Compiled by " + getUser() + " on " + getDate());
    System.out.println("From source with checksum " + getSrcChecksum());
    System.out.println("This command was run using " +
        ClassUtil.findContainingJar(VersionInfo.class));
  }
}
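
For illustration only: with this change the tail of hadoop version names the jar the command actually executed from, so a mismatch with HADOOP_HOME is visible at a glance. Sample output with made-up values (only the line layout follows the println calls above):

Subversion https://svn.apache.org/repos/asf/hadoop/common/trunk -r 1451448
Compiled by jenkins on Fri Mar  1 01:20:53 UTC 2013
From source with checksum d41d8cd98f00b204e9800998ecf8427e
This command was run using /opt/hadoop-a/share/hadoop/common/hadoop-common-2.0.4-beta.jar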


@@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;

import java.io.File;

import junit.framework.Assert;

import org.apache.log4j.Logger;
import org.junit.Test;

public class TestClassUtil {
  @Test(timeout=1000)
  public void testFindContainingJar() {
    String containingJar = ClassUtil.findContainingJar(Logger.class);
    Assert.assertNotNull("Containing jar not found for Logger",
        containingJar);
    File jarFile = new File(containingJar);
    Assert.assertTrue("Containing jar does not exist on file system",
        jarFile.exists());
    Assert.assertTrue("Incorrect jar file" + containingJar,
        jarFile.getName().matches("log4j.+[.]jar"));
  }
}


@@ -50,6 +50,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.filecache.DistributedCache;
import org.apache.hadoop.mapreduce.util.ConfigUtil;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.util.ClassUtil;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Tool;
import org.apache.log4j.Level;
@@ -453,7 +454,7 @@ public class JobConf extends Configuration {
   * @param cls the example class.
   */
  public void setJarByClass(Class cls) {
    String jar = findContainingJar(cls);
    String jar = ClassUtil.findContainingJar(cls);
    if (jar != null) {
      setJar(jar);
    }
@@ -1811,7 +1812,7 @@
    return
        (int)(Math.ceil((float)getMemoryForReduceTask() / (float)slotSizePerReduce));
  }

  /**
   * Find a jar that contains a class of the same name, if any.
   * It will return a jar file, even if that is not the first thing
@@ -1822,35 +1823,9 @@
   * @throws IOException
   */
  public static String findContainingJar(Class my_class) {
    ClassLoader loader = my_class.getClassLoader();
    String class_file = my_class.getName().replaceAll("\\.", "/") + ".class";
    try {
      for(Enumeration itr = loader.getResources(class_file);
          itr.hasMoreElements();) {
        URL url = (URL) itr.nextElement();
        if ("jar".equals(url.getProtocol())) {
          String toReturn = url.getPath();
          if (toReturn.startsWith("file:")) {
            toReturn = toReturn.substring("file:".length());
          }
          // URLDecoder is a misnamed class, since it actually decodes
          // x-www-form-urlencoded MIME type rather than actual
          // URL encoding (which the file path has). Therefore it would
          // decode +s to ' 's which is incorrect (spaces are actually
          // either unencoded or encoded as "%20"). Replace +s first, so
          // that they are kept sacred during the decoding process.
          toReturn = toReturn.replaceAll("\\+", "%2B");
          toReturn = URLDecoder.decode(toReturn, "UTF-8");
          return toReturn.replaceAll("!.*$", "");
        }
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return null;
    return ClassUtil.findContainingJar(my_class);
  }

  /**
   * Get the memory required to run a task of this job, in bytes. See
   * {@link #MAPRED_TASK_MAXVMEM_PROPERTY}
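
Editor's aside on the URLDecoder comment in the body removed above: java.net.URLDecoder decodes x-www-form-urlencoded data, so a bare '+' in a file path comes back as a space unless it is escaped first, which is why that code replaced '+' with "%2B" before decoding. A small self-contained sketch (class name hypothetical, not part of the commit):

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;

public class PlusDecodeDemo {  // hypothetical name, for illustration only
  public static void main(String[] args) throws UnsupportedEncodingException {
    String path = "/opt/lib+extras/foo.jar!/org/Foo.class";
    // Naive decode: the '+' is treated as form encoding and becomes a space.
    // Prints: /opt/lib extras/foo.jar!/org/Foo.class
    System.out.println(URLDecoder.decode(path, "UTF-8"));
    // Escaping '+' as %2B first preserves it through decoding.
    // Prints: /opt/lib+extras/foo.jar!/org/Foo.class
    System.out.println(URLDecoder.decode(path.replaceAll("\\+", "%2B"), "UTF-8"));
  }
}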


@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.ClassUtil;
import static org.junit.Assert.*;
@@ -79,7 +80,7 @@ public class TestJobConf {
Class clazz = Class.forName(CLASSNAME, true, cl);
assertNotNull(clazz);
String containingJar = JobConf.findContainingJar(clazz);
String containingJar = ClassUtil.findContainingJar(clazz);
assertEquals(jar.getAbsolutePath(), containingJar);
}
}