HBASE-8140 TableMapReduceUtils#addDependencyJar fails when nested inside another MR job

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1458531 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-03-19 21:54:00 +00:00
parent 500cdf06fe
commit fd4c887cd0
3 changed files with 315 additions and 49 deletions
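For orientation, below is a minimal, hypothetical sketch (not part of this commit) of the call path the patch touches: an ordinary HBase MapReduce driver whose setup runs through TableMapReduceUtil, which calls addDependencyJars(job) and, for each dependency class, findOrCreateJar. The driver class and table name are illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.IdentityTableMapper;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

public class ScanDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Job job = new Job(conf, "scan-example");
    job.setJarByClass(ScanDriver.class);
    // initTableMapperJob calls addDependencyJars(job) internally; that walks the
    // job's dependency classes and invokes findOrCreateJar for each one, shipping
    // the resulting jars through the distributed cache.
    TableMapReduceUtil.initTableMapperJob("example_table", new Scan(),
        IdentityTableMapper.class, ImmutableBytesWritable.class, Result.class, job);
    job.setOutputFormatClass(NullOutputFormat.class);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

Before this change, findOrCreateJar fell back to findContainingJar, which returns null when a class is only available as expanded .class files on the classpath, so nothing was shipped; the patch routes that fallback through the backported JarFinder instead.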

TableMapReduceUtil.java (package org.apache.hadoop.hbase.mapreduce)

@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.hadoopbackport.JarFinder;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.security.User;
@@ -590,7 +591,7 @@ public class TableMapReduceUtil {
* @return a jar file that contains the class, or null.
* @throws IOException
*/
-private static String findOrCreateJar(Class my_class)
+private static String findOrCreateJar(Class<?> my_class)
throws IOException {
try {
Class<?> jarFinder = Class.forName("org.apache.hadoop.util.JarFinder");
@@ -598,8 +599,7 @@ public class TableMapReduceUtil {
// if it doesn't exist. Note that this is needed to run the mapreduce
// unit tests post-0.23, because mapreduce v2 requires the relevant jars
// to be in the mr cluster to do output, split, etc. At unit test time,
-// the hbase jars do not exist, so we need to create some. Note that we
-// can safely fall back to findContainingJars for pre-0.23 mapreduce.
+// the hbase jars do not exist, so we need to create some.
Method m = jarFinder.getMethod("getJar", Class.class);
return (String)m.invoke(null,my_class);
} catch (InvocationTargetException ite) {
@@ -607,52 +607,19 @@ public class TableMapReduceUtil {
throw new IOException(ite.getCause());
} catch (Exception e) {
// ignore all other exceptions. related to reflection failure
}
LOG.debug("New JarFinder: org.apache.hadoop.util.JarFinder.getJar " +
"not available. Using old findContainingJar");
return findContainingJar(my_class);
}
/**
* Find a jar that contains a class of the same name, if any.
* It will return a jar file, even if that is not the first thing
* on the class path that has a class with the same name.
*
* This is shamelessly copied from JobConf
*
* @param my_class the class to find.
* @return a jar file that contains the class, or null.
* @throws IOException
*/
private static String findContainingJar(Class my_class) {
ClassLoader loader = my_class.getClassLoader();
String class_file = my_class.getName().replaceAll("\\.", "/") + ".class";
try {
for(Enumeration itr = loader.getResources(class_file);
itr.hasMoreElements();) {
URL url = (URL) itr.nextElement();
if ("jar".equals(url.getProtocol())) {
String toReturn = url.getPath();
if (toReturn.startsWith("file:")) {
toReturn = toReturn.substring("file:".length());
}
// URLDecoder is a misnamed class, since it actually decodes
// x-www-form-urlencoded MIME type rather than actual
// URL encoding (which the file path has). Therefore it would
// decode +s to ' 's which is incorrect (spaces are actually
// either unencoded or encoded as "%20"). Replace +s first, so
// that they are kept sacred during the decoding process.
toReturn = toReturn.replaceAll("\\+", "%2B");
toReturn = URLDecoder.decode(toReturn, "UTF-8");
return toReturn.replaceAll("!.*$", "");
}
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return null;
LOG.debug("New JarFinder: org.apache.hadoop.util.JarFinder.getJar " +
"not available. Falling back to backported JarFinder");
// Use JarFinder because it will construct a jar from class files when
// one does not exist. This is relevant for cases when an HBase MR job
// is created in the context of another MR job (particularly common for
// tools consuming the bulk import APIs). In that case, the dependency
// jars have already been shipped to and expanded in the job's working
// directory, so it has no jars to package. We could just construct a
// classpath from those class files, but we don't know the context: are
// they on the local filesystem, are they are ephemeral tmp files, &c.
// Better to package them up and ship them via the normal means.
return JarFinder.getJar(my_class);
}
}
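The comment added above describes the nested case that motivated this change. A hypothetical sketch of that pattern, assuming an outer MR job whose mapper configures a second, HBase-touching job (class and job names are illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;

public class OuterMapper extends Mapper<LongWritable, Text, Text, Text> {
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // Inside this task the HBase classes are present only as .class files
    // expanded into the task's working directory, not as jars.
    Configuration conf = HBaseConfiguration.create(context.getConfiguration());
    Job innerJob = new Job(conf, "inner-hbase-job");
    // Previously addDependencyJars -> findOrCreateJar -> findContainingJar
    // found no jar here and shipped nothing; with this patch the backported
    // JarFinder packages the expanded classes into temporary jars instead.
    TableMapReduceUtil.addDependencyJars(innerJob);
    // ... further configuration and submission of innerJob would go here ...
  }
}

In that situation the dependency classes sit on the task's local classpath as directories, so packaging them into jars, as the new comment explains, is the only way to ship them onward by the normal means.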

JarFinder.java (new file, package org.apache.hadoop.hbase.mapreduce.hadoopbackport)

@@ -0,0 +1,170 @@
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.hbase.mapreduce.hadoopbackport;
import com.google.common.base.Preconditions;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLDecoder;
import java.text.MessageFormat;
import java.util.Enumeration;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
* Finds the Jar for a class. If the class is in a directory in the
* classpath, it creates a Jar on the fly with the contents of the directory
* and returns the path to that Jar. If a Jar is created, it is created in
* the system temporary directory.
*
* This file was forked from hadoop/common/branches/branch-2@1377176.
*/
public class JarFinder {
private static void copyToZipStream(InputStream is, ZipEntry entry,
ZipOutputStream zos) throws IOException {
zos.putNextEntry(entry);
byte[] arr = new byte[4096];
int read = is.read(arr);
while (read > -1) {
zos.write(arr, 0, read);
read = is.read(arr);
}
is.close();
zos.closeEntry();
}
public static void jarDir(File dir, String relativePath, ZipOutputStream zos)
throws IOException {
Preconditions.checkNotNull(relativePath, "relativePath");
Preconditions.checkNotNull(zos, "zos");
// by JAR spec, if there is a manifest, it must be the first entry in the
// ZIP.
File manifestFile = new File(dir, JarFile.MANIFEST_NAME);
ZipEntry manifestEntry = new ZipEntry(JarFile.MANIFEST_NAME);
if (!manifestFile.exists()) {
zos.putNextEntry(manifestEntry);
new Manifest().write(new BufferedOutputStream(zos));
zos.closeEntry();
} else {
InputStream is = new FileInputStream(manifestFile);
copyToZipStream(is, manifestEntry, zos);
}
zos.closeEntry();
zipDir(dir, relativePath, zos, true);
zos.close();
}
private static void zipDir(File dir, String relativePath, ZipOutputStream zos,
boolean start) throws IOException {
String[] dirList = dir.list();
for (String aDirList : dirList) {
File f = new File(dir, aDirList);
if (!f.isHidden()) {
if (f.isDirectory()) {
if (!start) {
ZipEntry dirEntry = new ZipEntry(relativePath + f.getName() + "/");
zos.putNextEntry(dirEntry);
zos.closeEntry();
}
String filePath = f.getPath();
File file = new File(filePath);
zipDir(file, relativePath + f.getName() + "/", zos, false);
}
else {
String path = relativePath + f.getName();
if (!path.equals(JarFile.MANIFEST_NAME)) {
ZipEntry anEntry = new ZipEntry(path);
InputStream is = new FileInputStream(f);
copyToZipStream(is, anEntry, zos);
}
}
}
}
}
private static void createJar(File dir, File jarFile) throws IOException {
Preconditions.checkNotNull(dir, "dir");
Preconditions.checkNotNull(jarFile, "jarFile");
File jarDir = jarFile.getParentFile();
if (!jarDir.exists()) {
if (!jarDir.mkdirs()) {
throw new IOException(MessageFormat.format("could not create dir [{0}]",
jarDir));
}
}
JarOutputStream zos = new JarOutputStream(new FileOutputStream(jarFile));
jarDir(dir, "", zos);
}
/**
* Returns the full path to the Jar containing the class. It always return a
* JAR.
*
* @param klass class.
*
* @return path to the Jar containing the class.
*/
public static String getJar(Class klass) {
Preconditions.checkNotNull(klass, "klass");
ClassLoader loader = klass.getClassLoader();
if (loader != null) {
String class_file = klass.getName().replaceAll("\\.", "/") + ".class";
try {
for (Enumeration itr = loader.getResources(class_file);
itr.hasMoreElements(); ) {
URL url = (URL) itr.nextElement();
String path = url.getPath();
if (path.startsWith("file:")) {
path = path.substring("file:".length());
}
path = URLDecoder.decode(path, "UTF-8");
if ("jar".equals(url.getProtocol())) {
path = URLDecoder.decode(path, "UTF-8");
return path.replaceAll("!.*$", "");
}
else if ("file".equals(url.getProtocol())) {
String klassName = klass.getName();
klassName = klassName.replace(".", "/") + ".class";
path = path.substring(0, path.length() - klassName.length());
File baseDir = new File(path);
File testDir = new File(System.getProperty("test.build.dir", "target/test-dir"));
testDir = testDir.getAbsoluteFile();
if (!testDir.exists()) {
testDir.mkdirs();
}
File tempJar = File.createTempFile("hadoop-", "", testDir);
tempJar = new File(tempJar.getAbsolutePath() + ".jar");
createJar(baseDir, tempJar);
return tempJar.getAbsolutePath();
}
}
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
return null;
}
}
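As a usage note, here is a small hypothetical sketch of the two behaviours JarFinder.getJar provides; the tests that follow exercise exactly these two paths.

import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.mapreduce.hadoopbackport.JarFinder;

public class JarFinderDemo {
  public static void main(String[] args) {
    // LogFactory is loaded from the commons-logging jar, so getJar returns that jar's path.
    System.out.println(JarFinder.getJar(LogFactory.class));
    // This demo class is typically loaded from a directory of .class files, so
    // getJar packages that directory into a temporary jar and returns its path.
    System.out.println(JarFinder.getJar(JarFinderDemo.class));
  }
}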

TestJarFinder.java (new file, package org.apache.hadoop.hbase.mapreduce.hadoopbackport)

@@ -0,0 +1,129 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce.hadoopbackport;
import org.apache.commons.logging.LogFactory;
import org.junit.Assert;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.Properties;
import java.util.jar.JarInputStream;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
/**
* This file was forked from hadoop/common/branches/branch-2@1350012.
*/
public class TestJarFinder {
@Test
public void testJar() throws Exception {
//picking a class that is for sure in a JAR in the classpath
String jar = JarFinder.getJar(LogFactory.class);
Assert.assertTrue(new File(jar).exists());
}
private static void delete(File file) throws IOException {
if (file.getAbsolutePath().length() < 5) {
throw new IllegalArgumentException(
MessageFormat.format("Path [{0}] is too short, not deleting",
file.getAbsolutePath()));
}
if (file.exists()) {
if (file.isDirectory()) {
File[] children = file.listFiles();
if (children != null) {
for (File child : children) {
delete(child);
}
}
}
if (!file.delete()) {
throw new RuntimeException(
MessageFormat.format("Could not delete path [{0}]",
file.getAbsolutePath()));
}
}
}
@Test
public void testExpandedClasspath() throws Exception {
//picking a class that is for sure in a directory in the classpath
//in this case the JAR is created on the fly
String jar = JarFinder.getJar(TestJarFinder.class);
Assert.assertTrue(new File(jar).exists());
}
@Test
public void testExistingManifest() throws Exception {
File dir = new File(System.getProperty("test.build.dir", "target/test-dir"),
TestJarFinder.class.getName() + "-testExistingManifest");
delete(dir);
dir.mkdirs();
File metaInfDir = new File(dir, "META-INF");
metaInfDir.mkdirs();
File manifestFile = new File(metaInfDir, "MANIFEST.MF");
Manifest manifest = new Manifest();
OutputStream os = new FileOutputStream(manifestFile);
manifest.write(os);
os.close();
File propsFile = new File(dir, "props.properties");
Writer writer = new FileWriter(propsFile);
new Properties().store(writer, "");
writer.close();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
JarOutputStream zos = new JarOutputStream(baos);
JarFinder.jarDir(dir, "", zos);
JarInputStream jis =
new JarInputStream(new ByteArrayInputStream(baos.toByteArray()));
Assert.assertNotNull(jis.getManifest());
jis.close();
}
@Test
public void testNoManifest() throws Exception {
File dir = new File(System.getProperty("test.build.dir", "target/test-dir"),
TestJarFinder.class.getName() + "-testNoManifest");
delete(dir);
dir.mkdirs();
File propsFile = new File(dir, "props.properties");
Writer writer = new FileWriter(propsFile);
new Properties().store(writer, "");
writer.close();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
JarOutputStream zos = new JarOutputStream(baos);
JarFinder.jarDir(dir, "", zos);
JarInputStream jis =
new JarInputStream(new ByteArrayInputStream(baos.toByteArray()));
Assert.assertNotNull(jis.getManifest());
jis.close();
}
}