HADOOP-13068. Clean up RunJar and related test class. (Contributed by Andras Bokor)
commit cb0a035bd1
parent 2d01ffaf24

org/apache/hadoop/util/RunJar.java
@@ -23,7 +23,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.lang.reflect.Array;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.MalformedURLException;
@@ -40,7 +39,6 @@ import java.util.regex.Pattern;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.slf4j.Logger;
@@ -53,7 +51,7 @@ public class RunJar {

   private static final Logger LOG = LoggerFactory.getLogger(RunJar.class);

-  /** Pattern that matches any string */
+  /** Pattern that matches any string. */
   public static final Pattern MATCH_ANY = Pattern.compile(".*");

   /**
@@ -76,10 +74,21 @@ public class RunJar {
   public static final String HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES =
       "HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES";

+  /**
+   * Buffer size for copy the content of compressed file to new file.
+   */
+  private static final int BUFFER_SIZE = 8_192;
+
   /**
    * Unpack a jar file into a directory.
    *
    * This version unpacks all files inside the jar regardless of filename.
+   *
+   * @param jarFile the .jar file to unpack
+   * @param toDir the destination directory into which to unpack the jar
+   *
+   * @throws IOException if an I/O error has occurred or toDir
+   *    cannot be created and does not already exist
    */
   public static void unJar(File jarFile, File toDir) throws IOException {
     unJar(jarFile, toDir, MATCH_ANY);
@@ -92,47 +101,43 @@ public class RunJar {
    * @param jarFile the .jar file to unpack
    * @param toDir the destination directory into which to unpack the jar
    * @param unpackRegex the pattern to match jar entries against
+   *
+   * @throws IOException if an I/O error has occurred or toDir
+   *    cannot be created and does not already exist
    */
   public static void unJar(File jarFile, File toDir, Pattern unpackRegex)
-    throws IOException {
-    JarFile jar = new JarFile(jarFile);
-    try {
+      throws IOException {
+    try (JarFile jar = new JarFile(jarFile)) {
       int numOfFailedLastModifiedSet = 0;
       Enumeration<JarEntry> entries = jar.entries();
       while (entries.hasMoreElements()) {
         final JarEntry entry = entries.nextElement();
         if (!entry.isDirectory() &&
             unpackRegex.matcher(entry.getName()).matches()) {
-          InputStream in = jar.getInputStream(entry);
-          try {
+          try (InputStream in = jar.getInputStream(entry)) {
             File file = new File(toDir, entry.getName());
             ensureDirectory(file.getParentFile());
-            OutputStream out = new FileOutputStream(file);
-            try {
-              IOUtils.copyBytes(in, out, 8192);
-            } finally {
-              out.close();
+            try (OutputStream out = new FileOutputStream(file)) {
+              IOUtils.copyBytes(in, out, BUFFER_SIZE);
             }
             if (!file.setLastModified(entry.getTime())) {
               numOfFailedLastModifiedSet++;
             }
-          } finally {
-            in.close();
           }
         }
       }
       if (numOfFailedLastModifiedSet > 0) {
         LOG.warn("Could not set last modfied time for {} file(s)",
-                numOfFailedLastModifiedSet);
+            numOfFailedLastModifiedSet);
       }
-    } finally {
-      jar.close();
     }
   }

   /**
    * Ensure the existence of a given directory.
    *
    * @param dir Directory to check
    *
    * @throws IOException if it cannot be created and does not already exist
    */
   private static void ensureDirectory(File dir) throws IOException {
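For readers unfamiliar with the pattern, the hunk above replaces hand-written finally/close() blocks with nested try-with-resources. A minimal, self-contained sketch of the same unpack logic using only JDK classes (Files.copy stands in for Hadoop's IOUtils.copyBytes; the class and method names UnpackSketch/unpackJar are illustrative, not part of RunJar):

    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.StandardCopyOption;
    import java.util.Enumeration;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    public class UnpackSketch {
      /** Unpack every regular file in the jar; all streams close automatically. */
      static void unpackJar(File jar, File toDir) throws IOException {
        try (JarFile jarFile = new JarFile(jar)) {
          Enumeration<JarEntry> entries = jarFile.entries();
          while (entries.hasMoreElements()) {
            JarEntry entry = entries.nextElement();
            if (entry.isDirectory()) {
              continue;
            }
            File target = new File(toDir, entry.getName());
            // Create parent directories, in the spirit of ensureDirectory().
            if (!target.getParentFile().mkdirs()
                && !target.getParentFile().isDirectory()) {
              throw new IOException("Cannot create " + target.getParentFile());
            }
            try (InputStream in = jarFile.getInputStream(entry)) {
              Files.copy(in, target.toPath(), StandardCopyOption.REPLACE_EXISTING);
            }
            // Preserve the entry timestamp, as the patched unJar() does.
            if (!target.setLastModified(entry.getTime())) {
              System.err.println("Could not set last-modified time for " + target);
            }
          }
        }
      }

      public static void main(String[] args) throws IOException {
        unpackJar(new File(args[0]), new File(args[1]));
      }
    }

Try-with-resources closes the inner stream before the outer JarFile, and does so even when the copy throws, which is exactly the guarantee the removed finally blocks were providing by hand.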
@@ -169,7 +174,7 @@ public class RunJar {
     JarFile jarFile;
     try {
       jarFile = new JarFile(fileName);
-    } catch(IOException io) {
+    } catch (IOException io) {
       throw new IOException("Error opening job jar: " + fileName)
           .initCause(io);
     }
@@ -193,11 +198,11 @@ public class RunJar {
     ensureDirectory(tmpDir);

     final File workDir;
-    try { 
+    try {
       workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
     } catch (IOException ioe) {
-      // If user has insufficient perms to write to tmpDir, default 
-      // "Permission denied" message doesn't specify a filename. 
+      // If user has insufficient perms to write to tmpDir, default
+      // "Permission denied" message doesn't specify a filename.
       System.err.println("Error creating temp dir in java.io.tmpdir "
           + tmpDir + " due to " + ioe.getMessage());
       System.exit(-1);
@@ -211,12 +216,12 @@ public class RunJar {
     ensureDirectory(workDir);

     ShutdownHookManager.get().addShutdownHook(
-      new Runnable() {
-        @Override
-        public void run() {
-          FileUtil.fullyDelete(workDir);
-        }
-      }, SHUTDOWN_HOOK_PRIORITY);
+        new Runnable() {
+          @Override
+          public void run() {
+            FileUtil.fullyDelete(workDir);
+          }
+        }, SHUTDOWN_HOOK_PRIORITY);


     unJar(file, workDir);
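The hunk above only re-indents the shutdown hook that deletes the scratch directory after the job finishes. A rough JDK-level analogue of the same idea, using Runtime.addShutdownHook and a hand-rolled recursive delete instead of Hadoop's ShutdownHookManager and FileUtil.fullyDelete (CleanupHookSketch and deleteRecursively are made-up names for the sketch):

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    public class CleanupHookSketch {
      public static void main(String[] args) throws IOException {
        // Hypothetical temp working directory, analogous to RunJar's workDir.
        File workDir = Files.createTempDirectory("unjar-sketch").toFile();

        // JDK-level counterpart of ShutdownHookManager.get().addShutdownHook(...):
        // delete the scratch directory when the JVM exits.
        Runtime.getRuntime().addShutdownHook(
            new Thread(() -> deleteRecursively(workDir)));

        System.out.println("Working in " + workDir);
      }

      // Simple stand-in for FileUtil.fullyDelete().
      private static void deleteRecursively(File f) {
        File[] children = f.listFiles();
        if (children != null) {
          for (File child : children) {
            deleteRecursively(child);
          }
        }
        if (!f.delete()) {
          System.err.println("Could not delete " + f);
        }
      }
    }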
@@ -225,13 +230,13 @@ public class RunJar {

     Thread.currentThread().setContextClassLoader(loader);
     Class<?> mainClass = Class.forName(mainClassName, true, loader);
-    Method main = mainClass.getMethod("main", new Class[] {
-      Array.newInstance(String.class, 0).getClass()
-    });
-    String[] newArgs = Arrays.asList(args)
-      .subList(firstArg, args.length).toArray(new String[0]);
+    Method main = mainClass.getMethod("main", String[].class);
+    List<String> newArgsSubList = Arrays.asList(args)
+        .subList(firstArg, args.length);
+    String[] newArgs = newArgsSubList
+        .toArray(new String[newArgsSubList.size()]);
     try {
-      main.invoke(null, new Object[] { newArgs });
+      main.invoke(null, new Object[] {newArgs});
     } catch (InvocationTargetException e) {
       throw e.getTargetException();
     }
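A compact sketch of the reflective main() dispatch shown above, using String[].class directly instead of Array.newInstance(String.class, 0).getClass(); MainInvokerSketch and its nested Demo class are illustrative stand-ins, not Hadoop code:

    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Method;
    import java.util.Arrays;

    public class MainInvokerSketch {
      /** Load mainClassName with the given loader and call its main(String[]). */
      static void callMain(ClassLoader loader, String mainClassName, String[] args)
          throws Throwable {
        Class<?> mainClass = Class.forName(mainClassName, true, loader);
        // String[].class denotes the same type as Array.newInstance(String.class, 0).getClass().
        Method main = mainClass.getMethod("main", String[].class);
        try {
          // Wrap the array in an Object[] so it is passed as one argument, not varargs-expanded.
          main.invoke(null, new Object[] {args});
        } catch (InvocationTargetException e) {
          // Re-throw whatever the target main() actually threw.
          throw e.getTargetException();
        }
      }

      public static void main(String[] args) throws Throwable {
        // Skip the first argument (the "class name" slot), like RunJar does with firstArg.
        String[] rest = Arrays.asList(args)
            .subList(Math.min(1, args.length), args.length)
            .toArray(new String[0]);
        callMain(MainInvokerSketch.class.getClassLoader(), Demo.class.getName(), rest);
      }

      public static class Demo {
        public static void main(String[] args) {
          System.out.println("Demo.main got " + args.length + " argument(s)");
        }
      }
    }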
@@ -251,10 +256,10 @@ public class RunJar {
     // see if the client classloader is enabled
     if (useClientClassLoader()) {
       StringBuilder sb = new StringBuilder();
-      sb.append(workDir+"/").
+      sb.append(workDir).append("/").
           append(File.pathSeparator).append(file).
-          append(File.pathSeparator).append(workDir+"/classes/").
-          append(File.pathSeparator).append(workDir+"/lib/*");
+          append(File.pathSeparator).append(workDir).append("/classes/").
+          append(File.pathSeparator).append(workDir).append("/lib/*");
       // HADOOP_CLASSPATH is added to the client classpath
       String hadoopClasspath = getHadoopClasspath();
       if (hadoopClasspath != null && !hadoopClasspath.isEmpty()) {
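A sketch of how the chained append() calls above assemble the client classpath string, with File.pathSeparator between entries and a trailing lib/* wildcard entry; clientClasspath() and the sample paths are invented for illustration:

    import java.io.File;

    public class ClasspathStringSketch {
      /** Build a client classpath string the way the patched code chains append(). */
      static String clientClasspath(File workDir, File jobJar) {
        StringBuilder sb = new StringBuilder();
        sb.append(workDir).append("/")
            .append(File.pathSeparator).append(jobJar)
            .append(File.pathSeparator).append(workDir).append("/classes/")
            // lib/* is passed through as a wildcard classpath entry.
            .append(File.pathSeparator).append(workDir).append("/lib/*");
        return sb.toString();
      }

      public static void main(String[] args) {
        System.out.println(clientClasspath(new File("/tmp/work"), new File("/tmp/job.jar")));
      }
    }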
@@ -270,18 +275,18 @@ public class RunJar {
       loader = new ApplicationClassLoader(clientClasspath,
           getClass().getClassLoader(), systemClassesList);
     } else {
-      List<URL> classPath = new ArrayList<URL>();
-      classPath.add(new File(workDir+"/").toURI().toURL());
+      List<URL> classPath = new ArrayList<>();
+      classPath.add(new File(workDir + "/").toURI().toURL());
       classPath.add(file.toURI().toURL());
       classPath.add(new File(workDir, "classes/").toURI().toURL());
       File[] libs = new File(workDir, "lib").listFiles();
       if (libs != null) {
-        for (int i = 0; i < libs.length; i++) {
-          classPath.add(libs[i].toURI().toURL());
+        for (File lib : libs) {
+          classPath.add(lib.toURI().toURL());
         }
       }
       // create a normal parent-delegating classloader
-      loader = new URLClassLoader(classPath.toArray(new URL[0]));
+      loader = new URLClassLoader(classPath.toArray(new URL[classPath.size()]));
     }
     return loader;
   }
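A trimmed-down sketch of the else branch above, building a plain parent-delegating URLClassLoader over the unpacked work directory; UnjarClassLoaderSketch is an illustrative name, and new URL[0] is used where the patch prefers new URL[classPath.size()] (both are valid ways to size the array):

    import java.io.File;
    import java.net.URL;
    import java.net.URLClassLoader;
    import java.util.ArrayList;
    import java.util.List;

    public class UnjarClassLoaderSketch {
      /** Classloader over the unpacked jar: workDir/, the jar itself, classes/ and lib/*.jar. */
      static ClassLoader createLoader(File workDir, File jobJar) throws Exception {
        List<URL> classPath = new ArrayList<>();
        classPath.add(new File(workDir + "/").toURI().toURL());
        classPath.add(jobJar.toURI().toURL());
        classPath.add(new File(workDir, "classes/").toURI().toURL());
        File[] libs = new File(workDir, "lib").listFiles();
        if (libs != null) {
          for (File lib : libs) {
            // Each jar under lib/ becomes its own classpath URL.
            classPath.add(lib.toURI().toURL());
          }
        }
        return new URLClassLoader(classPath.toArray(new URL[0]));
      }

      public static void main(String[] args) throws Exception {
        ClassLoader loader = createLoader(new File(args[0]), new File(args[1]));
        System.out.println("created " + loader);
      }
    }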
org/apache/hadoop/util/TestRunJar.java

@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.util;

+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;

@@ -29,15 +32,16 @@ import java.util.jar.JarOutputStream;
 import java.util.regex.Pattern;
 import java.util.zip.ZipEntry;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;

-public class TestRunJar extends TestCase {
+public class TestRunJar {
+  private static final String FOOBAR_TXT = "foobar.txt";
+  private static final String FOOBAZ_TXT = "foobaz.txt";
+  private static final int BUFF_SIZE = 2048;
   private File TEST_ROOT_DIR;

   private static final String TEST_JAR_NAME="test-runjar.jar";
@ -45,9 +49,8 @@ public class TestRunJar extends TestCase {
|
|||
private static final long MOCKED_NOW = 1_460_389_972_000L;
|
||||
private static final long MOCKED_NOW_PLUS_TWO_SEC = MOCKED_NOW + 2_000;
|
||||
|
||||
@Override
|
||||
@Before
|
||||
protected void setUp() throws Exception {
|
||||
public void setUp() throws Exception {
|
||||
TEST_ROOT_DIR = GenericTestUtils.getTestDir(getClass().getSimpleName());
|
||||
if (!TEST_ROOT_DIR.exists()) {
|
||||
TEST_ROOT_DIR.mkdirs();
|
||||
|
@ -56,9 +59,8 @@ public class TestRunJar extends TestCase {
|
|||
makeTestJar();
|
||||
}
|
||||
|
||||
@Override
|
||||
@After
|
||||
protected void tearDown() {
|
||||
public void tearDown() {
|
||||
FileUtil.fullyDelete(TEST_ROOT_DIR);
|
||||
}
|
||||
|
||||
|
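The two hunks above finish the move away from junit.framework.TestCase: the fixture methods stop overriding protected setUp()/tearDown() and become plain public methods driven by the JUnit 4 @Before/@After annotations. A minimal, self-contained example of that lifecycle (LifecycleSketchTest is not part of Hadoop):

    import static org.junit.Assert.assertTrue;

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class LifecycleSketchTest {
      private File testRootDir;

      @Before   // replaces TestCase's protected setUp()
      public void setUp() throws IOException {
        testRootDir = Files.createTempDirectory("lifecycle-sketch").toFile();
      }

      @After    // replaces TestCase's protected tearDown()
      public void tearDown() {
        File[] children = testRootDir.listFiles();
        if (children != null) {
          for (File child : children) {
            child.delete();
          }
        }
        testRootDir.delete();
      }

      @Test
      public void testRootDirExists() {
        assertTrue(testRootDir.isDirectory());
      }
    }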
@ -70,11 +72,11 @@ public class TestRunJar extends TestCase {
|
|||
File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
|
||||
JarOutputStream jstream =
|
||||
new JarOutputStream(new FileOutputStream(jarFile));
|
||||
ZipEntry zipEntry1 = new ZipEntry("foobar.txt");
|
||||
ZipEntry zipEntry1 = new ZipEntry(FOOBAR_TXT);
|
||||
zipEntry1.setTime(MOCKED_NOW);
|
||||
jstream.putNextEntry(zipEntry1);
|
||||
jstream.closeEntry();
|
||||
ZipEntry zipEntry2 = new ZipEntry("foobaz.txt");
|
||||
ZipEntry zipEntry2 = new ZipEntry(FOOBAZ_TXT);
|
||||
zipEntry2.setTime(MOCKED_NOW_PLUS_TWO_SEC);
|
||||
jstream.putNextEntry(zipEntry2);
|
||||
jstream.closeEntry();
|
||||
|
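makeTestJar() above stamps each entry with an explicit modification time via ZipEntry.setTime(). A small standalone sketch of the same round trip, writing one timestamped entry and reading the stored time back (TimestampedJarSketch is an illustrative name):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.util.jar.JarFile;
    import java.util.jar.JarOutputStream;
    import java.util.zip.ZipEntry;

    public class TimestampedJarSketch {
      public static void main(String[] args) throws IOException {
        File jar = File.createTempFile("sketch", ".jar");
        long mockedNow = 1_460_389_972_000L;

        // Write one empty entry with an explicit modification time, as makeTestJar() does.
        try (JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jar))) {
          ZipEntry entry = new ZipEntry("foobar.txt");
          entry.setTime(mockedNow);
          jstream.putNextEntry(entry);
          jstream.closeEntry();
        }

        // Read the timestamp back to show it survived the round trip.
        try (JarFile jarFile = new JarFile(jar)) {
          System.out.println("stored time = " + jarFile.getEntry("foobar.txt").getTime());
        }
      }
    }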
@ -86,50 +88,52 @@ public class TestRunJar extends TestCase {
|
|||
*/
|
||||
@Test
|
||||
public void testUnJar() throws Exception {
|
||||
File unjarDir = new File(TEST_ROOT_DIR, "unjar-all");
|
||||
assertFalse("unjar dir shouldn't exist at test start",
|
||||
new File(unjarDir, "foobar.txt").exists());
|
||||
File unjarDir = getUnjarDir("unjar-all");
|
||||
|
||||
// Unjar everything
|
||||
RunJar.unJar(new File(TEST_ROOT_DIR, TEST_JAR_NAME),
|
||||
unjarDir);
|
||||
assertTrue("foobar unpacked",
|
||||
new File(unjarDir, "foobar.txt").exists());
|
||||
new File(unjarDir, TestRunJar.FOOBAR_TXT).exists());
|
||||
assertTrue("foobaz unpacked",
|
||||
new File(unjarDir, "foobaz.txt").exists());
|
||||
|
||||
new File(unjarDir, FOOBAZ_TXT).exists());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test unjarring a specific regex
|
||||
*/
|
||||
@Test
|
||||
public void testUnJarWithPattern() throws Exception {
|
||||
File unjarDir = new File(TEST_ROOT_DIR, "unjar-pattern");
|
||||
assertFalse("unjar dir shouldn't exist at test start",
|
||||
new File(unjarDir, "foobar.txt").exists());
|
||||
File unjarDir = getUnjarDir("unjar-pattern");
|
||||
|
||||
// Unjar only a regex
|
||||
RunJar.unJar(new File(TEST_ROOT_DIR, TEST_JAR_NAME),
|
||||
unjarDir,
|
||||
Pattern.compile(".*baz.*"));
|
||||
assertFalse("foobar not unpacked",
|
||||
new File(unjarDir, "foobar.txt").exists());
|
||||
new File(unjarDir, TestRunJar.FOOBAR_TXT).exists());
|
||||
assertTrue("foobaz unpacked",
|
||||
new File(unjarDir, "foobaz.txt").exists());
|
||||
|
||||
new File(unjarDir, FOOBAZ_TXT).exists());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUnJarDoesNotLooseLastModify() throws Exception {
|
||||
File unjarDir = new File(TEST_ROOT_DIR, "unjar-lastmod");
|
||||
assertFalse("unjar dir shouldn't exist at test start",
|
||||
new File(unjarDir, "foobar.txt").exists());
|
||||
File unjarDir = getUnjarDir("unjar-lastmod");
|
||||
|
||||
// Unjar everything
|
||||
RunJar.unJar(new File(TEST_ROOT_DIR, TEST_JAR_NAME),
|
||||
unjarDir);
|
||||
|
||||
assertEquals("Last modify time was lost during unJar", MOCKED_NOW, new File(unjarDir, "foobar.txt").lastModified());
|
||||
assertEquals("Last modify time was lost during unJar", MOCKED_NOW_PLUS_TWO_SEC, new File(unjarDir, "foobaz.txt").lastModified());
|
||||
String failureMessage = "Last modify time was lost during unJar";
|
||||
assertEquals(failureMessage, MOCKED_NOW, new File(unjarDir, TestRunJar.FOOBAR_TXT).lastModified());
|
||||
assertEquals(failureMessage, MOCKED_NOW_PLUS_TWO_SEC, new File(unjarDir, FOOBAZ_TXT).lastModified());
|
||||
}
|
||||
|
||||
private File getUnjarDir(String dirName) {
|
||||
File unjarDir = new File(TEST_ROOT_DIR, dirName);
|
||||
assertFalse("unjar dir shouldn't exist at test start",
|
||||
new File(unjarDir, TestRunJar.FOOBAR_TXT).exists());
|
||||
return unjarDir;
|
||||
}
|
||||
|
||||
/**
|
||||
|
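testUnJarDoesNotLooseLastModify() above compares File.lastModified() of each extracted file against the mocked entry time; note the two mocked timestamps are whole seconds exactly 2 000 ms apart, comfortably within the zip format's 2-second DOS time resolution. A tiny sketch of the setLastModified()/lastModified() round trip that the assertion relies on (LastModifiedSketchTest is an illustrative test, not part of Hadoop):

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertTrue;

    import java.io.File;
    import java.io.IOException;

    import org.junit.Test;

    public class LastModifiedSketchTest {
      private static final long MOCKED_NOW = 1_460_389_972_000L;

      @Test
      public void testSetAndReadLastModified() throws IOException {
        File f = File.createTempFile("lastmod-sketch", ".txt");
        f.deleteOnExit();
        // RunJar.unJar() calls setLastModified() for every extracted entry.
        assertTrue("setLastModified should succeed", f.setLastModified(MOCKED_NOW));
        // MOCKED_NOW is a whole second, so it survives coarser filesystem precision.
        assertEquals("Last modify time was lost", MOCKED_NOW, f.lastModified());
      }
    }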
@ -174,10 +178,10 @@ public class TestRunJar extends TestCase {
|
|||
ZipEntry entry = new ZipEntry(name);
|
||||
jstream.putNextEntry(entry);
|
||||
BufferedInputStream bufInputStream = new BufferedInputStream(
|
||||
entryInputStream, 2048);
|
||||
entryInputStream, BUFF_SIZE);
|
||||
int count;
|
||||
byte[] data = new byte[2048];
|
||||
while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
|
||||
byte[] data = new byte[BUFF_SIZE];
|
||||
while ((count = bufInputStream.read(data, 0, BUFF_SIZE)) != -1) {
|
||||
jstream.write(data, 0, count);
|
||||
}
|
||||
jstream.closeEntry();
|
||||
|
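The final hunk simply replaces the literal 2048 with the BUFF_SIZE constant in the standard buffered copy loop. For completeness, the same loop in isolation (CopyLoopSketch is an illustrative name):

    import java.io.BufferedInputStream;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    public class CopyLoopSketch {
      private static final int BUFF_SIZE = 2048;

      /** Classic read/write loop: read up to BUFF_SIZE bytes, write what was actually read. */
      static long copy(InputStream in, OutputStream out) throws IOException {
        BufferedInputStream buffered = new BufferedInputStream(in, BUFF_SIZE);
        byte[] data = new byte[BUFF_SIZE];
        long total = 0;
        int count;
        while ((count = buffered.read(data, 0, BUFF_SIZE)) != -1) {
          out.write(data, 0, count);
          total += count;
        }
        return total;
      }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        long copied = copy(new ByteArrayInputStream(new byte[5000]), out);
        System.out.println("copied " + copied + " bytes");  // prints 5000
      }
    }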