merge -r 1312017:1312018 from trunk. FIXES: MAPREDUCE-4108

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1312020 13f79535-47bb-0310-9956-ffa450edef68
Thomas Graves 2012-04-10 22:10:34 +00:00
parent 3cf79f4c7e
commit 15d3d52f1b
3 changed files with 53 additions and 22 deletions

CHANGES.txt

@@ -131,6 +131,9 @@ Release 2.0.0 - UNRELEASED
     MAPREDUCE-4076. Stream job fails with ZipException when use yarn jar
     command (Devaraj K via bobby)
 
+    MAPREDUCE-4108. Fix tests in org.apache.hadoop.util.TestRunJar
+    (Devaraj K via tgraves)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Hello.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package testjar;
+package org.apache.hadoop.util;
 
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -26,15 +26,14 @@ import java.io.IOException;
  *
  */
 public class Hello {
-  public static void main(String[] args){
+  public static void main(String[] args) {
     try {
       System.out.println("Creating file" + args[0]);
       FileOutputStream fstream = new FileOutputStream(args[0]);
       fstream.write("Hello Hadoopers".getBytes());
       fstream.close();
-    }
-    catch (IOException e) {
-      //do nothing
+    } catch (IOException e) {
+      // do nothing
     }
   }
 }

TestRunJar.java

@@ -18,34 +18,63 @@
 package org.apache.hadoop.util;
 
+import java.io.BufferedInputStream;
 import java.io.File;
-import org.apache.hadoop.fs.Path;
-import org.junit.Ignore;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
 
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * A test to rest the RunJar class.
  */
-@Ignore
-public class TestRunJar extends TestCase {
+public class TestRunJar {
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
 
-  public void testRunjar() throws Throwable {
+  private static final String TEST_JAR_NAME = "testjar.jar";
+  private static final String CLASS_NAME = "Hello.class";
+
+  @Test
+  public void testRunjar() throws Throwable {
     File outFile = new File(TEST_ROOT_DIR, "out");
     // delete if output file already exists.
     if (outFile.exists()) {
       outFile.delete();
     }
 
+    File makeTestJar = makeTestJar();
     String[] args = new String[3];
-    args[0] = "build/test/mapred/testjar/testjob.jar";
-    args[1] = "testjar.Hello";
+    args[0] = makeTestJar.getAbsolutePath();
+    args[1] = "org.apache.hadoop.util.Hello";
     args[2] = outFile.toString();
     RunJar.main(args);
-    assertTrue("RunJar failed", outFile.exists());
+    Assert.assertTrue("RunJar failed", outFile.exists());
+  }
+
+  private File makeTestJar() throws IOException {
+    File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
+    JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jarFile));
+    InputStream entryInputStream = this.getClass().getResourceAsStream(
+        CLASS_NAME);
+    ZipEntry entry = new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME);
+    jstream.putNextEntry(entry);
+    BufferedInputStream bufInputStream = new BufferedInputStream(
+        entryInputStream, 2048);
+    int count;
+    byte[] data = new byte[2048];
+    while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+      jstream.write(data, 0, count);
+    }
+    jstream.closeEntry();
+    jstream.close();
+    return jarFile;
+  }
 }
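
For context (not part of the patch): RunJar is the same entry point used by the `hadoop jar` command, so the rewritten test exercises the normal CLI code path without depending on a pre-built build/test/mapred/testjar/testjob.jar. A minimal standalone sketch of an equivalent invocation follows; the jar path and output path are placeholders, and the jar is assumed to contain org/apache/hadoop/util/Hello.class, e.g. packaged the way makeTestJar() above packages it.

import org.apache.hadoop.util.RunJar;

public class RunJarExample {
  public static void main(String[] ignored) throws Throwable {
    // Placeholder paths, for illustration only.
    String[] args = {
        "/tmp/testjar.jar",               // jar to unpack and run
        "org.apache.hadoop.util.Hello",   // main class inside the jar (no Main-Class manifest entry)
        "/tmp/out"                        // remaining arg is forwarded to Hello.main, which creates this file
    };
    // Roughly equivalent to: hadoop jar /tmp/testjar.jar org.apache.hadoop.util.Hello /tmp/out
    RunJar.main(args);
  }
}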