merge -r 1312017:1312018 from trunk. FIXES: MAPREDUCE-4108

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1312020 13f79535-47bb-0310-9956-ffa450edef68
Thomas Graves 2012-04-10 22:10:34 +00:00
parent 3cf79f4c7e
commit 15d3d52f1b
3 changed files with 53 additions and 22 deletions

Changed file: CHANGES.txt

@@ -130,6 +130,9 @@ Release 2.0.0 - UNRELEASED
     MAPREDUCE-4076. Stream job fails with ZipException when use yarn jar
     command (Devaraj K via bobby)
 
+    MAPREDUCE-4108. Fix tests in org.apache.hadoop.util.TestRunJar
+    (Devaraj K via tgraves)
+
 Release 0.23.3 - UNRELEASED

Changed file: Hello.java

@@ -16,25 +16,24 @@
  * limitations under the License.
  */
-package testjar;
+package org.apache.hadoop.util;
 
 import java.io.FileOutputStream;
 import java.io.IOException;
 
 /**
- * A simple Hello class that is called from TestRunJar
- *
+ * A simple Hello class that is called from TestRunJar
+ *
  */
 public class Hello {
-  public static void main(String[] args){
+  public static void main(String[] args) {
     try {
       System.out.println("Creating file" + args[0]);
       FileOutputStream fstream = new FileOutputStream(args[0]);
       fstream.write("Hello Hadoopers".getBytes());
       fstream.close();
-    }
-    catch (IOException e) {
-      //do nothing
+    } catch (IOException e) {
+      // do nothing
     }
   }
 }
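
Context note, not part of the commit: moving Hello from the testjar package into org.apache.hadoop.util places its compiled class next to TestRunJar on the test classpath, which is what the makeTestJar() helper in the TestRunJar diff below relies on; Class.getResourceAsStream resolves a relative name against the class's own package. A minimal sketch of that lookup, with a hypothetical class name:

package org.apache.hadoop.util;

import java.io.InputStream;

// Hypothetical sketch, not part of the commit: a relative resource name is
// resolved against the calling class's package, so "Hello.class" maps to
// org/apache/hadoop/util/Hello.class on the classpath.
public class ResourceLookupSketch {
  public static void main(String[] args) throws Exception {
    InputStream in =
        ResourceLookupSketch.class.getResourceAsStream("Hello.class");
    if (in != null) {
      System.out.println("Found Hello.class next to this class");
      in.close();
    } else {
      System.out.println("Hello.class is not on the classpath");
    }
  }
}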

Changed file: TestRunJar.java

@@ -18,34 +18,63 @@
 package org.apache.hadoop.util;
 
+import java.io.BufferedInputStream;
 import java.io.File;
-import org.apache.hadoop.fs.Path;
-import org.junit.Ignore;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
 
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * A test to rest the RunJar class.
  */
-@Ignore
-public class TestRunJar extends TestCase {
+public class TestRunJar {
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
 
+  private static final String TEST_JAR_NAME = "testjar.jar";
+  private static final String CLASS_NAME = "Hello.class";
+
+  @Test
   public void testRunjar() throws Throwable {
-    File outFile = new File(TEST_ROOT_DIR, "out");
-    // delete if output file already exists.
+    File outFile = new File(TEST_ROOT_DIR, "out");
+    // delete if output file already exists.
     if (outFile.exists()) {
       outFile.delete();
     }
+    File makeTestJar = makeTestJar();
     String[] args = new String[3];
-    args[0] = "build/test/mapred/testjar/testjob.jar";
-    args[1] = "testjar.Hello";
+    args[0] = makeTestJar.getAbsolutePath();
+    args[1] = "org.apache.hadoop.util.Hello";
     args[2] = outFile.toString();
     RunJar.main(args);
-    assertTrue("RunJar failed", outFile.exists());
+    Assert.assertTrue("RunJar failed", outFile.exists());
   }
-}
+
+  private File makeTestJar() throws IOException {
+    File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
+    JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jarFile));
+    InputStream entryInputStream = this.getClass().getResourceAsStream(
+        CLASS_NAME);
+    ZipEntry entry = new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME);
+    jstream.putNextEntry(entry);
+    BufferedInputStream bufInputStream = new BufferedInputStream(
+        entryInputStream, 2048);
+    int count;
+    byte[] data = new byte[2048];
+    while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+      jstream.write(data, 0, count);
+    }
+    jstream.closeEntry();
+    jstream.close();
+    return jarFile;
+  }
+}
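
Usage note, not part of the commit: RunJar.main takes the jar path, the main class, and the remaining program arguments, the same contract the hadoop jar command exposes, and that is how the test above drives Hello. A minimal sketch of calling it directly; the jar and output paths below are made-up examples:

package org.apache.hadoop.util;

// Hypothetical driver, not part of the commit: runs a jar the same way
// TestRunJar does, via RunJar.main(<jar> <main class> <args...>).
public class RunHelloJarSketch {
  public static void main(String[] args) throws Throwable {
    String jar = "/tmp/testjar.jar";                    // assumed jar location, e.g. one built like makeTestJar()
    String mainClass = "org.apache.hadoop.util.Hello";  // class packaged into the jar
    String outFile = "/tmp/hello-out";                  // file Hello will create
    RunJar.main(new String[] { jar, mainClass, outFile });
  }
}

Building the jar inside the test via makeTestJar() removes the dependency on the pre-built build/test/mapred/testjar/testjob.jar path, which is presumably why the @Ignore annotation could be dropped.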