MAPREDUCE-5587. TestTextOutputFormat fails on JDK7 (jeagles)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1533629 13f79535-47bb-0310-9956-ffa450edef68
Author: Jonathan Turner Eagles
Date:   2013-10-18 20:57:48 +00:00
parent 7d637a3a99
commit 2baa42dd01
2 changed files with 51 additions and 22 deletions
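At a glance, the patch below does three things: it moves TestTextOutputFormat from JUnit 3 (extends TestCase) to JUnit 4 (@Test plus static Assert imports), gives each test method its own output file (test_format.txt, test_custom.txt, test_compress.txt) instead of a shared test.txt, and makes testCompress decompress its output before comparing it to the expected text. JDK7 no longer returns a class's methods in declaration order, so JUnit may run these tests in any order and they can no longer share files in the work directory. As a rough standalone sketch of the read-back pattern the updated testCompress uses (the class name ReadCompressedOutput and the command-line path argument are illustrative, not part of the patch):

import java.io.FileInputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.LineReader;

public class ReadCompressedOutput {
  public static void main(String[] args) throws Exception {
    // The codec needs a Configuration before it can create streams.
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());

    // args[0] is a file written by TextOutputFormat with compression enabled,
    // e.g. "part-00000" + codec.getDefaultExtension() (".deflate").
    FileInputStream istream = new FileInputStream(args[0]);
    CompressionInputStream cistream = codec.createInputStream(istream);
    LineReader reader = new LineReader(cistream);

    // Rebuild the plain text line by line, the same way the updated
    // testCompress reconstructs the output string before asserting on it.
    StringBuilder output = new StringBuilder();
    Text line = new Text();
    while (reader.readLine(line) > 0) {
      output.append(line).append("\n");
    }
    reader.close();

    System.out.print(output);
  }
}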

hadoop-mapreduce-project/CHANGES.txt

@@ -1500,6 +1500,8 @@ Release 0.23.10 - UNRELEASED
     MAPREDUCE-5586. TestCopyMapper#testCopyFailOnBlockSizeDifference fails when
     run from hadoop-tools/hadoop-distcp directory (jeagles)
 
+    MAPREDUCE-5587. TestTextOutputFormat fails on JDK7 (jeagles)
+
 Release 0.23.9 - 2013-07-08
 
   INCOMPATIBLE CHANGES

org/apache/hadoop/mapred/TestTextOutputFormat.java

@@ -18,13 +18,24 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
-import junit.framework.TestCase;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
-public class TestTextOutputFormat extends TestCase {
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.LineReader;
+
+public class TestTextOutputFormat {
   private static JobConf defaultConf = new JobConf();
 
   private static FileSystem localFs = null;
@@ -38,12 +49,13 @@ public class TestTextOutputFormat extends TestCase {
   // A random task attempt id for testing.
   private static String attempt = "attempt_200707121733_0001_m_000000_0";
 
   private static Path workDir =
     new Path(new Path(
                       new Path(System.getProperty("test.build.data", "."),
                                "data"),
                       FileOutputCommitter.TEMP_DIR_NAME), "_" + attempt);
 
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     job.set(JobContext.TASK_ATTEMPT_ID, attempt);
@@ -53,7 +65,7 @@ public class TestTextOutputFormat extends TestCase {
     if (!fs.mkdirs(workDir)) {
       fail("Failed to create output directory");
     }
-    String file = "test.txt";
+    String file = "test_format.txt";
 
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
@@ -90,10 +102,11 @@ public class TestTextOutputFormat extends TestCase {
     expectedOutput.append(key1).append("\n");
     expectedOutput.append(key2).append('\t').append(val2).append("\n");
     String output = UtilsForTests.slurp(expectedFile);
-    assertEquals(output, expectedOutput.toString());
+    assertEquals(expectedOutput.toString(), output);
 
   }
 
+  @Test
   public void testFormatWithCustomSeparator() throws Exception {
     JobConf job = new JobConf();
     String separator = "\u0001";
@@ -105,7 +118,7 @@ public class TestTextOutputFormat extends TestCase {
     if (!fs.mkdirs(workDir)) {
       fail("Failed to create output directory");
     }
-    String file = "test.txt";
+    String file = "test_custom.txt";
 
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
@@ -142,27 +155,27 @@ public class TestTextOutputFormat extends TestCase {
     expectedOutput.append(key1).append("\n");
     expectedOutput.append(key2).append(separator).append(val2).append("\n");
     String output = UtilsForTests.slurp(expectedFile);
-    assertEquals(output, expectedOutput.toString());
+    assertEquals(expectedOutput.toString(), output);
 
   }
 
   /**
    * test compressed file
    * @throws IOException
    */
-  public void testCompress() throws IOException{
+  @Test
+  public void testCompress() throws IOException {
     JobConf job = new JobConf();
-    String separator = "\u0001";
-    job.set("mapreduce.output.textoutputformat.separator", separator);
     job.set(JobContext.TASK_ATTEMPT_ID, attempt);
     job.set(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS,"true");
+
     FileOutputFormat.setOutputPath(job, workDir.getParent().getParent());
     FileOutputFormat.setWorkOutputPath(job, workDir);
     FileSystem fs = workDir.getFileSystem(job);
     if (!fs.mkdirs(workDir)) {
       fail("Failed to create output directory");
     }
-    String file = "test.txt";
+    String file = "test_compress.txt";
 
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
@@ -189,16 +202,30 @@ public class TestTextOutputFormat extends TestCase {
     } finally {
       theRecordWriter.close(reporter);
     }
-    File expectedFile = new File(new Path(workDir, file).toString());
     StringBuffer expectedOutput = new StringBuffer();
-    expectedOutput.append(key1).append(separator).append(val1).append("\n");
+    expectedOutput.append(key1).append("\t").append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");
     expectedOutput.append(key2).append("\n");
     expectedOutput.append(key1).append("\n");
-    expectedOutput.append(key2).append(separator).append(val2).append("\n");
-    String output = UtilsForTests.slurp(expectedFile);
-    assertEquals(output, expectedOutput.toString());
+    expectedOutput.append(key2).append("\t").append(val2).append("\n");
+
+    DefaultCodec codec = new DefaultCodec();
+    codec.setConf(job);
+    Path expectedFile = new Path(workDir, file + codec.getDefaultExtension());
+    final FileInputStream istream = new FileInputStream(expectedFile.toString());
+    CompressionInputStream cistream = codec.createInputStream(istream);
+    LineReader reader = new LineReader(cistream);
+
+    String output = "";
+    Text out = new Text();
+    while (reader.readLine(out) > 0) {
+      output += out;
+      output += "\n";
+    }
+    reader.close();
+
+    assertEquals(expectedOutput.toString(), output);
   }
 
   public static void main(String[] args) throws Exception {
     new TestTextOutputFormat().testFormat();