MAPREDUCE-2882. TestLineRecordReader depends on ant jars. Contributed by Todd Lipcon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1165449 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Todd Lipcon 2011-09-05 23:23:35 +00:00
parent 6b608aad7d
commit ae367be8f3
4 changed files with 6 additions and 15 deletions

View File

@@ -1202,6 +1202,8 @@ Release 0.23.0 - Unreleased
MAPREDUCE-2716. MRReliabilityTest job fails because of missing
job-file. (Jeffrey Naisbitt via vinodkv)
MAPREDUCE-2882. TestLineRecordReader depends on ant jars. (todd)
Release 0.22.0 - Unreleased
INCOMPATIBLE CHANGES

View File

@@ -18,9 +18,7 @@
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import junit.framework.TestCase;
@@ -32,7 +30,6 @@ import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.tools.ant.util.FileUtils;
import org.junit.Test;
public class TestLineRecordReader extends TestCase {
@@ -66,10 +63,7 @@ public class TestLineRecordReader extends TestCase {
/**
 * Reads back the contents of the test job's single map output file.
 *
 * @param conf configuration used to obtain the local filesystem
 * @return the full contents of {@code part-00000} under {@code outputDir}
 * @throws IOException if the output file cannot be opened or read
 */
public String readOutputFile(Configuration conf) throws IOException {
  FileSystem localFs = FileSystem.getLocal(conf);
  Path file = new Path(outputDir, "part-00000");
  // Use the shared test helper instead of ant's FileUtils.readFully so the
  // test no longer depends on ant jars (MAPREDUCE-2882).
  return UtilsForTests.slurpHadoop(file, localFs);
}
/**

View File

@@ -172,7 +172,7 @@ public class UtilsForTests {
return contents;
}
static String slurpHadoop(Path p, FileSystem fs) throws IOException {
public static String slurpHadoop(Path p, FileSystem fs) throws IOException {
int len = (int) fs.getFileStatus(p).getLen();
byte[] buf = new byte[len];
InputStream in = fs.open(p);

View File

@@ -19,9 +19,7 @@
package org.apache.hadoop.mapreduce.lib.input;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import junit.framework.TestCase;
@@ -29,11 +27,11 @@ import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.UtilsForTests;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.tools.ant.util.FileUtils;
import org.junit.Test;
public class TestLineRecordReader extends TestCase {
@@ -67,10 +65,7 @@ public class TestLineRecordReader extends TestCase {
/**
 * Reads back the contents of the test job's single reducer output file.
 *
 * @param conf configuration used to obtain the local filesystem
 * @return the full contents of {@code part-r-00000} under {@code outputDir}
 * @throws IOException if the output file cannot be opened or read
 */
public String readOutputFile(Configuration conf) throws IOException {
  FileSystem localFs = FileSystem.getLocal(conf);
  Path file = new Path(outputDir, "part-r-00000");
  // Use the shared test helper instead of ant's FileUtils.readFully so the
  // test no longer depends on ant jars (MAPREDUCE-2882).
  return UtilsForTests.slurpHadoop(file, localFs);
}
/**