MAPREDUCE-5885. build/test/test.mapred.spill causes release audit warnings. Contributed by Chen He

commit 38df629367
parent 0c785f0824
Author: Jason Lowe
Date: 2014-08-27 15:14:54 +00:00
(cherry picked from commit 812bd0c0e5)

6 changed files with 150 additions and 59 deletions

hadoop-mapreduce-project/CHANGES.txt

@@ -99,6 +99,9 @@ Release 2.6.0 - UNRELEASED
     MAPREDUCE-6044. Fully qualified intermediate done dir path breaks per-user dir
     creation on Windows. (zjshen)
 
+    MAPREDUCE-5885. build/test/test.mapred.spill causes release audit warnings
+    (Chen He via jlowe)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
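All five test classes below get the same treatment: the hardcoded `build/test/...` scratch paths, which left spill files behind in the source tree for the release audit (Apache RAT) to flag, are replaced by a per-class directory resolved from the `test.build.data` system property, and an `@After` hook deletes it. A minimal sketch of the shared pattern, using a hypothetical `TestExample` class name:

```java
import java.io.File;

import org.apache.hadoop.fs.FileUtil;
import org.junit.After;

public class TestExample {
  // Resolve the scratch directory from test.build.data, falling back to the
  // system temp dir, so test output never lands in the source tree.
  private static final File TEST_DIR = new File(
      System.getProperty("test.build.data",
          System.getProperty("java.io.tmpdir")), "TestExample-mapred");

  // Recursively delete the scratch directory after each test so no spill
  // files survive to trip the release audit.
  @After
  public void cleanup() {
    FileUtil.fullyDelete(TEST_DIR);
  }
}
```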

.../src/test/java/org/apache/hadoop/mapred/TestComparators.java

@@ -17,13 +17,30 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Random;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.MRConfig;
-import junit.framework.TestCase;
-import java.io.*;
-import java.util.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Two different types of comparators can be used in MapReduce. One is used
@@ -37,8 +54,11 @@ import java.util.*;
  * 2. Test the common use case where values are grouped by keys but values
  * within each key are grouped by a secondary key (a timestamp, for example).
  */
-public class TestComparators extends TestCase
-{
+public class TestComparators {
+  private static final File TEST_DIR = new File(
+      System.getProperty("test.build.data",
+          System.getProperty("java.io.tmpdir")), "TestComparators-mapred");
   JobConf conf = new JobConf(TestMapOutputType.class);
   JobClient jc;
   static Random rng = new Random();
@@ -292,9 +312,9 @@ public class TestComparators extends TestCase
     }
   }
 
+  @Before
   public void configure() throws Exception {
-    Path testdir = new Path("build/test/test.mapred.spill");
+    Path testdir = new Path(TEST_DIR.getAbsolutePath());
     Path inDir = new Path(testdir, "in");
     Path outDir = new Path(testdir, "out");
     FileSystem fs = FileSystem.get(conf);
@@ -334,14 +354,18 @@ public class TestComparators extends TestCase
     jc = new JobClient(conf);
   }
 
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(TEST_DIR);
+  }
+
   /**
    * Test the default comparator for Map/Reduce.
    * Use the identity mapper and see if the keys are sorted at the end
    * @throws Exception
    */
+  @Test
   public void testDefaultMRComparator() throws Exception {
-    configure();
     conf.setMapperClass(IdentityMapper.class);
     conf.setReducerClass(AscendingKeysReducer.class);
@@ -361,8 +385,8 @@ public class TestComparators extends TestCase
    * comparator. Keys should be sorted in reverse order in the reducer.
    * @throws Exception
    */
+  @Test
   public void testUserMRComparator() throws Exception {
-    configure();
     conf.setMapperClass(IdentityMapper.class);
     conf.setReducerClass(DescendingKeysReducer.class);
     conf.setOutputKeyComparatorClass(DecreasingIntComparator.class);
@@ -384,8 +408,8 @@ public class TestComparators extends TestCase
    * values for a key should be sorted by the 'timestamp'.
    * @throws Exception
    */
+  @Test
   public void testUserValueGroupingComparator() throws Exception {
-    configure();
     conf.setMapperClass(RandomGenMapper.class);
     conf.setReducerClass(AscendingGroupReducer.class);
     conf.setOutputValueGroupingComparator(CompositeIntGroupFn.class);
@@ -409,8 +433,8 @@ public class TestComparators extends TestCase
    * order. This lets us make sure that the right comparators are used.
    * @throws Exception
    */
+  @Test
   public void testAllUserComparators() throws Exception {
-    configure();
     conf.setMapperClass(RandomGenMapper.class);
     // use a decreasing comparator so keys are sorted in reverse order
     conf.setOutputKeyComparatorClass(DecreasingIntComparator.class);
@@ -430,6 +454,7 @@ public class TestComparators extends TestCase
    * Test a user comparator that relies on deserializing both arguments
    * for each compare.
    */
+  @Test
   public void testBakedUserComparator() throws Exception {
     MyWritable a = new MyWritable(8, 8);
     MyWritable b = new MyWritable(7, 9);
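Alongside the path change, TestComparators moves from JUnit 3 (`extends TestCase`) to JUnit 4 annotations, which is why the explicit `configure()` calls disappear from the test bodies: a method annotated `@Before` runs automatically before every `@Test`. A minimal sketch of the JUnit 4 lifecycle, with hypothetical names:

```java
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertTrue;

public class TestLifecycleSketch {
  private boolean configured;

  @Before  // replaces JUnit 3's setUp(); runs before each @Test
  public void configure() {
    configured = true;
  }

  @Test    // replaces JUnit 3's "testXxx on a TestCase" convention
  public void testSomething() {
    assertTrue(configured);  // @Before has already run; no manual call needed
  }

  @After   // runs after each @Test, even when the test fails
  public void cleanup() {
    configured = false;
  }
}
```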

.../src/test/java/org/apache/hadoop/mapred/TestMapOutputType.java

@@ -17,21 +17,36 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.lib.*;
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.MRConfig;
-import junit.framework.TestCase;
-import java.io.*;
-import java.util.*;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
 
 /**
  * TestMapOutputType checks whether the Map task handles type mismatch
  * between mapper output and the type specified in
  * JobConf.MapOutputKeyType and JobConf.MapOutputValueType.
  */
-public class TestMapOutputType extends TestCase
-{
+public class TestMapOutputType {
+  private static final File TEST_DIR = new File(
+      System.getProperty("test.build.data",
+          System.getProperty("java.io.tmpdir")), "TestMapOutputType-mapred");
   JobConf conf = new JobConf(TestMapOutputType.class);
   JobClient jc;
   /**
@@ -75,9 +90,9 @@ public class TestMapOutputType extends TestCase
     }
   }
 
+  @Before
   public void configure() throws Exception {
-    Path testdir = new Path("build/test/test.mapred.spill");
+    Path testdir = new Path(TEST_DIR.getAbsolutePath());
     Path inDir = new Path(testdir, "in");
     Path outDir = new Path(testdir, "out");
     FileSystem fs = FileSystem.get(conf);
@@ -101,17 +116,21 @@ public class TestMapOutputType extends TestCase
       throw new IOException("Mkdirs failed to create " + inDir.toString());
     }
     Path inFile = new Path(inDir, "part0");
     SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, inFile,
         Text.class, Text.class);
     writer.append(new Text("rec: 1"), new Text("Hello"));
     writer.close();
 
     jc = new JobClient(conf);
   }
 
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(TEST_DIR);
+  }
+
+  @Test
   public void testKeyMismatch() throws Exception {
-    configure();
-
     // Set bad MapOutputKeyClass and MapOutputValueClass
     conf.setMapOutputKeyClass(IntWritable.class);
     conf.setMapOutputValueClass(IntWritable.class);
@@ -125,11 +144,9 @@ public class TestMapOutputType extends TestCase
       fail("Oops! The job was supposed to break due to an exception");
     }
   }
 
+  @Test
   public void testValueMismatch() throws Exception {
-    configure();
-
-    // Set good MapOutputKeyClass, bad MapOutputValueClass
     conf.setMapOutputKeyClass(Text.class);
     conf.setMapOutputValueClass(IntWritable.class);
@@ -142,11 +159,10 @@ public class TestMapOutputType extends TestCase
       fail("Oops! The job was supposed to break due to an exception");
     }
   }
 
+  @Test
   public void testNoMismatch() throws Exception{
-    configure();
-
     // Set good MapOutputKeyClass and MapOutputValueClass
     conf.setMapOutputKeyClass(Text.class);
     conf.setMapOutputValueClass(Text.class);

.../src/test/java/org/apache/hadoop/mapred/TestMapRed.java

@@ -24,7 +24,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
-import java.util.Collections;
+import java.io.File;
 import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -34,6 +34,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.NullWritable;
@@ -46,11 +47,11 @@ import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.junit.After;
 import org.junit.Test;
 
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 
 /**********************************************************
  * MapredLoadTest generates a bunch of work that exercises
@@ -110,6 +111,10 @@ public class TestMapRed extends Configured implements Tool {
    * of numbers in random order, but where each number appears
    * as many times as we were instructed.
    */
+  private static final File TEST_DIR = new File(
+      System.getProperty("test.build.data",
+          System.getProperty("java.io.tmpdir")), "TestMapRed-mapred");
+
   static class RandomGenMapper
       implements Mapper<IntWritable, IntWritable, IntWritable, IntWritable> {
@@ -248,6 +253,11 @@ public class TestMapRed extends Configured implements Tool {
   private static int counts = 100;
   private static Random r = new Random();
 
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(TEST_DIR);
+  }
+
   /**
   public TestMapRed(int range, int counts, Configuration conf) throws IOException {
     this.range = range;
@@ -372,7 +382,7 @@ public class TestMapRed extends Configured implements Tool {
                                  boolean includeCombine
                                  ) throws Exception {
     JobConf conf = new JobConf(TestMapRed.class);
-    Path testdir = new Path("build/test/test.mapred.compress");
+    Path testdir = new Path(TEST_DIR.getAbsolutePath());
     Path inDir = new Path(testdir, "in");
     Path outDir = new Path(testdir, "out");
     FileSystem fs = FileSystem.get(conf);
@@ -440,7 +450,7 @@ public class TestMapRed extends Configured implements Tool {
     //
     // Generate distribution of ints. This is the answer key.
     //
-    JobConf conf = null;
+    JobConf conf;
     //Check to get configuration and check if it is configured thro' Configured
     //interface. This would happen when running testcase thro' command line.
     if(getConf() == null) {
@@ -465,7 +475,7 @@ public class TestMapRed extends Configured implements Tool {
     // Write the answer key to a file.
     //
     FileSystem fs = FileSystem.get(conf);
-    Path testdir = new Path("mapred.loadtest");
+    Path testdir = new Path(TEST_DIR.getAbsolutePath(), "mapred.loadtest");
     if (!fs.mkdirs(testdir)) {
       throw new IOException("Mkdirs failed to create " + testdir.toString());
     }
@@ -635,8 +645,8 @@ public class TestMapRed extends Configured implements Tool {
       in.close();
     }
     int originalTotal = 0;
-    for (int i = 0; i < dist.length; i++) {
-      originalTotal += dist[i];
+    for (int aDist : dist) {
+      originalTotal += aDist;
     }
     System.out.println("Original sum: " + originalTotal);
     System.out.println("Recomputed sum: " + totalseen);
@@ -727,7 +737,7 @@ public class TestMapRed extends Configured implements Tool {
   public void runJob(int items) {
     try {
       JobConf conf = new JobConf(TestMapRed.class);
-      Path testdir = new Path("build/test/test.mapred.spill");
+      Path testdir = new Path(TEST_DIR.getAbsolutePath());
       Path inDir = new Path(testdir, "in");
       Path outDir = new Path(testdir, "out");
       FileSystem fs = FileSystem.get(conf);
@@ -777,7 +787,7 @@ public class TestMapRed extends Configured implements Tool {
       System.err.println("Usage: TestMapRed <range> <counts>");
       System.err.println();
       System.err.println("Note: a good test will have a " +
-        "<counts> value that is substantially larger than the <range>");
+          "<counts> value that is substantially larger than the <range>");
       return -1;
     }

.../src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java

@@ -18,7 +18,6 @@
 package org.apache.hadoop.mapred.lib;
 
-import java.io.*;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -35,9 +34,23 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.Utils;
+import org.junit.After;
+import org.junit.Test;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
 
 public class TestKeyFieldBasedComparator extends HadoopTestCase {
+  private static final File TEST_DIR = new File(
+      System.getProperty("test.build.data",
+          System.getProperty("java.io.tmpdir")),
+      "TestKeyFieldBasedComparator-lib");
+
   JobConf conf;
   JobConf localConf;
@@ -50,8 +63,9 @@ public class TestKeyFieldBasedComparator extends HadoopTestCase {
     localConf = createJobConf();
     localConf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
   }
+
   public void configure(String keySpec, int expect) throws Exception {
-    Path testdir = new Path("build/test/test.mapred.spill");
+    Path testdir = new Path(TEST_DIR.getAbsolutePath());
     Path inDir = new Path(testdir, "in");
     Path outDir = new Path(testdir, "out");
     FileSystem fs = getFileSystem();
@@ -116,6 +130,13 @@ public class TestKeyFieldBasedComparator extends HadoopTestCase {
       reader.close();
     }
   }
 
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(TEST_DIR);
+  }
+
+  @Test
   public void testBasicUnixComparator() throws Exception {
     configure("-k1,1n", 1);
     configure("-k2,2n", 1);

.../src/test/java/org/apache/hadoop/mapreduce/TestMapReduce.java

@@ -23,14 +23,14 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
+import java.io.File;
 import java.util.Iterator;
 import java.util.Random;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -41,6 +41,10 @@ import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
+import org.junit.After;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
 
 /**********************************************************
  * MapredLoadTest generates a bunch of work that exercises
@@ -75,8 +79,10 @@ import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
  * 7) A mapred job integrates all the count files into a single one.
  *
  **********************************************************/
-public class TestMapReduce extends TestCase {
+public class TestMapReduce {
+  private static final File TEST_DIR = new File(
+      System.getProperty("test.build.data",
+          System.getProperty("java.io.tmpdir")), "TestMapReduce-mapreduce");
 
   private static FileSystem fs;
   static {
@@ -215,6 +221,12 @@ public class TestMapReduce extends TestCase {
   private static int counts = 100;
   private static Random r = new Random();
 
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(TEST_DIR);
+  }
+
+  @Test
   public void testMapred() throws Exception {
     launch();
   }
@@ -239,7 +251,7 @@ public class TestMapReduce extends TestCase {
     //
     // Write the answer key to a file.
     //
-    Path testdir = new Path("mapred.loadtest");
+    Path testdir = new Path(TEST_DIR.getAbsolutePath());
     if (!fs.mkdirs(testdir)) {
       throw new IOException("Mkdirs failed to create " + testdir.toString());
     }
@@ -488,13 +500,17 @@ public class TestMapReduce extends TestCase {
       System.err.println("Usage: TestMapReduce <range> <counts>");
       System.err.println();
       System.err.println("Note: a good test will have a <counts> value" +
          " that is substantially larger than the <range>");
       return;
     }
     int i = 0;
     range = Integer.parseInt(argv[i++]);
     counts = Integer.parseInt(argv[i++]);
-    launch();
+    try {
+      launch();
+    } finally {
+      FileUtil.fullyDelete(TEST_DIR);
+    }
   }
} }
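With these changes the test scratch space lands under `${test.build.data}` (or `java.io.tmpdir` when the property is unset) and is removed after each test, so release audits no longer trip over leftover `build/test/test.mapred.spill` output. Assuming the module's surefire configuration forwards `test.build.data` to the forked test JVM, as the Hadoop build conventionally does, a single test can be pointed at an explicit scratch directory with something like `mvn test -Dtest=TestMapRed -Dtest.build.data=/tmp/mapred-scratch`.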