diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java
index b47d5efc7e5..e2d75ab268a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java
@@ -17,17 +17,30 @@
  */
 package org.apache.hadoop.conf;
 
-import org.junit.Assert;
-
-import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.HadoopTestCase;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.mapred.Utils;
+import org.junit.Test;
 
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * This testcase tests that a JobConf without default values submits jobs
@@ -40,6 +53,7 @@ public class TestNoDefaultsJobConf extends HadoopTestCase {
     super(HadoopTestCase.CLUSTER_MR, HadoopTestCase.DFS_FS, 1, 1);
   }
 
+  @Test
   public void testNoDefaults() throws Exception {
     JobConf configuration = new JobConf();
     assertTrue(configuration.get("hadoop.tmp.dir", null) != null);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java
index 3cd0668fe43..277c0fd4b0a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java
@@ -18,13 +18,12 @@
 
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.junit.After;
+import org.junit.Before;
 
-import java.io.File;
 import java.io.IOException;
 
 /**
@@ -38,7 +37,7 @@ import java.io.IOException;
  * Job Configurations should be created using a configuration returned by the
  * 'createJobConf()' method.
  */
-public abstract class HadoopTestCase extends TestCase {
+public abstract class HadoopTestCase {
   public static final int LOCAL_MR = 1;
   public static final int CLUSTER_MR = 2;
   public static final int LOCAL_FS = 4;
@@ -140,8 +139,8 @@ public abstract class HadoopTestCase extends TestCase {
    *
    * @throws Exception
    */
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     if (localFS) {
       fileSystem = FileSystem.getLocal(new JobConf());
     }
@@ -164,7 +163,8 @@ public abstract class HadoopTestCase extends TestCase {
    *
    * @throws Exception
    */
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     try {
       if (mrCluster != null) {
         mrCluster.shutdown();
       }
@@ -181,7 +181,6 @@ public abstract class HadoopTestCase extends TestCase {
     catch (Exception ex) {
       System.out.println(ex);
     }
-    super.tearDown();
   }
 
   /**
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
index d2ea74e6940..1f657cfc1ed 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
@@ -34,6 +34,13 @@ import javax.servlet.ServletException;
 import java.io.IOException;
 import java.io.DataOutputStream;
 
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import org.junit.Before;
+import org.junit.After;
+import org.junit.Test;
+
+
 /**
  * Base class to test Job end notification in local and cluster mode.
  *
@@ -140,17 +147,19 @@
     return conf;
   }
 
-
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     super.setUp();
     startHttpServer();
   }
 
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     stopHttpServer();
     super.tearDown();
   }
 
+  @Test
   public void testMR() throws Exception {
 
     System.out.println(launchWordCount(this.createJobConf(),
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java
index 81b53cc3b32..314123567fa 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java
@@ -30,12 +30,16 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Iterator;
 
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+
 public class TestFileOutputFormat extends HadoopTestCase {
 
   public TestFileOutputFormat() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
   }
 
+  @Test
   public void testCustomFile() throws Exception {
     Path inDir = new Path("testing/fileoutputformat/input");
     Path outDir = new Path("testing/fileoutputformat/output");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java
index bad06e912db..bed545e36cf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java
@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.mapred;
 
-import java.io.File;
-import java.io.IOException;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -27,9 +24,18 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.ProtocolSignature;
 import org.apache.hadoop.mapred.SortedRanges.Range;
 import org.apache.hadoop.mapreduce.TaskType;
-import org.apache.hadoop.mapreduce.checkpoint.CheckpointID;
-import org.apache.hadoop.mapreduce.checkpoint.FSCheckpointID;
 import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID;
+import org.junit.After;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 
 public class TestTaskCommit extends HadoopTestCase {
 
@@ -80,12 +86,13 @@ public class TestTaskCommit extends HadoopTestCase {
     super(LOCAL_MR, LOCAL_FS, 1, 1);
   }
 
-  @Override
+  @After
   public void tearDown() throws Exception {
     super.tearDown();
     FileUtil.fullyDelete(new File(rootDir.toString()));
   }
-
+
+  @Test
   public void testCommitFail() throws IOException {
     final Path inDir = new Path(rootDir, "./input");
     final Path outDir = new Path(rootDir, "./output");
@@ -199,6 +206,7 @@ public class TestTaskCommit extends HadoopTestCase {
    *
    * @throws Exception
    */
+  @Test
   public void testTaskCleanupDoesNotCommit() throws Exception {
     // Mimic a job with a special committer that does not cleanup
     // files when a task fails.
@@ -245,23 +253,27 @@
     assertTrue("Task did not succeed", umbilical.taskDone);
   }
 
+  @Test
   public void testCommitRequiredForMapTask() throws Exception {
     Task testTask = createDummyTask(TaskType.MAP);
     assertTrue("MapTask should need commit", testTask.isCommitRequired());
   }
 
+  @Test
   public void testCommitRequiredForReduceTask() throws Exception {
     Task testTask = createDummyTask(TaskType.REDUCE);
     assertTrue("ReduceTask should need commit", testTask.isCommitRequired());
   }
-
+
+  @Test
   public void testCommitNotRequiredForJobSetup() throws Exception {
     Task testTask = createDummyTask(TaskType.MAP);
     testTask.setJobSetupTask();
     assertFalse("Job setup task should not need commit",
       testTask.isCommitRequired());
   }
-
+
+  @Test
   public void testCommitNotRequiredForJobCleanup() throws Exception {
     Task testTask = createDummyTask(TaskType.MAP);
     testTask.setJobCleanupTask();
@@ -269,6 +281,7 @@ public class TestTaskCommit extends HadoopTestCase {
       testTask.isCommitRequired());
   }
 
+  @Test
   public void testCommitNotRequiredForTaskCleanup() throws Exception {
     Task testTask = createDummyTask(TaskType.REDUCE);
     testTask.setTaskCleanupTask();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java
index 8d35dcf4046..07b1306513c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java
@@ -27,6 +27,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 /**
  * HadoopTestCase that tests the local job runner.
@@ -59,6 +61,7 @@ public class TestLocalJobControl extends HadoopTestCase {
   * object. Finally, it creates a thread to run the JobControl object and
   * monitors/reports the job states.
   */
+  @Test
  public void testLocalJobControlDataCopy() throws Exception {
 
     FileSystem fs = FileSystem.get(createJobConf());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java
index 37cb91f023b..0933ecef941 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java
@@ -21,12 +21,29 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.HadoopTestCase;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.mapred.TextOutputFormat;
+import org.junit.Test;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Iterator;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 public class TestChainMapReduce extends HadoopTestCase {
 
   private static Path getFlagDir(boolean local) {
@@ -67,6 +84,7 @@ public class TestChainMapReduce extends HadoopTestCase {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
   }
 
+  @Test
   public void testChain() throws Exception {
     Path inDir = new Path("testing/chain/input");
     Path outDir = new Path("testing/chain/output");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java
index 34a4d2c6c92..35b3f243c7f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java
@@ -36,6 +36,8 @@ import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.Utils;
 import org.junit.After;
 import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.BufferedReader;
 import java.io.File;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
index e5c6d75a041..3a9cb9ec337 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
@@ -17,11 +17,6 @@
  */
 package org.apache.hadoop.mapred.lib;
 
-import java.io.IOException;
-import java.util.Map;
-
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
@@ -30,12 +25,19 @@ import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
 
 /**
  * @see TestDelegatingInputFormat
  */
-public class TestMultipleInputs extends TestCase {
-
+public class TestMultipleInputs {
+
+  @Test
   public void testAddInputPathWithFormat() {
     final JobConf conf = new JobConf();
     MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class);
@@ -48,6 +50,7 @@ public class TestMultipleInputs extends TestCase {
        .getClass());
   }
 
+  @Test
   public void testAddInputPathWithMapper() {
     final JobConf conf = new JobConf();
     MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java
index 59c0a97b3bf..f3e58930eac 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java
@@ -24,7 +24,23 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
-import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.HadoopTestCase;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.mapred.TextOutputFormat;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 import java.io.BufferedReader;
 import java.io.DataOutputStream;
@@ -32,17 +48,23 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.util.Iterator;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 public class TestMultipleOutputs extends HadoopTestCase {
 
   public TestMultipleOutputs() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
   }
 
+  @Test
   public void testWithoutCounters() throws Exception {
     _testMultipleOutputs(false);
     _testMOWithJavaSerialization(false);
   }
 
+  @Test
   public void testWithCounters() throws Exception {
     _testMultipleOutputs(true);
     _testMOWithJavaSerialization(true);
@@ -62,6 +84,7 @@ public class TestMultipleOutputs extends HadoopTestCase {
     return dir;
   }
 
+  @Before
   public void setUp() throws Exception {
     super.setUp();
     Path rootDir = getDir(ROOT_DIR);
@@ -75,6 +98,7 @@ public class TestMultipleOutputs extends HadoopTestCase {
     }
   }
 
+  @After
   public void tearDown() throws Exception {
     Path rootDir = getDir(ROOT_DIR);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java
index 7e224cdda31..1059d29ee91 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java
@@ -22,26 +22,45 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.HadoopTestCase;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;
+import org.junit.Test;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Iterator;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 public class TestMultithreadedMapRunner extends HadoopTestCase {
 
   public TestMultithreadedMapRunner() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
   }
 
+  @Test
   public void testOKRun() throws Exception {
     run(false, false);
   }
 
+  @Test
   public void testIOExRun() throws Exception {
     run(true, false);
   }
+
+  @Test
   public void testRuntimeExRun() throws Exception {
     run(false, true);
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java
index d5afe63c2ee..338f1172b04 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java
@@ -30,6 +30,13 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.log4j.Level;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 
 public class TestChild extends HadoopTestCase {
   private static String TEST_ROOT_DIR =
@@ -145,7 +152,8 @@ public class TestChild extends HadoopTestCase {
                  + numPartFiles, list.length == numPartFiles);
     return job;
   }
-
+
+  @Test
   public void testChild() throws Exception {
     try {
       submitAndValidateJob(createJobConf(), 1, 1, true);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java
index 5d36c92e420..7520f389e14 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java
@@ -30,6 +30,9 @@ import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+
 @Ignore
 public class TestNoJobSetupCleanup extends HadoopTestCase {
   private static String TEST_ROOT_DIR =
@@ -68,7 +71,8 @@ public class TestNoJobSetupCleanup extends HadoopTestCase {
                  + numPartFiles, list.length == numPartFiles);
     return job;
   }
-
+
+  @Test
   public void testNoJobSetupCleanup() throws Exception {
     try {
       Configuration conf = createJobConf();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestTaskContext.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestTaskContext.java
index bf742c46169..67daaa443d0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestTaskContext.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestTaskContext.java
@@ -33,6 +33,8 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Tests context api and {@link StatusReporter#getProgress()} via
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestChainErrors.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestChainErrors.java
index 2dfcf415bb0..46024bc46de 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestChainErrors.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestChainErrors.java
@@ -29,6 +29,10 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
 
 /**
  * Tests error conditions in ChainMapper/ChainReducer.
@@ -51,6 +55,7 @@ public class TestChainErrors extends HadoopTestCase {
    *
    * @throws Exception
    */
+  @Test
   public void testChainSubmission() throws Exception {
 
     Configuration conf = createJobConf();
@@ -89,6 +94,7 @@ public class TestChainErrors extends HadoopTestCase {
    *
    * @throws Exception
   */
+  @Test
   public void testChainFail() throws Exception {
 
     Configuration conf = createJobConf();
@@ -114,6 +120,7 @@ public class TestChainErrors extends HadoopTestCase {
    *
   * @throws Exception
   */
+  @Test
   public void testReducerFail() throws Exception {
 
     Configuration conf = createJobConf();
@@ -139,6 +146,7 @@ public class TestChainErrors extends HadoopTestCase {
   *
   * @throws Exception
   */
+  @Test
   public void testChainMapNoOuptut() throws Exception {
     Configuration conf = createJobConf();
     String expectedOutput = "";
@@ -163,6 +171,7 @@ public class TestChainErrors extends HadoopTestCase {
   *
   * @throws Exception
   */
+  @Test
   public void testChainReduceNoOuptut() throws Exception {
     Configuration conf = createJobConf();
     String expectedOutput = "";
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestMapReduceChain.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestMapReduceChain.java
index 971ea6862c5..aaaaf5193d4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestMapReduceChain.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestMapReduceChain.java
@@ -30,6 +30,11 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 public class TestMapReduceChain extends HadoopTestCase {
 
@@ -63,6 +68,7 @@ public class TestMapReduceChain extends HadoopTestCase {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
   }
 
+  @Test
   public void testChain() throws Exception {
     Path inDir = new Path(localPathRoot, "testing/chain/input");
     Path outDir = new Path(localPathRoot, "testing/chain/output");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestSingleElementChain.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestSingleElementChain.java
index 06cfe1cb92b..f78ac70e46d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestSingleElementChain.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/chain/TestSingleElementChain.java
@@ -26,6 +26,9 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper;
 import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 
@@ -42,6 +45,7 @@ public class TestSingleElementChain extends HadoopTestCase {
   }
 
   // test chain mapper and reducer by adding single mapper and reducer to chain
+  @Test
   public void testNoChain() throws Exception {
     Path inDir = new Path(localPathRoot, "testing/chain/input");
     Path outDir = new Path(localPathRoot, "testing/chain/output");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDataDrivenDBInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDataDrivenDBInputFormat.java
index 37f93645c1e..81a32498d6c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDataDrivenDBInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDataDrivenDBInputFormat.java
@@ -18,25 +18,40 @@
 
 package org.apache.hadoop.mapreduce.lib.db;
 
-import java.sql.*;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-//import org.apache.hadoop.examples.DBCountPageView;
-import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapreduce.*;
-import org.apache.hadoop.mapreduce.lib.db.*;
-import org.apache.hadoop.mapreduce.lib.input.*;
-import org.apache.hadoop.mapreduce.lib.output.*;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.TaskCounter;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.hsqldb.server.Server;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+//import org.apache.hadoop.examples.DBCountPageView;
 
 /**
  * Test aspects of DataDrivenDBInputFormat
@@ -109,11 +124,13 @@ public class TestDataDrivenDBInputFormat extends HadoopTestCase {
     createConnection(driverClassName, url);
   }
 
+  @Before
   public void setUp() throws Exception {
     initialize(DRIVER_CLASS, DB_URL);
     super.setUp();
   }
 
+  @After
   public void tearDown() throws Exception {
     super.tearDown();
     shutdown();
@@ -170,6 +187,7 @@ public class TestDataDrivenDBInputFormat extends HadoopTestCase {
     }
   }
 
+  @Test
   public void testDateSplits() throws Exception {
     Statement s = connection.createStatement();
     final String DATE_TABLE = "datetable";
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java
index c8680508df8..632c40e3407 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java
@@ -38,6 +38,8 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.junit.Before;
 import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * @see TestDelegatingInputFormat
@@ -139,7 +141,7 @@ public class TestMultipleInputs extends HadoopTestCase {
     assertTrue(output.readLine().equals("e 2"));
   }
 
-  @SuppressWarnings("unchecked")
+  @Test
   public void testAddInputPathWithFormat() throws IOException {
     final Job conf = Job.getInstance();
     MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class);
@@ -152,7 +154,7 @@ public class TestMultipleInputs extends HadoopTestCase {
        .getClass());
   }
 
-  @SuppressWarnings("unchecked")
+  @Test
   public void testAddInputPathWithMapper() throws IOException {
     final Job conf = Job.getInstance();
     MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java
index 14c64bdc7d9..d86ddd0c523 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java
@@ -33,6 +33,9 @@ import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 
 /**
  * This class performs unit test for Job/JobControl classes.
@@ -120,7 +123,8 @@ public class TestMapReduceJobControl extends HadoopTestCase {
       } catch (Exception e) {}
     }
   }
-
+
+  @Test
   public void testJobControlWithFailJob() throws Exception {
     LOG.info("Starting testJobControlWithFailJob");
     Configuration conf = createJobConf();
@@ -144,6 +148,7 @@ public class TestMapReduceJobControl extends HadoopTestCase {
     theControl.stop();
   }
 
+  @Test
   public void testJobControlWithKillJob() throws Exception {
     LOG.info("Starting testJobControlWithKillJob");
 
@@ -182,6 +187,7 @@ public class TestMapReduceJobControl extends HadoopTestCase {
     theControl.stop();
   }
 
+  @Test
   public void testJobControl() throws Exception {
     LOG.info("Starting testJobControl");
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/map/TestMultithreadedMapper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/map/TestMultithreadedMapper.java
index 52b0e709de3..5096192e11c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/map/TestMultithreadedMapper.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/map/TestMultithreadedMapper.java
@@ -23,23 +23,33 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.junit.Test;
 
 import java.io.IOException;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 public class TestMultithreadedMapper extends HadoopTestCase {
 
   public TestMultithreadedMapper() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
   }
 
+  @Test
   public void testOKRun() throws Exception {
     run(false, false);
   }
 
+  @Test
   public void testIOExRun() throws Exception {
     run(true, false);
   }
 
+  @Test
   public void testRuntimeExRun() throws Exception {
     run(false, true);
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java
index 49b59cacfe4..19b712f3da1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java
@@ -33,6 +33,11 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 
 /**
  * A JUnit test to test Map-Reduce job committer.
@@ -54,15 +59,15 @@ public class TestJobOutputCommitter extends HadoopTestCase {
   private FileSystem fs;
   private Configuration conf = null;
 
-  @Override
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     super.setUp();
     conf = createJobConf();
     fs = getFileSystem();
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     fs.delete(new Path(TEST_ROOT_DIR), true);
     super.tearDown();
   }
@@ -219,6 +224,7 @@ public class TestJobOutputCommitter extends HadoopTestCase {
    *
   * @throws Exception
   */
+  @Test
   public void testDefaultCleanupAndAbort() throws Exception {
     // check with a successful job
     testSuccessfulJob(FileOutputCommitter.SUCCEEDED_FILE_NAME,
@@ -238,6 +244,7 @@ public class TestJobOutputCommitter extends HadoopTestCase {
   *
   * @throws Exception
   */
+  @Test
   public void testCustomAbort() throws Exception {
     // check with a successful job
     testSuccessfulJob(FileOutputCommitter.SUCCEEDED_FILE_NAME,
@@ -264,6 +271,7 @@ public class TestJobOutputCommitter extends HadoopTestCase {
   * compatibility testing.
   * @throws Exception
   */
+  @Test
   public void testCustomCleanup() throws Exception {
     // check with a successful job
     testSuccessfulJob(CUSTOM_CLEANUP_FILE_NAME,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRMultipleOutputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRMultipleOutputs.java
index 6c432ddfc7f..babd20e66c4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRMultipleOutputs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRMultipleOutputs.java
@@ -27,23 +27,36 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
 import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 public class TestMRMultipleOutputs extends HadoopTestCase {
 
   public TestMRMultipleOutputs() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
   }
 
+  @Test
   public void testWithoutCounters() throws Exception {
     _testMultipleOutputs(false);
     _testMOWithJavaSerialization(false);
   }
 
+  @Test
   public void testWithCounters() throws Exception {
     _testMultipleOutputs(true);
     _testMOWithJavaSerialization(true);
@@ -57,6 +70,7 @@ public class TestMRMultipleOutputs extends HadoopTestCase {
   private static String TEXT = "text";
   private static String SEQUENCE = "sequence";
 
+  @Before
   public void setUp() throws Exception {
     super.setUp();
     Configuration conf = createJobConf();
@@ -64,6 +78,7 @@
     fs.delete(ROOT_DIR, true);
   }
 
+  @After
   public void tearDown() throws Exception {
     Configuration conf = createJobConf();
     FileSystem fs = FileSystem.get(conf);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
index 3a2b8312bda..0d75d2fe9ea 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.mapreduce.lib.partition;
 
-import java.io.*;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -32,6 +30,15 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
+import org.junit.Test;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
 
@@ -45,8 +52,8 @@ public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
     conf = createJobConf();
     conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
   }
-
-  private void testComparator(String keySpec, int expect)
+
+  private void testComparator(String keySpec, int expect)
       throws Exception {
     String root = System.getProperty("test.build.data", "/tmp");
     Path inDir = new Path(root, "test_cmp/in");
@@ -93,7 +100,8 @@ public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
       reader.close();
     }
   }
-
+
+  @Test
   public void testBasicUnixComparator() throws Exception {
     testComparator("-k1,1n", 1);
     testComparator("-k2,2n", 1);
@@ -117,7 +125,7 @@
   byte[] line1_bytes = line1.getBytes();
   byte[] line2_bytes = line2.getBytes();
 
-  public void testWithoutMRJob(String keySpec, int expect) throws Exception {
+  private void testWithoutMRJob(String keySpec, int expect) throws Exception {
     KeyFieldBasedComparator keyFieldCmp = new KeyFieldBasedComparator();
     conf.set("mapreduce.partition.keycomparator.options", keySpec);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/terasort/TestTeraSort.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/terasort/TestTeraSort.java
index 349208999d3..391e482c193 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/terasort/TestTeraSort.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/terasort/TestTeraSort.java
@@ -27,6 +27,12 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.util.ToolRunner;
+import org.junit.After;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 public class TestTeraSort extends HadoopTestCase {
   private static Log LOG = LogFactory.getLog(TestTeraSort.class);
 
@@ -35,7 +41,8 @@ public class TestTeraSort extends HadoopTestCase {
     super(LOCAL_MR, LOCAL_FS, 1, 1);
   }
 
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     getFileSystem().delete(new Path(TEST_DIR), true);
     super.tearDown();
   }
@@ -76,6 +83,7 @@ public class TestTeraSort extends HadoopTestCase {
     assertEquals(ToolRunner.run(job, new TeraValidate(), svArgs), 0);
   }
 
+  @Test
   public void testTeraSort() throws Exception {
     // Run TeraGen to generate input for 'terasort'
     runTeraGen(createJobConf(), SORT_INPUT_PATH);
@@ -104,6 +112,7 @@ public class TestTeraSort extends HadoopTestCase {
       TERA_OUTPUT_PATH);
   }
 
+  @Test
   public void testTeraSortWithLessThanTwoArgs() throws Exception {
     String[] args = new String[1];
     assertEquals(new TeraSort().run(args), 2);