From 0050fa5f1c20087009bd76a0bb2183a479f787f0 Mon Sep 17 00:00:00 2001
From: Akira Ajisaka
Date: Tue, 29 Mar 2016 18:17:52 +0900
Subject: [PATCH] MAPREDUCE-6543. Migrate MR client test cases part 2.
 Contributed by Dustin Cote.

---
 .../java/org/apache/hadoop/fs/DFSCIOTest.java | 8 +--
 .../org/apache/hadoop/fs/TestFileSystem.java | 20 +++++--
 .../java/org/apache/hadoop/fs/TestJHLA.java | 6 +--
 .../io/TestSequenceFileMergeProgress.java | 12 +++--
 .../mapred/ClusterMapReduceTestCase.java | 14 ++---
 .../apache/hadoop/mapred/TestAuditLogger.java | 9 ++--
 .../apache/hadoop/mapred/TestBadRecords.java | 8 ++-
 .../mapred/TestClusterMapReduceTestCase.java | 10 ++++
 .../org/apache/hadoop/mapred/TestCollect.java | 10 ++--
 .../mapred/TestCommandLineJobSubmission.java | 9 ++--
 .../hadoop/mapred/TestFieldSelection.java | 6 ++-
 .../mapred/TestFileInputFormatPathFilter.java | 19 ++++---
 .../hadoop/mapred/TestGetSplitHosts.java | 7 +--
 .../hadoop/mapred/TestIFileStreams.java | 13 ++---
 .../apache/hadoop/mapred/TestInputPath.java | 7 +--
 .../hadoop/mapred/TestJavaSerialization.java | 10 ++--
 .../org/apache/hadoop/mapred/TestJobName.java | 6 +++
 .../hadoop/mapred/TestJobSysDirWithDFS.java | 10 ++--
 .../mapred/TestKeyValueTextInputFormat.java | 15 +++---
 .../apache/hadoop/mapred/TestLazyOutput.java | 7 +--
 .../mapred/TestMRCJCFileInputFormat.java | 32 +++++++-----
 .../mapred/TestMRCJCFileOutputCommitter.java | 28 ++++++----
 .../apache/hadoop/mapred/TestMapProgress.java | 9 ++--
 .../org/apache/hadoop/mapred/TestMerge.java | 7 +--
 .../hadoop/mapred/TestMiniMRBringup.java | 6 ++-
 .../hadoop/mapred/TestMiniMRDFSCaching.java | 14 +++--
 .../mapred/TestMultiFileInputFormat.java | 19 +++---
 .../hadoop/mapred/TestMultiFileSplit.java | 10 ++--
 .../mapred/TestMultipleLevelCaching.java | 12 +++--
 .../mapred/TestMultipleTextOutputFormat.java | 23 ++++----
 .../apache/hadoop/mapred/TestReduceFetch.java | 10 ++--
 .../mapred/TestReduceFetchFromPartialMem.java | 46 +++++++---------
 .../apache/hadoop/mapred/TestReduceTask.java | 18 ++++---
 .../TestSequenceFileAsBinaryInputFormat.java | 19 ++++---
 .../TestSequenceFileAsBinaryOutputFormat.java | 31 +++++++----
 .../TestSequenceFileAsTextInputFormat.java | 27 +++++-----
 .../mapred/TestSequenceFileInputFilter.java | 32 ++++++------
 .../mapred/TestSequenceFileInputFormat.java | 23 ++++----
 .../hadoop/mapred/TestSortedRanges.java | 19 ++++---
 .../TestSpecialCharactersInOutputPath.java | 21 ++++----
 .../mapred/TestStatisticsCollector.java | 10 ++--
 .../mapred/TestUserDefinedCounters.java | 24 +++++----
 .../hadoop/mapred/TestWritableJobConf.java | 20 ++++---
 .../apache/hadoop/mapred/TestYARNRunner.java | 8 +--
 .../hadoop/mapred/join/TestDatamerge.java | 42 ++++++++-------
 .../hadoop/mapred/join/TestTupleWritable.java | 24 ++++++---
 .../TestWrappedRecordReaderClassloader.java | 7 +--
 .../mapred/lib/TestDelegatingInputFormat.java | 9 ++--
 .../mapred/lib/TestLineInputFormat.java | 7 +--
 .../hadoop/mapred/lib/TestMultipleInputs.java | 2 -
 .../mapred/lib/aggregate/TestAggregates.java | 7 +--
 .../mapred/lib/db/TestConstructQuery.java | 16 +++---
 .../apache/hadoop/mapred/pipes/TestPipes.java | 9 ++--
 .../hadoop/mapreduce/TestLocalRunner.java | 34 +++++++-----
 .../hadoop/mapreduce/TestMRJobClient.java | 49 +++++++++--------
 .../mapreduce/TestMapReduceLazyOutput.java | 9 ++--
 .../hadoop/mapreduce/TestValueIterReset.java | 8 +--
 .../TestYarnClientProtocolProvider.java | 5 +-
 .../aggregate/TestMapReduceAggregates.java | 23 ++++----
 .../mapreduce/lib/db/TestDBOutputFormat.java | 17 +++---
 .../mapreduce/lib/db/TestIntegerSplitter.java | 15 ++++--
 .../mapreduce/lib/db/TestTextSplitter.java | 18 +++++--
 .../lib/fieldsel/TestMRFieldSelection.java | 20 ++++---
 ...TestMRSequenceFileAsBinaryInputFormat.java | 21 +++++---
 .../TestMRSequenceFileAsTextInputFormat.java | 27 ++++++----
 .../input/TestMRSequenceFileInputFilter.java | 39 +++++++-------
 .../lib/input/TestNLineInputFormat.java | 34 +++++++-----
 .../mapreduce/lib/join/TestJoinDatamerge.java | 52 +++++++++++--------
 .../lib/join/TestJoinProperties.java | 44 ++++++++--------
 .../lib/join/TestJoinTupleWritable.java | 24 ++++++---
 .../lib/join/TestWrappedRRClassloader.java | 17 ++++--
 ...estMRSequenceFileAsBinaryOutputFormat.java | 35 +++++++++----
 .../lib/partition/TestBinaryPartitioner.java | 16 ++++--
 .../lib/partition/TestKeyFieldHelper.java | 9 +++-
 .../TestMRKeyFieldBasedPartitioner.java | 6 ++-
 .../partition/TestTotalOrderPartitioner.java | 11 ++--
 .../util/TestMRAsyncDiskService.java | 15 ++++--
 .../mapreduce/v2/TestMiniMRProxyUser.java | 30 ++++++-----
 .../mapreduce/v2/TestNonExistentJob.java | 18 ++++---
 .../streaming/TestStreamingBadRecords.java | 9 +++-
 80 files changed, 835 insertions(+), 567 deletions(-)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
index 1caa2cdae6c..12bec0869f6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
@@ -28,8 +28,6 @@ import java.io.PrintStream;
 import java.util.Date;
 import java.util.StringTokenizer;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -39,8 +37,9 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.junit.Ignore;
+import org.junit.Test;
 
-/**
+ /**
  * Distributed i/o benchmark.
  *
  * This test writes into or reads from a specified number of files.
@@ -68,7 +67,7 @@ import org.junit.Ignore;
  *
  */
 @Ignore
-public class DFSCIOTest extends TestCase {
+public class DFSCIOTest {
   // Constants
   private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
   private static final int TEST_TYPE_READ = 0;
@@ -98,6 +97,7 @@ public class DFSCIOTest extends TestCase {
    *
    * @throws Exception
    */
+  @Test
   public void testIOs() throws Exception {
     testIOs(10, 10);
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
index 4146b139c50..f0300b368a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
@@ -34,8 +34,6 @@ import java.util.HashMap;
 import java.net.InetSocketAddress;
 import java.net.URI;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -50,8 +48,15 @@ import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
 
-public class TestFileSystem extends TestCase {
+
+public class TestFileSystem {
   private static final Log LOG = FileSystem.LOG;
 
   private static Configuration conf = new Configuration();
@@ -66,6 +71,7 @@ public class TestFileSystem extends TestCase {
   private static Path READ_DIR = new Path(ROOT, "fs_read");
   private static Path DATA_DIR = new Path(ROOT, "fs_data");
 
+  @Test
   public void testFs() throws Exception {
     testFs(10 * MEGA, 100, 0);
   }
@@ -90,7 +96,8 @@ public class TestFileSystem extends TestCase {
     fs.delete(READ_DIR, true);
   }
 
-  public static void testCommandFormat() throws Exception {
+  @Test
+  public void testCommandFormat() throws Exception {
     // This should go to TestFsShell.java when it is added.
     CommandFormat cf;
     cf= new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc");
@@ -488,6 +495,7 @@ public class TestFileSystem extends TestCase {
     }
   }
 
+  @Test
   public void testFsCache() throws Exception {
     {
       long now = System.currentTimeMillis();
@@ -561,6 +569,7 @@ public class TestFileSystem extends TestCase {
         + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
   }
 
+  @Test
   public void testFsClose() throws Exception {
     {
       Configuration conf = new Configuration();
@@ -569,6 +578,7 @@ public class TestFileSystem extends TestCase {
     }
   }
 
+  @Test
   public void testFsShutdownHook() throws Exception {
     final Set closed = Collections.synchronizedSet(new HashSet());
     Configuration conf = new Configuration();
@@ -600,7 +610,7 @@ public class TestFileSystem extends TestCase {
     assertTrue(closed.contains(fsWithoutAuto));
   }
 
-  
+  @Test
   public void testCacheKeysAreCaseInsensitive() throws Exception {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
index f2bc4edc46d..31950fd6104 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
@@ -23,19 +23,18 @@ import java.io.FileOutputStream;
 import java.io.OutputStreamWriter;
 import java.io.File;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Test;
 
 /**
  * Test Job History Log Analyzer.
  *
  * @see JHLogAnalyzer
  */
-public class TestJHLA extends TestCase {
+public class TestJHLA {
   private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
   private String historyLog = System.getProperty("test.build.data", "build/test/data") + "/history/test.log";
@@ -133,6 +132,7 @@ public class TestJHLA extends TestCase {
   /**
    * Run log analyzer in test mode for file test.log.
    */
+  @Test
   public void testJHLA() {
     String[] args = {"-test", historyLog, "-jobDelimiter", ".!!FILE=.*!!"};
     JHLogAnalyzer.main(args);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
index 1d7b98a6719..97dfa26acf4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
@@ -32,21 +32,25 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.mapred.*;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.*;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
-public class TestSequenceFileMergeProgress extends TestCase {
+public class TestSequenceFileMergeProgress {
   private static final Log LOG = FileInputFormat.LOG;
   private static final int RECORDS = 10000;
-  
+
+  @Test
   public void testMergeProgressWithNoCompression() throws IOException {
     runTest(SequenceFile.CompressionType.NONE);
   }
 
+  @Test
   public void testMergeProgressWithRecordCompression() throws IOException {
     runTest(SequenceFile.CompressionType.RECORD);
   }
 
+  @Test
   public void testMergeProgressWithBlockCompression() throws IOException {
     runTest(SequenceFile.CompressionType.BLOCK);
   }
@@ -92,7 +96,7 @@ public class TestSequenceFileMergeProgress extends TestCase {
       count++;
     }
     assertEquals(RECORDS, count);
-    assertEquals(1.0f, rIter.getProgress().get());
+    assertEquals(1.0f, rIter.getProgress().get(), 0.0000);
   }
 
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
index 5bf4ff11b89..8d33b1580a8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.After;
+import org.junit.Before;
 
 import java.io.IOException;
 import java.util.Map;
@@ -41,7 +42,7 @@
  *
  * The DFS filesystem is formated before the testcase starts and after it ends.
  */
-public abstract class ClusterMapReduceTestCase extends TestCase {
+public abstract class ClusterMapReduceTestCase {
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
@@ -50,9 +51,8 @@
    *
    * @throws Exception
    */
-  protected void setUp() throws Exception {
-    super.setUp();
-
+  @Before
+  public void setUp() throws Exception {
     startCluster(true, null);
   }
 
@@ -139,9 +139,9 @@
    *
    * @throws Exception
    */
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     stopCluster();
-    super.tearDown();
   }
 
   /**
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java
index 353185b59e3..bc85703bc84 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java
@@ -28,13 +28,13 @@ import org.apache.hadoop.ipc.TestRPC.TestImpl;
 import org.apache.hadoop.ipc.TestRPC.TestProtocol;
 import org.apache.hadoop.mapred.AuditLogger.Keys;
 import org.apache.hadoop.net.NetUtils;
-
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 /**
  * Tests {@link AuditLogger}.
  */
-public class TestAuditLogger extends TestCase {
+public class TestAuditLogger {
   private static final String USER = "test";
   private static final String OPERATION = "oper";
   private static final String TARGET = "tgt";
@@ -44,6 +44,7 @@
   /**
    * Test the AuditLog format with key-val pair.
    */
+  @Test
   public void testKeyValLogFormat() {
     StringBuilder actLog = new StringBuilder();
     StringBuilder expLog = new StringBuilder();
@@ -114,6 +115,7 @@
   /**
    * Test {@link AuditLogger} without IP set.
    */
+  @Test
   public void testAuditLoggerWithoutIP() throws Exception {
     // test without ip
     testSuccessLogFormat(false);
@@ -137,6 +139,7 @@
   /**
    * Test {@link AuditLogger} with IP set.
    */
+  @Test
   public void testAuditLoggerWithIP() throws Exception {
     Configuration conf = new Configuration();
     // start the IPC server
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
index ea9f3d3f989..c2d6257823e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
@@ -40,6 +40,11 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.junit.Ignore;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNotNull;
 
 @Ignore
 public class TestBadRecords extends ClusterMapReduceTestCase {
@@ -206,7 +211,8 @@
     }
     return processed;
   }
-  
+
+  @Test
   public void testBadMapRed() throws Exception {
     JobConf conf = createJobConf();
     conf.setMapperClass(BadMapper.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
index ada2d0c634b..f04fbd7a29a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
@@ -29,6 +29,12 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertFalse;
 
 public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
   public void _testMapReduce(boolean restart) throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
@@ -85,14 +91,17 @@
 
   }
 
+  @Test
   public void testMapReduce() throws Exception {
     _testMapReduce(false);
   }
 
+  @Test
   public void testMapReduceRestarting() throws Exception {
     _testMapReduce(true);
   }
 
+  @Test
   public void testDFSRestart() throws Exception {
     Path file = new Path(getInputDir(), "text.txt");
     OutputStream os = getFileSystem().create(file);
@@ -109,6 +118,7 @@
 
   }
 
+  @Test
   public void testMRConfig() throws Exception {
     JobConf conf = createJobConf();
     assertNull(conf.get("xyz"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java
index 4bd20d54ad5..595d09cc2a0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java
@@ -21,15 +21,15 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.junit.Test;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 
 /**
  * TestCollect checks if the collect can handle simultaneous invocations.
  */
-public class TestCollect extends TestCase
+public class TestCollect
 {
   final static Path OUTPUT_DIR = new Path("build/test/test.collect.output");
   static final int NUM_FEEDERS = 10;
@@ -127,7 +127,7 @@
     conf.setNumMapTasks(1);
     conf.setNumReduceTasks(1);
   }
-  
+  @Test
   public void testCollect() throws IOException {
     JobConf conf = new JobConf();
     configure(conf);
@@ -144,9 +144,5 @@
       fs.delete(OUTPUT_DIR, true);
     }
   }
-  
-  public static void main(String[] args) throws IOException {
-    new TestCollect().testCollect();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
index 69353871cf4..7cf5e71e1a5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
@@ -21,28 +21,29 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
 /**
  * check for the job submission options of
  * -libjars -files -archives
  */
 @Ignore
-public class TestCommandLineJobSubmission extends TestCase {
-  // Input output paths for this..
+public class TestCommandLineJobSubmission {
+  // Input output paths for this..
   // these are all dummy and does not test
   // much in map reduce except for the command line
   // params
   static final Path input = new Path("/test/input/");
   static final Path output = new Path("/test/output");
   File buildDir = new File(System.getProperty("test.build.data", "/tmp"));
+  @Test
   public void testJobShell() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
index 29333b7bfdb..868896815ef 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
@@ -23,11 +23,12 @@ import org.apache.hadoop.mapred.lib.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper;
 import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
-import junit.framework.TestCase;
 import java.text.NumberFormat;
 
-public class TestFieldSelection extends TestCase {
+public class TestFieldSelection {
 private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -35,6 +36,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     idFormat.setGroupingUsed(false);
   }
 
+  @Test
   public void testFieldSelection() throws Exception {
     launch();
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
index 1c8be66d084..d87f6fd91a9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
 import java.io.Writer;
@@ -30,7 +32,7 @@ import java.io.OutputStreamWriter;
 import java.util.Set;
 import java.util.HashSet;
 
-public class TestFileInputFormatPathFilter extends TestCase {
+public class TestFileInputFormatPathFilter {
 
   public static class DummyFileInputFormat extends FileInputFormat {
 
@@ -55,12 +57,12 @@
       new Path(new Path(System.getProperty("test.build.data", "."), "data"),
           "TestFileInputFormatPathFilter");
 
-
+  @Before
   public void setUp() throws Exception {
     tearDown();
     localFs.mkdirs(workDir);
   }
-
+  @After
   public void tearDown() throws Exception {
     if (localFs.exists(workDir)) {
       localFs.delete(workDir, true);
@@ -129,18 +131,19 @@
     assertEquals(createdFiles, computedFiles);
   }
 
+  @Test
   public void testWithoutPathFilterWithoutGlob() throws Exception {
     _testInputFiles(false, false);
   }
-
+  @Test
   public void testWithoutPathFilterWithGlob() throws Exception {
     _testInputFiles(false, true);
   }
-
+  @Test
   public void testWithPathFilterWithoutGlob() throws Exception {
     _testInputFiles(true, false);
  }
-
+  @Test
   public void testWithPathFilterWithGlob() throws Exception {
     _testInputFiles(true, true);
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java
index 7891bca7990..3d1c2e71bff 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java
@@ -20,10 +20,11 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.net.NetworkTopology;
 
-import junit.framework.TestCase;
-
-public class TestGetSplitHosts extends TestCase {
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
+public class TestGetSplitHosts {
+  @Test
   public void testGetSplitHosts() throws Exception {
 
     int numBlocks = 3;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java
index 86431e5c135..2b97d3b95ad 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java
@@ -21,11 +21,12 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
+import org.junit.Test;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertEquals;
 
-import junit.framework.TestCase;
-
-public class TestIFileStreams extends TestCase {
-
+public class TestIFileStreams {
+  @Test
   public void testIFileStream() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -42,7 +43,7 @@
     }
     ifis.close();
   }
-
+  @Test
   public void testBadIFileStream() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -73,7 +74,7 @@
     }
     fail("Did not detect bad data in checksum");
   }
-
+  @Test
   public void testBadLength() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java
index 1398f9e5aaa..0c20c335d89 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java
@@ -17,14 +17,15 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
-public class TestInputPath extends TestCase {
+public class TestInputPath {
+  @Test
   public void testInputPath() throws Exception {
     JobConf jobConf = new JobConf();
     Path workingDir = jobConf.getWorkingDirectory();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
index 265118a70f6..a787e68c124 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
@@ -26,8 +26,6 @@ import java.io.Writer;
 import java.util.Iterator;
 import java.util.StringTokenizer;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -36,8 +34,11 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
 
-public class TestJavaSerialization extends TestCase {
+public class TestJavaSerialization {
 
   private static String TEST_ROOT_DIR =
       new File(System.getProperty("test.build.data", "/tmp")).toURI()
@@ -90,7 +91,7 @@
     wr.write("b a\n");
     wr.close();
   }
-  
+  @Test
   public void testMapReduceJob() throws Exception {
 
     JobConf conf = new JobConf(TestJavaSerialization.class);
@@ -149,6 +150,7 @@
    * coupled to Writable types, if so, the job will fail.
    *
    */
+  @Test
   public void testWriteToSequencefile() throws Exception {
     JobConf conf = new JobConf(TestJavaSerialization.class);
     conf.setJobName("JavaSerialization");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
index 4b62b4a1d8e..2659a14a70b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
@@ -29,8 +29,13 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
 
 public class TestJobName extends ClusterMapReduceTestCase {
+  @Test
   public void testComplexName() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
@@ -65,6 +70,7 @@
     reader.close();
   }
 
+  @Test
   public void testComplexNameWithRegex() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
index 109c781c2b0..3dbc5777bd5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -32,11 +30,15 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 
 /**
  * A JUnit test to test Job System Directory with Mini-DFS.
  */
-public class TestJobSysDirWithDFS extends TestCase {
+public class TestJobSysDirWithDFS {
   private static final Log LOG =
     LogFactory.getLog(TestJobSysDirWithDFS.class.getName());
@@ -115,7 +117,7 @@
     // between Job Client & Job Tracker
     assertTrue(result.job.isSuccessful());
   }
-  
+  @Test
   public void testWithDFS() throws IOException {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
index 27070783e14..bacc196008e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
 import java.io.*;
 import java.util.*;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.*;
 import org.apache.hadoop.fs.*;
@@ -28,8 +27,11 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.compress.*;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 
-public class TestKeyValueTextInputFormat extends TestCase {
+public class TestKeyValueTextInputFormat {
   private static final Log LOG =
     LogFactory.getLog(TestKeyValueTextInputFormat.class.getName());
 
@@ -47,7 +49,7 @@
   private static Path workDir =
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestKeyValueTextInputFormat");
-  
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     Path file = new Path(workDir, "test.txt");
@@ -134,7 +136,7 @@
                                            (str.getBytes("UTF-8")),
                                            defaultConf);
   }
-  
+  @Test
   public void testUTF8() throws Exception {
     LineReader in = null;
 
@@ -153,7 +155,7 @@
       }
     }
   }
-  
+  @Test
   public void testNewLines() throws Exception {
     LineReader in = null;
     try {
@@ -219,7 +221,8 @@
   /**
    * Test using the gzip codec for reading
    */
-  public static void testGzip() throws IOException {
+  @Test
+  public void testGzip() throws IOException {
     JobConf job = new JobConf();
     CompressionCodec gzip = new GzipCodec();
     ReflectionUtils.setConf(gzip, job);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java
index 7412832d5c2..dde9310607f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java
@@ -35,14 +35,15 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.lib.LazyOutputFormat;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
 /**
  * A JUnit test to test the Map-Reduce framework's feature to create part
 * files only if there is an explicit output.collect. This helps in preventing
 * 0 byte files
 */
-public class TestLazyOutput extends TestCase {
+public class TestLazyOutput {
   private static final int NUM_HADOOP_SLAVES = 3;
   private static final int NUM_MAPS_PER_NODE = 2;
   private static final Path INPUT = new Path("/testlazy/input");
@@ -132,7 +133,7 @@
     }
   }
 
-
+  @Test
   public void testLazyOutput() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
index fb9e8fcce3a..20d0173cc81 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
@@ -17,16 +17,6 @@
  */
 package org.apache.hadoop.mapred;
 
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.concurrent.TimeoutException;
-
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -36,9 +26,21 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
+import org.junit.After;
+import org.junit.Test;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.concurrent.TimeoutException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 @SuppressWarnings("deprecation")
-public class TestMRCJCFileInputFormat extends TestCase {
+public class TestMRCJCFileInputFormat {
 
   Configuration conf = new Configuration();
   MiniDFSCluster dfs = null;
@@ -50,6 +52,7 @@
       .build();
   }
 
+  @Test
   public void testLocality() throws Exception {
     JobConf job = new JobConf(conf);
     dfs = newDFSCluster(job);
@@ -109,6 +112,7 @@
     DFSTestUtil.waitReplication(fs, path, replication);
   }
 
+  @Test
   public void testNumInputs() throws Exception {
     JobConf job = new JobConf(conf);
     dfs = newDFSCluster(job);
@@ -157,6 +161,7 @@
     }
   }
 
+  @Test
   public void testMultiLevelInput() throws Exception {
     JobConf job = new JobConf(conf);
 
@@ -195,6 +200,7 @@
   }
 
   @SuppressWarnings("rawtypes")
+  @Test
   public void testLastInputSplitAtSplitBoundary() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(1024l * 1024 * 1024,
         128l * 1024 * 1024);
@@ -208,6 +214,7 @@
   }
 
   @SuppressWarnings("rawtypes")
+  @Test
   public void testLastInputSplitExceedingSplitBoundary() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(1027l * 1024 * 1024,
         128l * 1024 * 1024);
@@ -221,6 +228,7 @@
   }
 
   @SuppressWarnings("rawtypes")
+  @Test
   public void testLastInputSplitSingleSplit() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(100l * 1024 * 1024,
         128l * 1024 * 1024);
@@ -305,7 +313,7 @@
     DFSTestUtil.waitReplication(fileSys, name, replication);
   }
 
-  @Override
+  @After
   public void tearDown() throws Exception {
     if (dfs != null) {
       dfs.shutdown();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
index 3b86f81cc23..74b6d77f6a0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
@@ -18,18 +18,25 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RawLocalFileSystem;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.JobStatus;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
 import java.net.URI;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.JobContextImpl;
-import org.apache.hadoop.mapred.TaskAttemptContextImpl;
-import org.apache.hadoop.mapreduce.JobStatus;
-
-public class TestMRCJCFileOutputCommitter extends TestCase {
+public class TestMRCJCFileOutputCommitter {
   private static Path outDir = new Path(
      System.getProperty("test.build.data", "/tmp"), "output");
 
@@ -67,6 +74,7 @@
   }
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testCommitter() throws Exception {
     JobConf job = new JobConf();
     setConfForFileOutputCommitter(job);
@@ -108,6 +116,7 @@
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testAbort() throws IOException {
     JobConf job = new JobConf();
     setConfForFileOutputCommitter(job);
@@ -161,6 +170,7 @@
     }
   }
 
+  @Test
   public void testFailAbort() throws IOException {
     JobConf job = new JobConf();
     job.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
index db6348ba440..b8ff016d6af 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
@@ -22,8 +22,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,6 +38,8 @@ import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.split.JobSplitWriter;
 import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Validates map phase progress.
@@ -55,7 +55,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  * once mapTask.run() is finished. Sort phase progress in map task is not
 * validated here.
 */
-public class TestMapProgress extends TestCase {
+public class TestMapProgress {
   public static final Log LOG = LogFactory.getLog(TestMapProgress.class);
   private static String TEST_ROOT_DIR;
   static {
@@ -234,7 +234,8 @@
   /**
    * Validates map phase progress after each record is processed by map task
    * using custom task reporter.
-   */
+   */
+  @Test
   public void testMapProgress() throws Exception {
     JobConf job = new JobConf();
     fs = FileSystem.getLocal(job);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
index e19ff589fa4..a9e7f64c0b8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
@@ -44,8 +44,8 @@ import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 import org.apache.hadoop.mapred.Task.TaskReporter;
-
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 @SuppressWarnings(value={"unchecked", "deprecation"})
 /**
@@ -56,7 +56,7 @@
  * framework's merge on the reduce side will merge the partitions created to
 * generate the final output which is sorted on the key.
 */
-public class TestMerge extends TestCase {
+public class TestMerge {
   private static final int NUM_HADOOP_DATA_NODES = 2;
   // Number of input files is same as the number of mappers.
   private static final int NUM_MAPPERS = 10;
@@ -69,6 +69,7 @@
   // Where output goes.
   private static final Path OUTPUT = new Path("/testplugin/output");
 
+  @Test
   public void testMerge() throws Exception {
     MiniDFSCluster dfsCluster = null;
     MiniMRClientCluster mrCluster = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java
index 8b7b8f51b96..b608d756a49 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java
@@ -18,14 +18,16 @@
 
 package org.apache.hadoop.mapred;
 
+import org.junit.Test;
+
 import java.io.IOException;
-import junit.framework.TestCase;
 
 /**
  * A Unit-test to test bringup and shutdown of Mini Map-Reduce Cluster.
 */
-public class TestMiniMRBringup extends TestCase {
+public class TestMiniMRBringup {
 
+  @Test
   public void testBringUp() throws IOException {
     MiniMRCluster mr = null;
     try {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java
index 45879aff623..3f64f7a35b9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java
@@ -18,20 +18,23 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
-import junit.framework.TestCase;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.MRCaching.TestResult;
 import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
 
 /**
  * A JUnit test to test caching with DFS
 *
 */
 @Ignore
-public class TestMiniMRDFSCaching extends TestCase {
+public class TestMiniMRDFSCaching {
 
+  @Test
   public void testWithDFS() throws IOException {
     MiniMRCluster mr = null;
     MiniDFSCluster dfs = null;
@@ -70,9 +73,4 @@
       }
     }
   }
-
-  public static void main(String[] argv) throws Exception {
-    TestMiniMRDFSCaching td = new TestMiniMRDFSCaching();
-    td.testWithDFS();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
index 49825e99f57..1bd29542fcd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
@@ -21,17 +21,17 @@ import java.io.IOException;
 import java.util.BitSet;
 import java.util.HashMap;
 import java.util.Random;
-
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 
-public class TestMultiFileInputFormat extends TestCase{
+public class TestMultiFileInputFormat {
 
   private static JobConf job = new JobConf();
 
@@ -79,7 +79,8 @@
     FileInputFormat.setInputPaths(job, multiFileDir);
     return multiFileDir;
   }
-  
+
+  @Test
   public void testFormat() throws IOException {
     LOG.info("Test started");
     LOG.info("Max split count = " + MAX_SPLIT_COUNT);
@@ -122,7 +123,8 @@
     }
     LOG.info("Test Finished");
   }
-  
+
+  @Test
   public void testFormatWithLessPathsThanSplits() throws Exception {
     MultiFileInputFormat format = new DummyMultiFileInputFormat();
     FileSystem fs = FileSystem.getLocal(job);
@@ -135,9 +137,4 @@
     initFiles(fs, 2, 500);
     assertEquals(2, format.getSplits(job, 4).length);
   }
-
-  public static void main(String[] args) throws Exception{
-    TestMultiFileInputFormat test = new TestMultiFileInputFormat();
-    test.testFormat();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
index 16ff6af9271..5bb336e4e81 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
@@ -27,16 +27,19 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Arrays;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 /**
  *
 * test MultiFileSplit class
 */
-public class TestMultiFileSplit extends TestCase{
+public class TestMultiFileSplit {
 
+  @Test
   public void testReadWrite() throws Exception {
     MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {new Path("/test/path/1"), new Path("/test/path/2")}, new long[] {100,200});
@@ -70,6 +73,7 @@
    * test method getLocations
    * @throws IOException
    */
+  @Test
   public void testgetLocations() throws IOException{
     JobConf job= new JobConf();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
index 294723a9c87..7e8dfef03f1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
@@ -17,10 +17,6 @@
  */
 package org.apache.hadoop.mapred;
 
-import java.io.IOException;
-
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,12 +28,17 @@ import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.junit.Ignore;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
 
 /**
  * This test checks whether the task caches are created and used properly.
 */
 @Ignore
-public class TestMultipleLevelCaching extends TestCase {
+public class TestMultipleLevelCaching {
   private static final int MAX_LEVEL = 5;
   final Path inDir = new Path("/cachetesting");
   final Path outputPath = new Path("/output");
@@ -71,6 +72,7 @@
     return rack.toString();
   }
 
+  @Test
   public void testMultiLevelCaching() throws Exception {
     for (int i = 1 ; i <= MAX_LEVEL; ++i) {
       testCachingAtLevel(i);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
index 14c097d77e1..b5047fc8331 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
@@ -18,15 +18,19 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
+import org.junit.Test;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import java.io.File;
+import java.io.IOException;
 
-import org.apache.hadoop.mapred.lib.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
-public class TestMultipleTextOutputFormat extends TestCase {
+public class TestMultipleTextOutputFormat {
   private static JobConf defaultConf = new JobConf();
 
   private static FileSystem localFs = null;
@@ -83,7 +87,8 @@
     writeData(rw);
     rw.close(null);
   }
-  
+
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     job.set(JobContext.TASK_ATTEMPT_ID, attempt);
@@ -145,8 +150,4 @@
     //System.out.printf("File_2 output: %s\n", output);
     assertEquals(output, expectedOutput.toString());
   }
-
-  public static void main(String[] args) throws Exception {
-    new TestMultipleTextOutputFormat().testFormat();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
index 586df38dcfc..767459f88b4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
@@ -19,17 +19,18 @@
 package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.mapreduce.TaskCounter;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 public class TestReduceFetch extends TestReduceFetchFromPartialMem {
 
-  static {
-    setSuite(TestReduceFetch.class);
-  }
-
   /**
    * Verify that all segments are read from disk
    * @throws Exception might be thrown
    */
+  @Test
   public void testReduceFromDisk() throws Exception {
     final int MAP_TASKS = 8;
     JobConf job = mrCluster.createJobConf();
@@ -53,6 +54,7 @@
    * Verify that no segment hits disk.
    * @throws Exception might be thrown
    */
+  @Test
   public void testReduceFromMem() throws Exception {
     final int MAP_TASKS = 3;
     JobConf job = mrCluster.createJobConf();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
index 3a1a275ab91..9b04f64ac60 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
@@ -18,10 +18,6 @@
 
 package org.apache.hadoop.mapred;
 
-import junit.extensions.TestSetup;
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -30,7 +26,9 @@ import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.TaskCounter;
-import org.apache.hadoop.mapreduce.MRConfig;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -39,34 +37,27 @@ import java.util.Arrays;
 import java.util.Formatter;
 import java.util.Iterator;
 
-public class TestReduceFetchFromPartialMem extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class TestReduceFetchFromPartialMem {
 
   protected static MiniMRCluster mrCluster = null;
   protected static MiniDFSCluster dfsCluster = null;
-  protected static TestSuite mySuite;
 
-  protected static void setSuite(Class klass) {
-    mySuite = new TestSuite(klass);
+  @Before
+  public void setUp() throws Exception {
+    Configuration conf = new Configuration();
+    dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+    mrCluster = new MiniMRCluster(2,
+        dfsCluster.getFileSystem().getUri().toString(), 1);
   }
 
-  static {
-    setSuite(TestReduceFetchFromPartialMem.class);
-  }
-
-  public static Test suite() {
-    TestSetup setup = new TestSetup(mySuite) {
-      protected void setUp() throws Exception {
-        Configuration 
conf = new Configuration(); - dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); - mrCluster = new MiniMRCluster(2, - dfsCluster.getFileSystem().getUri().toString(), 1); - } - protected void tearDown() throws Exception { - if (dfsCluster != null) { dfsCluster.shutdown(); } - if (mrCluster != null) { mrCluster.shutdown(); } - } - }; - return setup; + @After + public void tearDown() throws Exception { + if (dfsCluster != null) { dfsCluster.shutdown(); } + if (mrCluster != null) { mrCluster.shutdown(); } } private static final String tagfmt = "%04d"; @@ -78,6 +69,7 @@ public class TestReduceFetchFromPartialMem extends TestCase { } /** Verify that at least one segment does not hit disk */ + @Test public void testReduceFromPartialMem() throws Exception { final int MAP_TASKS = 7; JobConf job = mrCluster.createJobConf(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java index 43fd94871a2..69546a6cba2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java @@ -17,10 +17,6 @@ */ package org.apache.hadoop.mapred; -import java.io.IOException; - -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; @@ -30,11 +26,17 @@ import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.util.Progressable; +import org.junit.Test; + +import java.io.IOException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; /** * This test exercises the ValueIterator. 
*/ -public class TestReduceTask extends TestCase { +public class TestReduceTask { static class NullProgress implements Progressable { public void progress() { } @@ -119,9 +121,10 @@ public class TestReduceTask extends TestCase { } assertEquals(vals.length, i); // make sure we have progress equal to 1.0 - assertEquals(1.0f, rawItr.getProgress().get()); + assertEquals(1.0f, rawItr.getProgress().get(),0.0000); } + @Test public void testValueIterator() throws Exception { Path tmpDir = new Path("build/test/test.reduce.task"); Configuration conf = new Configuration(); @@ -129,7 +132,8 @@ public class TestReduceTask extends TestCase { runValueIterator(tmpDir, testCase, conf, null); } } - + + @Test public void testValueIteratorWithCompression() throws Exception { Path tmpDir = new Path("build/test/test.reduce.task.compression"); Configuration conf = new Configuration(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java index b8be7400070..64b0983a5d6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java @@ -18,19 +18,26 @@ package org.apache.hadoop.mapred; +import org.apache.commons.logging.Log; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.DataInputBuffer; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; +import org.junit.Test; + import java.io.IOException; import java.util.Random; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; -import junit.framework.TestCase; -import org.apache.commons.logging.*; - -public class TestSequenceFileAsBinaryInputFormat extends TestCase { +public class TestSequenceFileAsBinaryInputFormat { private static final Log LOG = FileInputFormat.LOG; private static final int RECORDS = 10000; + @Test public void testBinary() throws IOException { JobConf job = new JobConf(); FileSystem fs = FileSystem.getLocal(job); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java index abe21f223ef..03dc6a69003 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java @@ -18,24 +18,35 @@ package org.apache.hadoop.mapred; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import 
org.apache.hadoop.io.BooleanWritable; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.DataInputBuffer; +import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.io.DoubleWritable; +import org.apache.hadoop.io.FloatWritable; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.SequenceFile.CompressionType; +import org.junit.Test; + import java.io.IOException; import java.util.Random; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; -import org.apache.hadoop.io.SequenceFile.CompressionType; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; -import junit.framework.TestCase; -import org.apache.commons.logging.*; - -public class TestSequenceFileAsBinaryOutputFormat extends TestCase { +public class TestSequenceFileAsBinaryOutputFormat { private static final Log LOG = LogFactory.getLog(TestSequenceFileAsBinaryOutputFormat.class.getName()); - private static final int RECORDS = 10000; // A random task attempt id for testing. private static final String attempt = "attempt_200707121733_0001_m_000000_0"; + @Test public void testBinary() throws IOException { JobConf job = new JobConf(); FileSystem fs = FileSystem.getLocal(job); @@ -129,7 +140,8 @@ public class TestSequenceFileAsBinaryOutputFormat extends TestCase { assertEquals("Some records not found", RECORDS, count); } - public void testSequenceOutputClassDefaultsToMapRedOutputClass() + @Test + public void testSequenceOutputClassDefaultsToMapRedOutputClass() throws IOException { JobConf job = new JobConf(); FileSystem fs = FileSystem.getLocal(job); @@ -163,6 +175,7 @@ public class TestSequenceFileAsBinaryOutputFormat extends TestCase { job)); } + @Test public void testcheckOutputSpecsForbidRecordCompression() throws IOException { JobConf job = new JobConf(); FileSystem fs = FileSystem.getLocal(job); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java index 4cfd59af745..d4e5e17e11f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java @@ -18,22 +18,29 @@ package org.apache.hadoop.mapred; -import java.io.*; -import java.util.*; -import junit.framework.TestCase; +import org.apache.commons.logging.Log; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; +import org.junit.Test; -import org.apache.commons.logging.*; +import java.util.BitSet; +import java.util.Random; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; -import org.apache.hadoop.conf.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; -public class TestSequenceFileAsTextInputFormat extends TestCase { +public class TestSequenceFileAsTextInputFormat { private static final Log LOG = FileInputFormat.LOG; 
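// Aside on the TestReduceTask hunk above: a minimal, self-contained sketch
// (hypothetical class, not part of this patch) of why the migrated assert gains
// a third argument. JUnit 4's org.junit.Assert deprecates the two-argument
// floating-point form and expects an explicit tolerance (delta).
import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class FloatAssertSketch {
  @Test
  public void progressReachesOne() {
    // Stands in for rawItr.getProgress().get() in the real test.
    float progress = 10f / 10f;
    // JUnit 3 allowed assertEquals(1.0f, progress); JUnit 4 wants a delta.
    // An exact match is expected here, so the tolerance is zero.
    assertEquals(1.0f, progress, 0.0f);
  }
}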
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf(conf);
     FileSystem fs = FileSystem.getLocal(conf);
@@ -112,8 +119,4 @@
       }
     }
-
-  public static void main(String[] args) throws Exception {
-    new TestSequenceFileAsTextInputFormat().testFormat();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
index e50c396a434..93f21ce9e49 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
@@ -18,17 +18,21 @@
 package org.apache.hadoop.mapred;
-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.junit.Test;
-import org.apache.commons.logging.*;
+import java.io.IOException;
+import java.util.Random;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.conf.*;
+import static org.junit.Assert.assertEquals;
-public class TestSequenceFileInputFilter extends TestCase {
+public class TestSequenceFileInputFilter {
   private static final Log LOG = FileInputFormat.LOG;
   private static final int MAX_LENGTH = 15000;
@@ -97,7 +101,8 @@ public class TestSequenceFileInputFilter {
     }
     return count;
   }
-
+
+  @Test
   public void testRegexFilter() throws Exception {
     // set the filter class
     LOG.info("Testing Regex Filter with patter: \\A10*");
@@ -121,6 +126,7 @@ public class TestSequenceFileInputFilter {
     fs.delete(inDir, true);
   }
+  @Test
   public void testPercentFilter() throws Exception {
     LOG.info("Testing Percent Filter with frequency: 1000");
     // set the filter class
@@ -147,7 +153,8 @@ public class TestSequenceFileInputFilter {
     // clean up
     fs.delete(inDir, true);
   }
-
+
+  @Test
   public void testMD5Filter() throws Exception {
     // set the filter class
     LOG.info("Testing MD5 Filter with frequency: 1000");
@@ -168,9 +175,4 @@ public class TestSequenceFileInputFilter {
     // clean up
     fs.delete(inDir, true);
   }
-
-  public static void main(String[] args) throws Exception {
-    TestSequenceFileInputFilter filter = new TestSequenceFileInputFilter();
-    filter.testRegexFilter();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
index 575ed532545..ba4dce30974 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
@@ -18,22 +18,28 @@
 package org.apache.hadoop.mapred;
-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.junit.Test;
-import org.apache.commons.logging.*;
+import java.util.BitSet;
+import java.util.Random;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.conf.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
-public class TestSequenceFileInputFormat extends TestCase {
+public class TestSequenceFileInputFormat {
   private static final Log LOG = FileInputFormat.LOG;
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf(conf);
     FileSystem fs = FileSystem.getLocal(conf);
@@ -110,7 +116,6 @@
     }
   }
-
   public static void main(String[] args) throws Exception {
     new TestSequenceFileInputFormat().testFormat();
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
index ad4d4ce17a9..82d1d2d09a1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
@@ -17,18 +17,20 @@
  */
 package org.apache.hadoop.mapred;
-import java.util.Iterator;
-
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.SortedRanges.Range;
+import org.junit.Test;
-public class TestSortedRanges extends TestCase {
-  private static final Log LOG =
+import java.util.Iterator;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestSortedRanges {
+  private static final Log LOG =
     LogFactory.getLog(TestSortedRanges.class);
-
+
+  @Test
   public void testAdd() {
     SortedRanges sr = new SortedRanges();
     sr.add(new Range(2,9));
@@ -66,7 +68,8 @@
     assertEquals(77, it.next().longValue());
   }
-
+
+  @Test
   public void testRemove() {
     SortedRanges sr = new SortedRanges();
     sr.add(new Range(2,19));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
index 426686f9bb5..b9e32759fa4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
@@ -18,12 +18,6 @@
 package org.apache.hadoop.mapred;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.net.URI;
-
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -34,14 +28,20 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
-import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.Progressable;
+import org.junit.Test;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.net.URI;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 /**
  * A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795).
  */
-public class TestSpecialCharactersInOutputPath extends TestCase {
+public class TestSpecialCharactersInOutputPath {
   private static final Log LOG =
     LogFactory.getLog(TestSpecialCharactersInOutputPath.class.getName());
@@ -96,7 +96,8 @@
     LOG.info("job is complete: " + runningJob.isSuccessful());
     return (runningJob.isSuccessful());
   }
-
+
+  @Test
   public void testJobWithDFS() throws IOException {
     String namenode = null;
     MiniDFSCluster dfs = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
index 12568d09175..8a83e8153e3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
@@ -19,14 +19,18 @@
 package org.apache.hadoop.mapred;
 import java.util.Map;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
 import org.apache.hadoop.mapred.StatisticsCollector.Stat;
+import org.junit.Test;
-public class TestStatisticsCollector extends TestCase{
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+public class TestStatisticsCollector {
   @SuppressWarnings("rawtypes")
+  @Test
   public void testMovingWindow() throws Exception {
     StatisticsCollector collector = new StatisticsCollector(1);
     TimeWindow window = new TimeWindow("test", 6, 2);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
index 3c2cf215fb3..2d67edc581a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
@@ -17,6 +17,15 @@
  */
 package org.apache.hadoop.mapred;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.junit.Test;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
@@ -26,18 +35,10 @@ import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.lib.IdentityMapper;
-import org.apache.hadoop.mapred.lib.IdentityReducer;
-
-public class TestUserDefinedCounters extends TestCase {
-
+public class TestUserDefinedCounters {
   private static String TEST_ROOT_DIR =
       new File(System.getProperty("test.build.data", "/tmp")).toURI()
           .toString().replace(' ', '+')
@@ -75,6 +76,7 @@
     wr.close();
   }
+  @Test
   public void testMapReduceJob() throws Exception {
     JobConf conf = new JobConf(TestUserDefinedCounters.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java
index 2c0cedcbb30..82c68db30c5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java
@@ -18,12 +18,6 @@
 package org.apache.hadoop.mapred;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -31,8 +25,15 @@ import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 import org.apache.hadoop.util.GenericsUtil;
+import org.junit.Test;
-public class TestWritableJobConf extends TestCase {
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import static org.junit.Assert.assertTrue;
+
+public class TestWritableJobConf {
   private static final Configuration CONF = new Configuration();
@@ -78,15 +79,17 @@
       }
     }
-    assertEquals(map1, map2);
+    assertTrue(map1.equals(map2));
   }
+  @Test
   public void testEmptyConfiguration() throws Exception {
     JobConf conf = new JobConf();
     Configuration deser = serDeser(conf);
     assertEquals(conf, deser);
   }
+  @Test
   public void testNonEmptyConfiguration() throws Exception {
     JobConf conf = new JobConf();
     conf.set("a", "A");
@@ -95,6 +98,7 @@ public class TestWritableJobConf extends TestCase {
     assertEquals(conf, deser);
   }
+  @Test
   public void testConfigurationWithDefaults() throws Exception {
     JobConf conf = new JobConf(false);
     conf.set("a", "A");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
index 0e340428214..abf2e72e0d1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
@@ -18,6 +18,10 @@
 package org.apache.hadoop.mapred;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.doReturn;
@@ -38,8 +42,6 @@ import java.security.PrivilegedExceptionAction;
 import java.util.List;
 import java.util.Map;
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -113,7 +115,7 @@
  * Test YarnRunner and make sure the client side plugin works
  * fine
  */
-public class TestYARNRunner extends TestCase {
+public class TestYARNRunner {
   private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
   private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java
index 15cea69dab2..a3066765ec0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java
@@ -22,11 +22,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Iterator;
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-import junit.extensions.TestSetup;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -54,23 +49,27 @@ import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
-public class TestDatamerge extends TestCase {
+public class TestDatamerge {
   private static MiniDFSCluster cluster = null;
-  public static Test suite() {
-    TestSetup setup = new TestSetup(new TestSuite(TestDatamerge.class)) {
-      protected void setUp() throws Exception {
-        Configuration conf = new Configuration();
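// Aside on this hunk: junit.extensions.TestSetup wrapped the whole suite, so the
// removed setUp()/tearDown() ran once around all tests, while the @Before/@After
// replacement below runs around every test, restarting the mini cluster each time.
// JUnit 4's one-time equivalent is @BeforeClass/@AfterClass on public static
// methods, sketched here with a hypothetical stand-in for the cluster.
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

public class OneTimeSetupSketch {
  private static StringBuilder sharedResource; // stands in for MiniDFSCluster

  @BeforeClass
  public static void startOnce() { // runs once, before the first test
    sharedResource = new StringBuilder("up");
  }

  @AfterClass
  public static void stopOnce() { // runs once, after the last test
    sharedResource = null;
  }

  @Test
  public void usesSharedResource() {
    Assert.assertEquals("up", sharedResource.toString());
  }
}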
-        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-      }
-      protected void tearDown() throws Exception {
-        if (cluster != null) {
-          cluster.shutdown();
-        }
-      }
-    };
-    return setup;
+
+  @Before
+  public void setUp() throws Exception {
+    Configuration conf = new Configuration();
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+  }
+  @After
+  public void tearDown() throws Exception {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
   }
   private static SequenceFile.Writer[] createWriters(Path testdir,
@@ -246,18 +245,22 @@
     base.getFileSystem(job).delete(base, true);
   }
+  @Test
   public void testSimpleInnerJoin() throws Exception {
     joinAs("inner", InnerJoinChecker.class);
   }
+  @Test
   public void testSimpleOuterJoin() throws Exception {
     joinAs("outer", OuterJoinChecker.class);
   }
+  @Test
   public void testSimpleOverride() throws Exception {
     joinAs("override", OverrideChecker.class);
   }
+  @Test
   public void testNestedJoin() throws Exception {
     // outer(inner(S1,...,Sn),outer(S1,...Sn))
     final int SOURCES = 3;
@@ -350,6 +353,7 @@
   }
+  @Test
   public void testEmptyJoin() throws Exception {
     JobConf job = new JobConf();
     Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
index e421ede9827..56871550dc9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
@@ -26,8 +26,6 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Random;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -36,8 +34,12 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
-public class TestTupleWritable extends TestCase {
+public class TestTupleWritable {
   private TupleWritable makeTuple(Writable[] writs) {
     Writable[] sub1 = { writs[1], writs[2] };
@@ -100,6 +102,7 @@
     return i;
   }
+  @Test
   public void testIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -121,6 +124,7 @@
     verifIter(writs, t, 0);
   }
+  @Test
   public void testNestedIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -139,6 +143,7 @@
     assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
   }
+  @Test
   public void testWritable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -162,6 +167,7 @@
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
   }
+  @Test
   public void testWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
@@ -180,7 +186,8 @@
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
-
+
+  @Test
   public void testWideWritable2() throws Exception {
     Writable[] manyWrits = makeRandomWritables(71);
@@ -202,6 +209,7 @@
    * Tests a tuple writable with more than 64 values and the values set written
    * spread far apart.
    */
+  @Test
   public void testSparseWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
@@ -220,7 +228,7 @@
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
-
+  @Test
   public void testWideTuple() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -240,7 +248,7 @@
       }
     }
   }
-
+  @Test
   public void testWideTuple2() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -264,6 +272,7 @@
   /**
    * Tests that we can write more than 64 values.
    */
+  @Test
   public void testWideTupleBoundary() throws Exception {
     Text emptyText = new Text("Should not be set written");
     Writable[] values = new Writable[65];
@@ -287,6 +296,7 @@
   /**
    * Tests compatibility with pre-0.21 versions of TupleWritable
   */
+  @Test
   public void testPreVersion21Compatibility() throws Exception {
     Writable[] manyWrits = makeRandomWritables(64);
     PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
@@ -304,7 +314,7 @@
     assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
-
+  @Test
   public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
     Writable[] manyWrits = new Writable[0];
     PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
index 3ca175a5049..ae5572f5dcd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
@@ -21,8 +21,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
@@ -35,13 +33,16 @@ import org.apache.hadoop.mapred.JobConfigurable;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
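// The recurring recipe of this patch, condensed into one hypothetical minimal
// test (not from the Hadoop tree): drop "extends TestCase", statically import
// the Assert methods the class body already calls, and annotate each public
// void test* method with @Test so JUnit 4 discovers it by annotation rather
// than by method-name convention.
import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class MigratedTestSketch { // was: extends junit.framework.TestCase
  @Test // discovery now comes from the annotation, not the name prefix
  public void testSomething() {
    assertTrue("inherited assert replaced by a static import", 1 + 1 == 2);
  }
}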
+import static org.junit.Assert.assertTrue;
-public class TestWrappedRecordReaderClassloader extends TestCase {
+public class TestWrappedRecordReaderClassloader {
   /**
    * Tests the class loader set by {@link JobConf#setClassLoader(ClassLoader)}
    * is inherited by any {@link WrappedRecordReader}s created by
    * {@link CompositeRecordReader}
    */
+  @Test
   public void testClassLoader() throws Exception {
     JobConf job = new JobConf();
     Fake_ClassLoader classLoader = new Fake_ClassLoader();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
index 8bd855433ea..b916026272e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.mapred.lib;
 import java.io.DataOutputStream;
 import java.io.IOException;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -32,9 +30,12 @@ import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
-public class TestDelegatingInputFormat extends TestCase {
-
+public class TestDelegatingInputFormat {
+  @Test
   public void testSplitting() throws Exception {
     JobConf conf = new JobConf();
     MiniDFSCluster dfs = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
index db9c219e9c1..388de0fb88d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
@@ -20,13 +20,14 @@ package org.apache.hadoop.mapred.lib;
 import java.io.*;
 import java.util.*;
-import junit.framework.TestCase;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
-public class TestLineInputFormat extends TestCase {
+public class TestLineInputFormat {
   private static int MAX_LENGTH = 200;
   private static JobConf defaultConf = new JobConf();
@@ -43,7 +44,7 @@
   private static Path workDir =
       new Path(new Path(System.getProperty("test.build.data", "."), "data"),
           "TestLineInputFormat");
-
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     Path file = new Path(workDir, "test.txt");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
index 3a9cb9ec337..115a6f70d08 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
@@ -36,7 +36,6 @@ import static org.junit.Assert.assertEquals;
  * @see TestDelegatingInputFormat
  */
 public class TestMultipleInputs {
-
   @Test
   public void testAddInputPathWithFormat() {
     final JobConf conf = new JobConf();
@@ -49,7 +48,6 @@
     assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
         .getClass());
   }
-
   @Test
   public void testAddInputPathWithMapper() {
     final JobConf conf = new JobConf();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
index e2fdd429c56..b839a2c3afe 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
@@ -22,13 +22,14 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import java.text.NumberFormat;
-public class TestAggregates extends TestCase {
+public class TestAggregates {
   private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -36,7 +37,7 @@
     idFormat.setGroupingUsed(false);
   }
-
+  @Test
   public void testAggregates() throws Exception {
     launch();
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
index 968bb066565..203da4e0b7c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.mapred.lib.db;
 import java.io.IOException;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
-public class TestConstructQuery extends TestCase {
-
+public class TestConstructQuery {
   private String[] fieldNames = new String[] { "id", "name", "value" };
   private String[] nullFieldNames = new String[] { null, null, null };
   private String expected = "INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);";
@@ -33,15 +33,15 @@ public class TestConstructQuery extends TestCase {
   private DBOutputFormat format = new DBOutputFormat();
-
-  public void testConstructQuery() {
+  @Test
+  public void testConstructQuery() {
     String actual = format.constructQuery("hadoop_output", fieldNames);
     assertEquals(expected, actual);
-
+
     actual = format.constructQuery("hadoop_output", nullFieldNames);
     assertEquals(nullExpected, actual);
   }
-
+  @Test
   public void testSetOutput() throws IOException {
     JobConf job = new JobConf();
     DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
index dd7817d65b5..34b1d75dfed 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
@@ -44,10 +44,13 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
-import junit.framework.TestCase;
 @Ignore
-public class TestPipes extends TestCase {
+public class TestPipes {
   private static final Log LOG = LogFactory.getLog(TestPipes.class.getName());
@@ -66,7 +69,7 @@
     fs.delete(p, true);
     assertFalse("output not cleaned up", fs.exists(p));
   }
-
+  @Test
   public void testPipes() throws IOException {
     if (System.getProperty("compile.c++") == null) {
       LOG.info("compile.c++ is not defined, so skipping TestPipes");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
index 29640c8854b..8177ecd405b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
@@ -17,36 +17,42 @@
  */
 package org.apache.hadoop.mapreduce;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.fs.*;
 import org.apache.hadoop.mapred.LocalJobRunner;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
-
 import org.junit.Test;
-import junit.framework.TestCase;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 /**
  * Stress tests for the LocalJobRunner
 */
-public class TestLocalRunner extends TestCase {
+public class TestLocalRunner {
   private static final Log LOG = LogFactory.getLog(TestLocalRunner.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
index 6f45b5f5dc2..8fe9078e9e0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
@@ -17,6 +17,23 @@
  */
 package org.apache.hadoop.mapreduce;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.mapreduce.tools.CLI;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Test;
+
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -31,23 +48,11 @@ import java.io.PipedOutputStream;
 import java.io.PrintStream;
 import java.util.Arrays;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.junit.Assert;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.mapreduce.tools.CLI;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 /**
  test CLI class. CLI class implemented the Tool interface.
@@ -103,7 +108,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
       throw new IOException();
     }
   }
-
+  @Test
   public void testJobSubmissionSpecsAndFiles() throws Exception {
     Configuration conf = createJobConf();
     Job job = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(),
@@ -127,7 +132,7 @@
   /**
    * main test method
    */
-
+  @Test
   public void testJobClient() throws Exception {
     Configuration conf = createJobConf();
     Job job = runJob(conf);
@@ -180,8 +185,7 @@
     runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), "UTF-8");
-    Assert
-      .assertTrue(answer.contains("Killed task " + taid + " by failing it"));
+    assertTrue(answer.contains("Killed task " + taid + " by failing it"));
   }
   /**
@@ -199,7 +203,7 @@
     runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), "UTF-8");
-    Assert.assertTrue(answer.contains("Killed task " + taid));
+    assertTrue(answer.contains("Killed task " + taid));
   }
   /**
@@ -686,6 +690,7 @@
    * Test -list option displays job name.
    * The name is capped to 20 characters for display.
    */
+  @Test
   public void testJobName() throws Exception {
     Configuration conf = createJobConf();
     CLI jc = createJobClient();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
index 1e4f4de9f93..a69e06eacd9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
@@ -25,8 +25,6 @@ import java.io.Writer;
 import java.util.Arrays;
 import java.util.List;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -42,13 +40,16 @@ import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
 /**
  * A JUnit test to test the Map-Reduce framework's feature to create part
 * files only if there is an explicit output.collect. This helps in preventing
 * 0 byte files
 */
-public class TestMapReduceLazyOutput extends TestCase {
+public class TestMapReduceLazyOutput {
   private static final int NUM_HADOOP_SLAVES = 3;
   private static final int NUM_MAPS_PER_NODE = 2;
   private static final Path INPUT = new Path("/testlazy/input");
@@ -122,7 +123,7 @@
     }
   }
-
+  @Test
   public void testLazyOutput() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
index 5cf08991869..b757fb2c34f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
@@ -27,8 +27,6 @@ import java.io.Writer;
 import java.util.ArrayList;
 import java.util.StringTokenizer;
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -43,12 +41,15 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
 /**
  * A JUnit test to test the Map-Reduce framework's support for the
  * "mark-reset" functionality in Reduce Values Iterator
 */
-public class TestValueIterReset extends TestCase {
+public class TestValueIterReset {
   private static final int NUM_MAPS = 1;
   private static final int NUM_TESTS = 4;
   private static final int NUM_VALUES = 40;
@@ -518,6 +519,7 @@
     }
   }
+  @Test
   public void testValueIterReset() {
     try {
       Configuration conf = new Configuration();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
index 4d84fa9e108..308b7775a67 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapreduce;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -26,7 +27,6 @@ import static org.mockito.Mockito.doNothing;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -44,8 +44,7 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.junit.Test;
-public class TestYarnClientProtocolProvider extends TestCase {
-
+public class TestYarnClientProtocolProvider {
   private static final RecordFactory recordFactory = RecordFactoryProvider.
       getRecordFactory(null);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
index 788ad41ff96..3aac54e7159 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
@@ -18,22 +18,24 @@
 package org.apache.hadoop.mapreduce.lib.aggregate;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.Utils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;
-import junit.framework.TestCase;
-import java.io.*;
 import java.text.NumberFormat;
-public class TestMapReduceAggregates extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestMapReduceAggregates {
   private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -41,7 +43,7 @@
     idFormat.setGroupingUsed(false);
   }
-
+  @Test
   public void testAggregates() throws Exception {
     launch();
   }
@@ -123,11 +125,4 @@
     fs.delete(OUTPUT_DIR, true);
     fs.delete(INPUT_DIR, true);
   }
-
-  /**
-   * Launches all the tasks in order.
- */ - public static void main(String[] argv) throws Exception { - launch(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java index bff25d20038..014855f7d6a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java @@ -19,14 +19,15 @@ package org.apache.hadoop.mapreduce.lib.db; import java.io.IOException; -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.Job; +import org.junit.Test; -public class TestDBOutputFormat extends TestCase { - +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +public class TestDBOutputFormat { private String[] fieldNames = new String[] { "id", "name", "value" }; private String[] nullFieldNames = new String[] { null, null, null }; private String expected = "INSERT INTO hadoop_output " + @@ -35,15 +36,17 @@ public class TestDBOutputFormat extends TestCase { private DBOutputFormat format = new DBOutputFormat(); - - public void testConstructQuery() { + + @Test + public void testConstructQuery() { String actual = format.constructQuery("hadoop_output", fieldNames); assertEquals(expected, actual); actual = format.constructQuery("hadoop_output", nullFieldNames); assertEquals(nullExpected, actual); } - + + @Test public void testSetOutput() throws IOException { Job job = Job.getInstance(new Configuration()); DBOutputFormat.setOutput(job, "hadoop_output", fieldNames); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java index e50aba4f462..8b5d907dcdc 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java @@ -17,15 +17,15 @@ */ package org.apache.hadoop.mapreduce.lib.db; -import java.io.IOException; -import java.math.BigDecimal; +import org.junit.Test; + import java.sql.SQLException; -import java.util.ArrayList; import java.util.List; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; -public class TestIntegerSplitter extends TestCase { +public class TestIntegerSplitter { private long [] toLongArray(List in) { long [] out = new long[in.size()]; for (int i = 0; i < in.size(); i++) { @@ -70,12 +70,14 @@ public class TestIntegerSplitter extends TestCase { } } + @Test public void testEvenSplits() throws SQLException { List splits = new IntegerSplitter().split(10, 0, 100); long [] expected = { 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 }; assertLongArrayEquals(expected, 
toLongArray(splits)); } + @Test public void testOddSplits() throws SQLException { List splits = new IntegerSplitter().split(10, 0, 95); long [] expected = { 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 95 }; @@ -83,12 +85,14 @@ public class TestIntegerSplitter extends TestCase { } + @Test public void testSingletonSplit() throws SQLException { List splits = new IntegerSplitter().split(1, 5, 5); long [] expected = { 5, 5 }; assertLongArrayEquals(expected, toLongArray(splits)); } + @Test public void testSingletonSplit2() throws SQLException { // Same test, but overly-high numSplits List splits = new IntegerSplitter().split(5, 5, 5); @@ -96,6 +100,7 @@ public class TestIntegerSplitter extends TestCase { assertLongArrayEquals(expected, toLongArray(splits)); } + @Test public void testTooManySplits() throws SQLException { List splits = new IntegerSplitter().split(5, 3, 5); long [] expected = { 3, 4, 5 }; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java index 045e3a1b1f6..e16f4234877 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java @@ -17,15 +17,16 @@ */ package org.apache.hadoop.mapreduce.lib.db; -import java.io.IOException; +import org.junit.Test; + import java.math.BigDecimal; import java.sql.SQLException; -import java.util.ArrayList; import java.util.List; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; -public class TestTextSplitter extends TestCase { +public class TestTextSplitter { public String formatArray(Object [] ar) { StringBuilder sb = new StringBuilder(); @@ -62,48 +63,56 @@ public class TestTextSplitter extends TestCase { } } + @Test public void testStringConvertEmpty() { TextSplitter splitter = new TextSplitter(); BigDecimal emptyBigDec = splitter.stringToBigDecimal(""); assertEquals(BigDecimal.ZERO, emptyBigDec); } + @Test public void testBigDecConvertEmpty() { TextSplitter splitter = new TextSplitter(); String emptyStr = splitter.bigDecimalToString(BigDecimal.ZERO); assertEquals("", emptyStr); } + @Test public void testConvertA() { TextSplitter splitter = new TextSplitter(); String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("A")); assertEquals("A", out); } + @Test public void testConvertZ() { TextSplitter splitter = new TextSplitter(); String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("Z")); assertEquals("Z", out); } + @Test public void testConvertThreeChars() { TextSplitter splitter = new TextSplitter(); String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("abc")); assertEquals("abc", out); } + @Test public void testConvertStr() { TextSplitter splitter = new TextSplitter(); String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("big str")); assertEquals("big str", out); } + @Test public void testConvertChomped() { TextSplitter splitter = new TextSplitter(); String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("AVeryLongStringIndeed")); assertEquals("AVeryLon", out); } + @Test public void 
testAlphabetSplit() throws SQLException { // This should give us 25 splits, one per letter. TextSplitter splitter = new TextSplitter(); @@ -113,6 +122,7 @@ public class TestTextSplitter extends TestCase { assertArrayEquals(expected, splits.toArray(new String [0])); } + @Test public void testCommonPrefix() throws SQLException { // Splits between 'Hand' and 'Hardy' TextSplitter splitter = new TextSplitter(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java index 91070f89c42..6f9183ab21b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java @@ -18,15 +18,19 @@ package org.apache.hadoop.mapreduce.lib.fieldsel; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.MapReduceTestUtil; +import org.junit.Test; -import junit.framework.TestCase; import java.text.NumberFormat; -public class TestMRFieldSelection extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class TestMRFieldSelection { private static NumberFormat idFormat = NumberFormat.getInstance(); static { @@ -34,6 +38,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance(); idFormat.setGroupingUsed(false); } + @Test public void testFieldSelection() throws Exception { launch(); } @@ -114,11 +119,4 @@ private static NumberFormat idFormat = NumberFormat.getInstance(); System.out.println("ExpectedData:"); System.out.println(expectedOutput.toString()); } - - /** - * Launches all the tasks in order. 
- */ - public static void main(String[] argv) throws Exception { - launch(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java index f0b3d57486c..cbf9d183ef2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java @@ -18,11 +18,12 @@ package org.apache.hadoop.mapreduce.lib.input; -import java.io.IOException; -import java.util.Random; - -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.DataInputBuffer; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; @@ -31,12 +32,18 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.task.MapContextImpl; +import org.junit.Test; -import junit.framework.TestCase; +import java.io.IOException; +import java.util.Random; -public class TestMRSequenceFileAsBinaryInputFormat extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class TestMRSequenceFileAsBinaryInputFormat { private static final int RECORDS = 10000; + @Test public void testBinary() throws IOException, InterruptedException { Job job = Job.getInstance(); FileSystem fs = FileSystem.getLocal(job.getConfiguration()); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java index 2d03c2dd96a..335ce050d82 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java @@ -18,11 +18,13 @@ package org.apache.hadoop.mapreduce.lib.input; -import java.util.*; -import junit.framework.TestCase; - -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import 
org.apache.hadoop.mapreduce.Job; @@ -31,12 +33,19 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.task.MapContextImpl; -import org.apache.hadoop.conf.*; +import org.junit.Test; -public class TestMRSequenceFileAsTextInputFormat extends TestCase { +import java.util.BitSet; +import java.util.Random; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +public class TestMRSequenceFileAsTextInputFormat { private static int MAX_LENGTH = 10000; private static Configuration conf = new Configuration(); + @Test public void testFormat() throws Exception { Job job = Job.getInstance(conf); FileSystem fs = FileSystem.getLocal(conf); @@ -112,8 +121,4 @@ public class TestMRSequenceFileAsTextInputFormat extends TestCase { } } - - public static void main(String[] args) throws Exception { - new TestMRSequenceFileAsTextInputFormat().testFormat(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java index edf7e1ad10d..89aa7b23057 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java @@ -18,14 +18,14 @@ package org.apache.hadoop.mapreduce.lib.input; -import java.io.*; -import java.util.*; -import junit.framework.TestCase; - -import org.apache.commons.logging.*; - -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; @@ -34,10 +34,15 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.task.MapContextImpl; -import org.apache.hadoop.conf.*; +import org.junit.Test; -public class TestMRSequenceFileInputFilter extends TestCase { - private static final Log LOG = +import java.io.IOException; +import java.util.Random; + +import static org.junit.Assert.assertEquals; + +public class TestMRSequenceFileInputFilter { + private static final Log LOG = LogFactory.getLog(TestMRSequenceFileInputFilter.class.getName()); private static final int MAX_LENGTH = 15000; @@ -113,7 +118,8 @@ public class TestMRSequenceFileInputFilter extends TestCase { } return count; } - + + @Test public void testRegexFilter() throws Exception { // set the filter class LOG.info("Testing Regex Filter with patter: \\A10*"); @@ -138,6 +144,7 @@ public class TestMRSequenceFileInputFilter extends TestCase { fs.delete(inDir, true); } + @Test public void 
testPercentFilter() throws Exception { LOG.info("Testing Percent Filter with frequency: 1000"); // set the filter class @@ -165,7 +172,8 @@ public class TestMRSequenceFileInputFilter extends TestCase { // clean up fs.delete(inDir, true); } - + + @Test public void testMD5Filter() throws Exception { // set the filter class LOG.info("Testing MD5 Filter with frequency: 1000"); @@ -187,9 +195,4 @@ public class TestMRSequenceFileInputFilter extends TestCase { // clean up fs.delete(inDir, true); } - - public static void main(String[] args) throws Exception { - TestMRSequenceFileInputFilter filter = new TestMRSequenceFileInputFilter(); - filter.testRegexFilter(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java index 7b3878d9475..477866f4e35 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java @@ -18,17 +18,28 @@ package org.apache.hadoop.mapreduce.lib.input; -import java.io.*; -import java.util.*; -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; -import org.apache.hadoop.mapreduce.*; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapreduce.InputSplit; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.MapContext; +import org.apache.hadoop.mapreduce.MapReduceTestUtil; +import org.apache.hadoop.mapreduce.RecordReader; +import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.task.MapContextImpl; +import org.junit.Test; -public class TestNLineInputFormat extends TestCase { +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.util.List; + +import static org.junit.Assert.assertEquals; + +public class TestNLineInputFormat { private static int MAX_LENGTH = 200; private static Configuration conf = new Configuration(); @@ -45,7 +56,8 @@ public class TestNLineInputFormat extends TestCase { private static Path workDir = new Path(new Path(System.getProperty("test.build.data", "."), "data"), "TestNLineInputFormat"); - + + @Test public void testFormat() throws Exception { Job job = Job.getInstance(conf); Path file = new Path(workDir, "test.txt"); @@ -116,8 +128,4 @@ public class TestNLineInputFormat extends TestCase { } } } - - public static void main(String[] args) throws Exception { - new TestNLineInputFormat().testFormat(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java index d245bfd6cde..1173ea4fa47 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java @@ -19,11 +19,6 @@ package org.apache.hadoop.mapreduce.lib.join; import java.io.IOException; -import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; -import junit.extensions.TestSetup; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; @@ -37,23 +32,31 @@ import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; -public class TestJoinDatamerge extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class TestJoinDatamerge { private static MiniDFSCluster cluster = null; - public static Test suite() { - TestSetup setup = new TestSetup(new TestSuite(TestJoinDatamerge.class)) { - protected void setUp() throws Exception { - Configuration conf = new Configuration(); - cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); - } - protected void tearDown() throws Exception { - if (cluster != null) { - cluster.shutdown(); - } - } - }; - return setup; + + @BeforeClass + public static void setUp() throws Exception { + Configuration conf = new Configuration(); + cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); + } + + @AfterClass + public static void tearDown() throws Exception { + if (cluster != null) { + cluster.shutdown(); + } } private static SequenceFile.Writer[] createWriters(Path testdir, @@ -111,7 +114,7 @@ public class TestJoinDatamerge extends TestCase { extends Mapper{ protected final static IntWritable one = new IntWritable(1); int srcs; - + public void setup(Context context) { srcs = context.getConfiguration().getInt("testdatamerge.sources", 0); assertTrue("Invalid src count: " + srcs, srcs > 0); @@ -123,7 +126,7 @@ public class TestJoinDatamerge extends TestCase { protected final static IntWritable one = new IntWritable(1); int srcs; - + public void setup(Context context) { srcs = context.getConfiguration().getInt("testdatamerge.sources", 0); assertTrue("Invalid src count: " + srcs, srcs > 0); @@ -272,10 +275,12 @@ public class TestJoinDatamerge extends TestCase { base.getFileSystem(conf).delete(base, true); } + @Test public void testSimpleInnerJoin() throws Exception { joinAs("inner", InnerJoinMapChecker.class, InnerJoinReduceChecker.class); } + @Test public void testSimpleOuterJoin() throws Exception { joinAs("outer", OuterJoinMapChecker.class, OuterJoinReduceChecker.class); } @@ -322,11 +327,13 @@ public class TestJoinDatamerge extends TestCase { } return product; } - + + @Test public void testSimpleOverride() throws Exception { joinAs("override", OverrideMapChecker.class, OverrideReduceChecker.class); } + @Test public void testNestedJoin() throws Exception { // outer(inner(S1,...,Sn),outer(S1,...Sn)) final int SOURCES = 3; @@ -422,6 +429,7 @@ public class TestJoinDatamerge extends TestCase { } + @Test 
public void testEmptyJoin() throws Exception { Configuration conf = new Configuration(); Path base = cluster.getFileSystem().makeQualified(new Path("/empty")); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java index 151bc875ad3..b6e76069d95 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java @@ -20,11 +20,6 @@ package org.apache.hadoop.mapreduce.lib.join; import java.io.IOException; import java.util.List; -import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; -import junit.extensions.TestSetup; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.MiniDFSCluster; @@ -36,8 +31,14 @@ import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; import org.apache.hadoop.mapreduce.task.MapContextImpl; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; -public class TestJoinProperties extends TestCase { +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class TestJoinProperties { private static MiniDFSCluster cluster = null; final static int SOURCES = 3; @@ -46,21 +47,19 @@ public class TestJoinProperties extends TestCase { static Path[] src; static Path base; - public static Test suite() { - TestSetup setup = new TestSetup(new TestSuite(TestJoinProperties.class)) { - protected void setUp() throws Exception { - Configuration conf = new Configuration(); - cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); - base = cluster.getFileSystem().makeQualified(new Path("/nested")); - src = generateSources(conf); - } - protected void tearDown() throws Exception { - if (cluster != null) { - cluster.shutdown(); - } - } - }; - return setup; + @BeforeClass + public static void setUp() throws Exception { + Configuration conf = new Configuration(); + cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); + base = cluster.getFileSystem().makeQualified(new Path("/nested")); + src = generateSources(conf); + } + + @AfterClass + public static void tearDown() throws Exception { + if (cluster != null) { + cluster.shutdown(); + } } // Sources from 0 to srcs-2 have IntWritable key and IntWritable value @@ -233,6 +232,7 @@ public class TestJoinProperties extends TestCase { } // outer(outer(A, B), C) == outer(A,outer(B, C)) == outer(A, B, C) + @Test public void testOuterAssociativity() throws Exception { Configuration conf = new Configuration(); testExpr1(conf, "outer", TestType.OUTER_ASSOCIATIVITY, 33); @@ -241,6 +241,7 @@ public class TestJoinProperties extends TestCase { } // inner(inner(A, B), C) == inner(A,inner(B, C)) == inner(A, B, C) + @Test public void testInnerAssociativity() throws Exception { Configuration conf = new Configuration(); testExpr1(conf, "inner", TestType.INNER_ASSOCIATIVITY, 2); @@ -249,6 +250,7 @@ public class TestJoinProperties extends 
TestCase { } // override(inner(A, B), A) == A + @Test public void testIdentity() throws Exception { Configuration conf = new Configuration(); testExpr4(conf); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java index d35941fc884..093da266b95 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java @@ -24,8 +24,6 @@ import java.io.DataOutputStream; import java.util.Arrays; import java.util.Random; -import junit.framework.TestCase; - import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.FloatWritable; @@ -33,8 +31,13 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; +import org.junit.Test; -public class TestJoinTupleWritable extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class TestJoinTupleWritable { private TupleWritable makeTuple(Writable[] writs) { Writable[] sub1 = { writs[1], writs[2] }; @@ -97,6 +100,7 @@ public class TestJoinTupleWritable extends TestCase { return i; } + @Test public void testIterable() throws Exception { Random r = new Random(); Writable[] writs = { @@ -118,6 +122,7 @@ public class TestJoinTupleWritable extends TestCase { verifIter(writs, t, 0); } + @Test public void testNestedIterable() throws Exception { Random r = new Random(); Writable[] writs = { @@ -136,6 +141,7 @@ public class TestJoinTupleWritable extends TestCase { assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0)); } + @Test public void testWritable() throws Exception { Random r = new Random(); Writable[] writs = { @@ -159,6 +165,7 @@ public class TestJoinTupleWritable extends TestCase { assertTrue("Failed to write/read tuple", sTuple.equals(dTuple)); } + @Test public void testWideWritable() throws Exception { Writable[] manyWrits = makeRandomWritables(131); @@ -178,7 +185,8 @@ public class TestJoinTupleWritable extends TestCase { assertEquals("All tuple data has not been read from the stream", -1, in.read()); } - + + @Test public void testWideWritable2() throws Exception { Writable[] manyWrits = makeRandomWritables(71); @@ -201,6 +209,7 @@ public class TestJoinTupleWritable extends TestCase { * Tests a tuple writable with more than 64 values and the values set written * spread far apart. 
*/ + @Test public void testSparseWideWritable() throws Exception { Writable[] manyWrits = makeRandomWritables(131); @@ -220,7 +229,8 @@ public class TestJoinTupleWritable extends TestCase { assertEquals("All tuple data has not been read from the stream", -1, in.read()); } - + + @Test public void testWideTuple() throws Exception { Text emptyText = new Text("Should be empty"); Writable[] values = new Writable[64]; @@ -241,7 +251,8 @@ public class TestJoinTupleWritable extends TestCase { } } } - + + @Test public void testWideTuple2() throws Exception { Text emptyText = new Text("Should be empty"); Writable[] values = new Writable[64]; @@ -266,6 +277,7 @@ public class TestJoinTupleWritable extends TestCase { /** * Tests that we can write more than 64 values. */ + @Test public void testWideTupleBoundary() throws Exception { Text emptyText = new Text("Should not be set written"); Writable[] values = new Writable[65]; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java index 36cf1872ad4..680e246b4e3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java @@ -17,23 +17,32 @@ */ package org.apache.hadoop.mapreduce.lib.join; -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.NullWritable; -import org.apache.hadoop.mapreduce.*; +import org.apache.hadoop.mapreduce.InputSplit; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.MRJobConfig; +import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR; +import org.apache.hadoop.mapreduce.RecordReader; +import org.apache.hadoop.mapreduce.TaskAttemptContext; +import org.apache.hadoop.mapreduce.TaskAttemptID; +import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; +import org.junit.Test; -public class TestWrappedRRClassloader extends TestCase { +import static org.junit.Assert.assertTrue; + +public class TestWrappedRRClassloader { /** * Tests the class loader set by * {@link Configuration#setClassLoader(ClassLoader)} * is inherited by any {@link WrappedRecordReader}s created by * {@link CompositeRecordReader} */ + @Test public void testClassLoader() throws Exception { Configuration conf = new Configuration(); Fake_ClassLoader classLoader = new Fake_ClassLoader(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java index 2e40f72fdd2..5a8aeda83be 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java @@ -18,12 +18,17 @@ package org.apache.hadoop.mapreduce.lib.output; -import java.io.IOException; -import java.util.Random; - +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.BooleanWritable; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.io.DoubleWritable; +import org.apache.hadoop.io.FloatWritable; +import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.mapred.InvalidJobConfException; import org.apache.hadoop.mapreduce.InputFormat; @@ -38,16 +43,22 @@ import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; import org.apache.hadoop.mapreduce.task.MapContextImpl; +import org.junit.Test; -import junit.framework.TestCase; -import org.apache.commons.logging.*; +import java.io.IOException; +import java.util.Random; -public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +public class TestMRSequenceFileAsBinaryOutputFormat { private static final Log LOG = LogFactory.getLog(TestMRSequenceFileAsBinaryOutputFormat.class.getName()); private static final int RECORDS = 10000; - + + @Test public void testBinary() throws IOException, InterruptedException { Configuration conf = new Configuration(); Job job = Job.getInstance(conf); @@ -144,7 +155,8 @@ public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase { assertEquals("Some records not found", RECORDS, count); } - public void testSequenceOutputClassDefaultsToMapRedOutputClass() + @Test + public void testSequenceOutputClassDefaultsToMapRedOutputClass() throws IOException { Job job = Job.getInstance(); // Setting Random class to test getSequenceFileOutput{Key,Value}Class @@ -172,7 +184,8 @@ public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase { SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job)); } - public void testcheckOutputSpecsForbidRecordCompression() + @Test + public void testcheckOutputSpecsForbidRecordCompression() throws IOException { Job job = Job.getInstance(); FileSystem fs = FileSystem.getLocal(job.getConfiguration()); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java index 7be538ecf41..f83bc11a216 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java @@ -22,11 +22,14 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.BinaryComparable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.util.ReflectionUtils; +import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; -public class TestBinaryPartitioner extends TestCase { +public class TestBinaryPartitioner { + @Test public void testDefaultOffsets() { Configuration conf = new Configuration(); BinaryPartitioner partitioner = @@ -50,7 +53,8 @@ public class TestBinaryPartitioner extends TestCase { partition2 = partitioner.getPartition(key2, null, 10); assertTrue(partition1 != partition2); } - + + @Test public void testCustomOffsets() { Configuration conf = new Configuration(); BinaryComparable key1 = new BytesWritable(new byte[] { 1, 2, 3, 4, 5 }); @@ -75,7 +79,8 @@ public class TestBinaryPartitioner extends TestCase { partition2 = partitioner.getPartition(key2, null, 10); assertEquals(partition1, partition2); } - + + @Test public void testLowerBound() { Configuration conf = new Configuration(); BinaryPartitioner.setLeftOffset(conf, 0); @@ -87,7 +92,8 @@ public class TestBinaryPartitioner extends TestCase { int partition2 = partitioner.getPartition(key2, null, 10); assertTrue(partition1 != partition2); } - + + @Test public void testUpperBound() { Configuration conf = new Configuration(); BinaryPartitioner.setRightOffset(conf, 4); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java index 6bad846f6d3..4d05d13d445 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java @@ -19,14 +19,17 @@ package org.apache.hadoop.mapreduce.lib.partition; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; -public class TestKeyFieldHelper extends TestCase { +public class TestKeyFieldHelper { private static final Log LOG = LogFactory.getLog(TestKeyFieldHelper.class); /** * Test is key-field-helper's parse option. */ + @Test public void testparseOption() throws Exception { KeyFieldHelper helper = new KeyFieldHelper(); helper.setKeyFieldSeparator("\t"); @@ -212,6 +215,7 @@ public class TestKeyFieldHelper extends TestCase { /** * Test is key-field-helper's getWordLengths. */ + @Test public void testGetWordLengths() throws Exception { KeyFieldHelper helper = new KeyFieldHelper(); helper.setKeyFieldSeparator("\t"); @@ -270,6 +274,7 @@ public class TestKeyFieldHelper extends TestCase { /** * Test is key-field-helper's getStartOffset/getEndOffset. 
*/ + @Test public void testgetStartEndOffset() throws Exception { KeyFieldHelper helper = new KeyFieldHelper(); helper.setKeyFieldSeparator("\t"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java index 9c2fb48d9bf..00b415f32cb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java @@ -19,14 +19,16 @@ package org.apache.hadoop.mapreduce.lib.partition; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; +import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; -public class TestMRKeyFieldBasedPartitioner extends TestCase { +public class TestMRKeyFieldBasedPartitioner { /** * Test is key-field-based partitioned works with empty key. */ + @Test public void testEmptyKey() throws Exception { int numReducers = 10; KeyFieldBasedPartitioner kfbp = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java index a844737e09d..bdb4ff4794e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.FileSystem; @@ -41,8 +39,11 @@ import org.apache.hadoop.io.serializer.JavaSerializationComparator; import org.apache.hadoop.io.serializer.Serialization; import org.apache.hadoop.io.serializer.WritableSerialization; import org.apache.hadoop.mapreduce.MRJobConfig; +import org.junit.Test; -public class TestTotalOrderPartitioner extends TestCase { +import static org.junit.Assert.assertEquals; + +public class TestTotalOrderPartitioner { private static final Text[] splitStrings = new Text[] { // -inf // 0 @@ -140,6 +141,7 @@ public class TestTotalOrderPartitioner extends TestCase { return p; } + @Test public void testTotalOrderWithCustomSerialization() throws Exception { TotalOrderPartitioner partitioner = new TotalOrderPartitioner(); @@ -165,6 +167,7 @@ public class TestTotalOrderPartitioner extends TestCase { } } + @Test public void testTotalOrderMemCmp() throws Exception { TotalOrderPartitioner partitioner = new TotalOrderPartitioner(); @@ -184,6 +187,7 @@ public class TestTotalOrderPartitioner extends TestCase { } } + @Test public void testTotalOrderBinarySearch() throws Exception 
{ TotalOrderPartitioner partitioner = new TotalOrderPartitioner(); @@ -216,6 +220,7 @@ public class TestTotalOrderPartitioner extends TestCase { } } + @Test public void testTotalOrderCustomComparator() throws Exception { TotalOrderPartitioner partitioner = new TotalOrderPartitioner(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java index e1849a3ce9c..07b5d8b9f50 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java @@ -20,8 +20,6 @@ package org.apache.hadoop.mapreduce.util; import java.io.File; import java.io.IOException; -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -30,20 +28,27 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.mapreduce.util.MRAsyncDiskService; +import org.junit.Before; import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + /** * A test for MRAsyncDiskService. */ -public class TestMRAsyncDiskService extends TestCase { +public class TestMRAsyncDiskService { public static final Log LOG = LogFactory.getLog(TestMRAsyncDiskService.class); private static String TEST_ROOT_DIR = new Path(System.getProperty( "test.build.data", "/tmp")).toString(); - @Override - protected void setUp() { + @Before + public void setUp() { FileUtil.fullyDelete(new File(TEST_ROOT_DIR)); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java index aa769f85974..f68cc8310a6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.mapreduce.v2; -import junit.framework.TestCase; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; @@ -29,22 +28,25 @@ import org.apache.hadoop.mapred.MiniMRCluster; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.ProxyUsers; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; -import java.net.InetAddress; -import java.io.File; -import java.io.FileOutputStream; -import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; +import java.net.InetAddress; import 
java.security.PrivilegedExceptionAction; -public class TestMiniMRProxyUser extends TestCase { +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +public class TestMiniMRProxyUser { private MiniDFSCluster dfsCluster = null; private MiniMRCluster mrCluster = null; - - protected void setUp() throws Exception { - super.setUp(); + + @Before + public void setUp() throws Exception { if (System.getProperty("hadoop.log.dir") == null) { System.setProperty("hadoop.log.dir", "/tmp"); } @@ -91,15 +93,14 @@ public class TestMiniMRProxyUser extends TestCase { return mrCluster.createJobConf(); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { if (mrCluster != null) { mrCluster.shutdown(); } if (dfsCluster != null) { dfsCluster.shutdown(); } - super.tearDown(); } private void mrRun() throws Exception { @@ -125,11 +126,13 @@ public class TestMiniMRProxyUser extends TestCase { assertTrue(runJob.isComplete()); assertTrue(runJob.isSuccessful()); } - + + @Test public void __testCurrentUser() throws Exception { mrRun(); } + @Test public void testValidProxyUser() throws Exception { UserGroupInformation ugi = UserGroupInformation.createProxyUser("u1", UserGroupInformation.getLoginUser()); ugi.doAs(new PrivilegedExceptionAction() { @@ -142,6 +145,7 @@ public class TestMiniMRProxyUser extends TestCase { }); } + @Test public void ___testInvalidProxyUser() throws Exception { UserGroupInformation ugi = UserGroupInformation.createProxyUser("u2", UserGroupInformation.getLoginUser()); ugi.doAs(new PrivilegedExceptionAction() { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java index b6947f3fc48..e90c509d7a8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.mapreduce.v2; -import junit.framework.TestCase; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; @@ -28,17 +27,22 @@ import org.apache.hadoop.mapred.JobID; import org.apache.hadoop.mapred.MiniMRCluster; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.authorize.ProxyUsers; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import java.io.IOException; import java.net.InetAddress; -public class TestNonExistentJob extends TestCase { +import static org.junit.Assert.assertNull; + +public class TestNonExistentJob { private MiniDFSCluster dfsCluster = null; private MiniMRCluster mrCluster = null; - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { if (System.getProperty("hadoop.log.dir") == null) { System.setProperty("hadoop.log.dir", "/tmp"); } @@ -78,17 +82,17 @@ public class TestNonExistentJob extends TestCase { return mrCluster.createJobConf(); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { if (mrCluster != null) { mrCluster.shutdown(); } if 
(dfsCluster != null) { dfsCluster.shutdown(); } - super.tearDown(); } + @Test public void testGetInvalidJob() throws Exception { RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0")); assertNull(runJob); diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java index 7b7901faad1..860fb89cfcf 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java @@ -42,6 +42,11 @@ import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.mapred.SkipBadRecords; import org.apache.hadoop.mapred.Utils; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; public class TestStreamingBadRecords extends ClusterMapReduceTestCase { @@ -68,7 +73,8 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase utilTest.redirectIfAntJunit(); } - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { Properties props = new Properties(); props.setProperty(JTConfig.JT_RETIREJOBS, "false"); props.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "false"); @@ -242,6 +248,7 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase } */ + @Test public void testNoOp() { // Added to avoid warnings when running this disabled test }
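
The hunks above apply a single mechanical migration, JUnit 3 (junit.framework.TestCase) to JUnit 4 (org.junit annotations), across the jobclient and streaming test trees, and three conversion patterns recur. First, plain test classes drop the TestCase superclass, every test method gains @Test, and the assertions formerly inherited from TestCase become static imports; this is also why qualified calls such as Assert.assertTrue(...) in the TestMRJobClient hunk collapse to bare assertTrue(...). A minimal sketch of the converted shape (class and method names are illustrative, not taken from the patch):

    import org.junit.Test;

    import static org.junit.Assert.assertEquals;

    // Was: public class ExampleMigratedTest extends TestCase
    public class ExampleMigratedTest {

      // JUnit 4 discovers tests by annotation, not by the "test" name prefix.
      @Test
      public void testSimpleArithmetic() {
        // Assertions resolve through the static import rather than
        // through methods inherited from TestCase.
        assertEquals(4, 2 + 2);
      }
    }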
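
Second, classes that built a one-time fixture through a static suite() method wrapping the class in a junit.extensions.TestSetup (TestJoinDatamerge, TestJoinProperties) move that logic into static methods annotated @BeforeClass and @AfterClass. A sketch reusing the same MiniDFSCluster calls the patch itself makes; only the class name is invented:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    public class ExampleClusterTest {
      private static MiniDFSCluster cluster = null;

      @BeforeClass
      public static void setUp() throws Exception {
        // Runs once before the first test method, replacing TestSetup.setUp().
        Configuration conf = new Configuration();
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
      }

      @AfterClass
      public static void tearDown() {
        // Runs once after the last test method; the null check guards
        // against a setUp() that failed before the cluster came up.
        if (cluster != null) {
          cluster.shutdown();
        }
      }
    }

JUnit 4 requires @BeforeClass/@AfterClass methods to be public static void, which is why the patch also widens the visibility of the old protected hooks when it converts them.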
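
Third, per-test fixtures change from protected setUp()/tearDown() overrides into public methods annotated @Before and @After, with the super.setUp()/super.tearDown() calls dropped along with the superclass (TestMiniMRProxyUser, TestNonExistentJob, TestMRAsyncDiskService). A sketch with a stand-in fixture object, again with invented names:

    import org.junit.After;
    import org.junit.Before;

    public class ExampleFixtureTest {
      private StringBuilder scratch;  // stand-in for a real per-test fixture

      @Before
      public void setUp() {
        // Was: protected void setUp() throws Exception { super.setUp(); ... }
        scratch = new StringBuilder();
      }

      @After
      public void tearDown() {
        // Was: protected void tearDown() throws Exception { ...; super.tearDown(); }
        scratch = null;
      }
    }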
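
Finally, two smaller cleanups follow from the same migration. The deleted public static void main(String[]) stubs (TestMapReduceAggregates, TestMRFieldSelection, TestMRSequenceFileAsTextInputFormat, TestMRSequenceFileInputFilter, TestNLineInputFormat) existed only to invoke a single test outside a harness; once the runner discovers methods through @Test they are dead code, so removing them loses nothing. And subclasses of the shared ClusterMapReduceTestCase base (TestMRJobClient, TestStreamingBadRecords) keep their inheritance but still annotate their own methods with @Before and @Test, since that base class, converted elsewhere in this patch, no longer extends TestCase and therefore no longer implies JUnit 3 test discovery.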