diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
index 12bec0869f6..1caa2cdae6c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
@@ -28,6 +28,8 @@ import java.io.PrintStream;
import java.util.Date;
import java.util.StringTokenizer;
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -37,9 +39,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.*;
import org.junit.Ignore;
-import org.junit.Test;
- /**
+/**
* Distributed i/o benchmark.
*
* This test writes into or reads from a specified number of files.
@@ -67,7 +68,7 @@ import org.junit.Test;
*
*/
@Ignore
-public class DFSCIOTest {
+public class DFSCIOTest extends TestCase {
// Constants
private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
private static final int TEST_TYPE_READ = 0;
@@ -97,7 +98,6 @@ public class DFSCIOTest {
*
* @throws Exception
*/
- @Test
public void testIOs() throws Exception {
testIOs(10, 10);
}
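
For context on the pattern repeated throughout this patch: under JUnit 3 the runner discovers any public void no-arg method whose name starts with "test" by reflection, and the assert* helpers are inherited from junit.framework.TestCase, so neither @Test nor the static org.junit.Assert imports are needed once a class extends TestCase. A minimal sketch (the class and method names are illustrative, not part of this patch):

    import junit.framework.TestCase;

    public class ExampleTest extends TestCase {
      // Found by the test* naming convention; no @Test annotation required.
      public void testAddition() {
        assertEquals(4, 2 + 2);  // assertEquals is inherited from TestCase
      }
    }
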
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
index 8121015c938..d35dfe0cb47 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
@@ -34,6 +34,8 @@ import java.util.HashMap;
import java.net.InetSocketAddress;
import java.net.URI;
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
@@ -48,15 +50,8 @@ import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.lib.LongSumReducer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
-
-public class TestFileSystem {
+public class TestFileSystem extends TestCase {
private static final Log LOG = FileSystem.LOG;
private static Configuration conf = new Configuration();
@@ -71,7 +66,6 @@ public class TestFileSystem {
private static Path READ_DIR = new Path(ROOT, "fs_read");
private static Path DATA_DIR = new Path(ROOT, "fs_data");
- @Test
public void testFs() throws Exception {
testFs(10 * MEGA, 100, 0);
}
@@ -96,7 +90,6 @@ public class TestFileSystem {
fs.delete(READ_DIR, true);
}
- @Test
public static void testCommandFormat() throws Exception {
// This should go to TestFsShell.java when it is added.
CommandFormat cf;
@@ -495,7 +488,6 @@ public class TestFileSystem {
}
}
- @Test
public void testFsCache() throws Exception {
{
long now = System.currentTimeMillis();
@@ -569,7 +561,6 @@ public class TestFileSystem {
+ StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
}
- @Test
public void testFsClose() throws Exception {
{
Configuration conf = new Configuration();
@@ -590,7 +581,6 @@ public class TestFileSystem {
}
}
- @Test
public void testFsShutdownHook() throws Exception {
final Set<FileSystem> closed = Collections.synchronizedSet(new HashSet<FileSystem>());
Configuration conf = new Configuration();
@@ -622,7 +612,7 @@ public class TestFileSystem {
assertTrue(closed.contains(fsWithoutAuto));
}
- @Test
+
public void testCacheKeysAreCaseInsensitive()
throws Exception
{
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
index 31950fd6104..f2bc4edc46d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
@@ -23,18 +23,19 @@ import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.File;
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.After;
import org.junit.Before;
-import org.junit.Test;
/**
* Test Job History Log Analyzer.
*
* @see JHLogAnalyzer
*/
-public class TestJHLA {
+public class TestJHLA extends TestCase {
private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
private String historyLog = System.getProperty("test.build.data",
"build/test/data") + "/history/test.log";
@@ -132,7 +133,6 @@ public class TestJHLA {
/**
* Run log analyzer in test mode for file test.log.
*/
- @Test
public void testJHLA() {
String[] args = {"-test", historyLog, "-jobDelimiter", ".!!FILE=.*!!"};
JHLogAnalyzer.main(args);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
index 97dfa26acf4..1d7b98a6719 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
@@ -32,25 +32,21 @@ import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.*;
+import junit.framework.TestCase;
import org.apache.commons.logging.*;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-public class TestSequenceFileMergeProgress {
+public class TestSequenceFileMergeProgress extends TestCase {
private static final Log LOG = FileInputFormat.LOG;
private static final int RECORDS = 10000;
-
- @Test
+
public void testMergeProgressWithNoCompression() throws IOException {
runTest(SequenceFile.CompressionType.NONE);
}
- @Test
public void testMergeProgressWithRecordCompression() throws IOException {
runTest(SequenceFile.CompressionType.RECORD);
}
- @Test
public void testMergeProgressWithBlockCompression() throws IOException {
runTest(SequenceFile.CompressionType.BLOCK);
}
@@ -96,7 +92,7 @@ public class TestSequenceFileMergeProgress {
count++;
}
assertEquals(RECORDS, count);
- assertEquals(1.0f, rIter.getProgress().get(), 0.0000);
+ assertEquals(1.0f, rIter.getProgress().get());
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
index 8d33b1580a8..5bf4ff11b89 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
@@ -17,11 +17,10 @@
*/
package org.apache.hadoop.mapred;
+import junit.framework.TestCase;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.junit.After;
-import org.junit.Before;
import java.io.IOException;
import java.util.Map;
@@ -42,7 +41,7 @@ import java.util.Properties;
*
* The DFS filesystem is formatted before the testcase starts and after it ends.
*/
-public abstract class ClusterMapReduceTestCase {
+public abstract class ClusterMapReduceTestCase extends TestCase {
private MiniDFSCluster dfsCluster = null;
private MiniMRCluster mrCluster = null;
@@ -51,8 +50,9 @@ public abstract class ClusterMapReduceTestCase {
*
* @throws Exception
*/
- @Before
- public void setUp() throws Exception {
+ protected void setUp() throws Exception {
+ super.setUp();
+
startCluster(true, null);
}
@@ -139,9 +139,9 @@ public abstract class ClusterMapReduceTestCase {
*
* @throws Exception
*/
- @After
- public void tearDown() throws Exception {
+ protected void tearDown() throws Exception {
stopCluster();
+ super.tearDown();
}
/**
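
The setUp()/tearDown() changes above follow the JUnit 3 fixture protocol: both are protected overrides of junit.framework.TestCase, and each delegates to super so the framework's own fixture bookkeeping still runs. A minimal sketch of that protocol (ExampleClusterTestCase and the cluster comments are illustrative, not part of this patch):

    import junit.framework.TestCase;

    public abstract class ExampleClusterTestCase extends TestCase {
      protected void setUp() throws Exception {
        super.setUp();      // let TestCase initialize first
        // start the mini clusters here
      }

      protected void tearDown() throws Exception {
        // stop the mini clusters here
        super.tearDown();   // then let TestCase clean up
      }
    }
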
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java
index bc85703bc84..353185b59e3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java
@@ -28,13 +28,13 @@ import org.apache.hadoop.ipc.TestRPC.TestImpl;
import org.apache.hadoop.ipc.TestRPC.TestProtocol;
import org.apache.hadoop.mapred.AuditLogger.Keys;
import org.apache.hadoop.net.NetUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+
+import junit.framework.TestCase;
/**
* Tests {@link AuditLogger}.
*/
-public class TestAuditLogger {
+public class TestAuditLogger extends TestCase {
private static final String USER = "test";
private static final String OPERATION = "oper";
private static final String TARGET = "tgt";
@@ -44,7 +44,6 @@ public class TestAuditLogger {
/**
* Test the AuditLog format with key-val pair.
*/
- @Test
public void testKeyValLogFormat() {
StringBuilder actLog = new StringBuilder();
StringBuilder expLog = new StringBuilder();
@@ -115,7 +114,6 @@ public class TestAuditLogger {
/**
* Test {@link AuditLogger} without IP set.
*/
- @Test
public void testAuditLoggerWithoutIP() throws Exception {
// test without ip
testSuccessLogFormat(false);
@@ -139,7 +137,6 @@ public class TestAuditLogger {
/**
* Test {@link AuditLogger} with IP set.
*/
- @Test
public void testAuditLoggerWithIP() throws Exception {
Configuration conf = new Configuration();
// start the IPC server
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
index c2d6257823e..ea9f3d3f989 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
@@ -40,11 +40,6 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Ignore;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNotNull;
@Ignore
public class TestBadRecords extends ClusterMapReduceTestCase {
@@ -211,8 +206,7 @@ public class TestBadRecords extends ClusterMapReduceTestCase {
}
return processed;
}
-
- @Test
+
public void testBadMapRed() throws Exception {
JobConf conf = createJobConf();
conf.setMapperClass(BadMapper.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
index f04fbd7a29a..ada2d0c634b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
@@ -29,12 +29,6 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertFalse;
public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
public void _testMapReduce(boolean restart) throws Exception {
OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
@@ -91,17 +85,14 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
}
- @Test
public void testMapReduce() throws Exception {
_testMapReduce(false);
}
- @Test
public void testMapReduceRestarting() throws Exception {
_testMapReduce(true);
}
- @Test
public void testDFSRestart() throws Exception {
Path file = new Path(getInputDir(), "text.txt");
OutputStream os = getFileSystem().create(file);
@@ -118,7 +109,6 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
}
- @Test
public void testMRConfig() throws Exception {
JobConf conf = createJobConf();
assertNull(conf.get("xyz"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java
index 595d09cc2a0..4bd20d54ad5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java
@@ -21,15 +21,15 @@ import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat;
import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.Test;
+import junit.framework.TestCase;
import java.io.*;
import java.util.*;
/**
* TestCollect checks if the collect can handle simultaneous invocations.
*/
-public class TestCollect
+public class TestCollect extends TestCase
{
final static Path OUTPUT_DIR = new Path("build/test/test.collect.output");
static final int NUM_FEEDERS = 10;
@@ -127,7 +127,7 @@ public class TestCollect
conf.setNumMapTasks(1);
conf.setNumReduceTasks(1);
}
- @Test
+
public void testCollect() throws IOException {
JobConf conf = new JobConf();
configure(conf);
@@ -144,5 +144,9 @@ public class TestCollect
fs.delete(OUTPUT_DIR, true);
}
}
+
+ public static void main(String[] args) throws IOException {
+ new TestCollect().testCollect();
+ }
}
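
The main() method added above is the old JUnit 3 convenience for running a single test from the command line. A common alternative sketch (illustrative, not part of this patch) delegates to the text runner so fixtures and reporting still apply:

    import junit.textui.TestRunner;

    public class RunTestCollect {
      public static void main(String[] args) {
        // Runs every test* method of TestCollect with text-mode reporting.
        TestRunner.run(TestCollect.class);
      }
    }
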
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
index 7cf5e71e1a5..69353871cf4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
@@ -21,29 +21,28 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
/**
* check for the job submission options of
* -libjars -files -archives
*/
@Ignore
-public class TestCommandLineJobSubmission {
- // Input output paths for this..
+public class TestCommandLineJobSubmission extends TestCase {
+ // Input output paths for this..
// these are all dummy and do not test
// much in map reduce except for the command line
// params
static final Path input = new Path("/test/input/");
static final Path output = new Path("/test/output");
File buildDir = new File(System.getProperty("test.build.data", "/tmp"));
- @Test
public void testJobShell() throws Exception {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
index 7d7a7b0330f..239c239230e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
@@ -23,12 +23,11 @@ import org.apache.hadoop.mapred.lib.*;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper;
import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import junit.framework.TestCase;
import java.text.NumberFormat;
-public class TestFieldSelection {
+public class TestFieldSelection extends TestCase {
private static NumberFormat idFormat = NumberFormat.getInstance();
static {
@@ -36,7 +35,6 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
idFormat.setGroupingUsed(false);
}
- @Test
public void testFieldSelection() throws Exception {
launch();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
index d87f6fd91a9..1c8be66d084 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java
@@ -17,14 +17,12 @@
*/
package org.apache.hadoop.mapred;
+import junit.framework.TestCase;
+
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.Writer;
@@ -32,7 +30,7 @@ import java.io.OutputStreamWriter;
import java.util.Set;
import java.util.HashSet;
-public class TestFileInputFormatPathFilter {
+public class TestFileInputFormatPathFilter extends TestCase {
public static class DummyFileInputFormat extends FileInputFormat {
@@ -57,12 +55,12 @@ public class TestFileInputFormatPathFilter {
new Path(new Path(System.getProperty("test.build.data", "."), "data"),
"TestFileInputFormatPathFilter");
- @Before
+
public void setUp() throws Exception {
tearDown();
localFs.mkdirs(workDir);
}
- @After
+
public void tearDown() throws Exception {
if (localFs.exists(workDir)) {
localFs.delete(workDir, true);
@@ -131,19 +129,18 @@ public class TestFileInputFormatPathFilter {
assertEquals(createdFiles, computedFiles);
}
- @Test
public void testWithoutPathFilterWithoutGlob() throws Exception {
_testInputFiles(false, false);
}
- @Test
+
public void testWithoutPathFilterWithGlob() throws Exception {
_testInputFiles(false, true);
}
- @Test
+
public void testWithPathFilterWithoutGlob() throws Exception {
_testInputFiles(true, false);
}
- @Test
+
public void testWithPathFilterWithGlob() throws Exception {
_testInputFiles(true, true);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java
index 3d1c2e71bff..7891bca7990 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java
@@ -20,11 +20,10 @@ package org.apache.hadoop.mapred;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.net.NetworkTopology;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
+
+public class TestGetSplitHosts extends TestCase {
-public class TestGetSplitHosts {
- @Test
public void testGetSplitHosts() throws Exception {
int numBlocks = 3;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java
index 2b97d3b95ad..86431e5c135 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java
@@ -21,12 +21,11 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
-import org.junit.Test;
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertEquals;
-public class TestIFileStreams {
- @Test
+import junit.framework.TestCase;
+
+public class TestIFileStreams extends TestCase {
+
public void testIFileStream() throws Exception {
final int DLEN = 100;
DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -43,7 +42,7 @@ public class TestIFileStreams {
}
ifis.close();
}
- @Test
+
public void testBadIFileStream() throws Exception {
final int DLEN = 100;
DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -74,7 +73,7 @@ public class TestIFileStreams {
}
fail("Did not detect bad data in checksum");
}
- @Test
+
public void testBadLength() throws Exception {
final int DLEN = 100;
DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java
index 0c20c335d89..1398f9e5aaa 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java
@@ -17,15 +17,14 @@
*/
package org.apache.hadoop.mapred;
+import junit.framework.TestCase;
+
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.StringUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-public class TestInputPath {
- @Test
+public class TestInputPath extends TestCase {
public void testInputPath() throws Exception {
JobConf jobConf = new JobConf();
Path workingDir = jobConf.getWorkingDirectory();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
index a787e68c124..265118a70f6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
@@ -26,6 +26,8 @@ import java.io.Writer;
import java.util.Iterator;
import java.util.StringTokenizer;
+import junit.framework.TestCase;
+
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -34,11 +36,8 @@ import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-public class TestJavaSerialization {
+public class TestJavaSerialization extends TestCase {
private static String TEST_ROOT_DIR =
new File(System.getProperty("test.build.data", "/tmp")).toURI()
@@ -91,7 +90,7 @@ public class TestJavaSerialization {
wr.write("b a\n");
wr.close();
}
- @Test
+
public void testMapReduceJob() throws Exception {
JobConf conf = new JobConf(TestJavaSerialization.class);
@@ -150,7 +149,6 @@ public class TestJavaSerialization {
* coupled to Writable types, if so, the job will fail.
*
*/
- @Test
public void testWriteToSequencefile() throws Exception {
JobConf conf = new JobConf(TestJavaSerialization.class);
conf.setJobName("JavaSerialization");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
index 2659a14a70b..4b62b4a1d8e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
@@ -29,13 +29,8 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.IdentityMapper;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-
public class TestJobName extends ClusterMapReduceTestCase {
- @Test
public void testComplexName() throws Exception {
OutputStream os = getFileSystem().create(new Path(getInputDir(),
"text.txt"));
@@ -70,7 +65,6 @@ public class TestJobName extends ClusterMapReduceTestCase {
reader.close();
}
- @Test
public void testComplexNameWithRegex() throws Exception {
OutputStream os = getFileSystem().create(new Path(getInputDir(),
"text.txt"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
index 3dbc5777bd5..109c781c2b0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapred;
import java.io.DataOutputStream;
import java.io.IOException;
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -30,15 +32,11 @@ import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
/**
* A JUnit test to test Job System Directory with Mini-DFS.
*/
-public class TestJobSysDirWithDFS {
+public class TestJobSysDirWithDFS extends TestCase {
private static final Log LOG =
LogFactory.getLog(TestJobSysDirWithDFS.class.getName());
@@ -117,7 +115,7 @@ public class TestJobSysDirWithDFS {
// between Job Client & Job Tracker
assertTrue(result.job.isSuccessful());
}
- @Test
+
public void testWithDFS() throws IOException {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
index bacc196008e..27070783e14 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.mapred;
import java.io.*;
import java.util.*;
+import junit.framework.TestCase;
import org.apache.commons.logging.*;
import org.apache.hadoop.fs.*;
@@ -27,11 +28,8 @@ import org.apache.hadoop.io.*;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-public class TestKeyValueTextInputFormat {
+public class TestKeyValueTextInputFormat extends TestCase {
private static final Log LOG =
LogFactory.getLog(TestKeyValueTextInputFormat.class.getName());
@@ -49,7 +47,7 @@ public class TestKeyValueTextInputFormat {
private static Path workDir =
new Path(new Path(System.getProperty("test.build.data", "."), "data"),
"TestKeyValueTextInputFormat");
- @Test
+
public void testFormat() throws Exception {
JobConf job = new JobConf();
Path file = new Path(workDir, "test.txt");
@@ -136,7 +134,7 @@ public class TestKeyValueTextInputFormat {
(str.getBytes("UTF-8")),
defaultConf);
}
- @Test
+
public void testUTF8() throws Exception {
LineReader in = null;
@@ -155,7 +153,7 @@ public class TestKeyValueTextInputFormat {
}
}
}
- @Test
+
public void testNewLines() throws Exception {
LineReader in = null;
try {
@@ -221,8 +219,7 @@ public class TestKeyValueTextInputFormat {
/**
* Test using the gzip codec for reading
*/
- @Test
- public void testGzip() throws IOException {
+ public static void testGzip() throws IOException {
JobConf job = new JobConf();
CompressionCodec gzip = new GzipCodec();
ReflectionUtils.setConf(gzip, job);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java
index dde9310607f..7412832d5c2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java
@@ -35,15 +35,14 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.lib.LazyOutputFormat;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
/**
* A JUnit test to test the Map-Reduce framework's feature to create part
* files only if there is an explicit output.collect. This helps in preventing
* 0 byte files
*/
-public class TestLazyOutput {
+public class TestLazyOutput extends TestCase {
private static final int NUM_HADOOP_SLAVES = 3;
private static final int NUM_MAPS_PER_NODE = 2;
private static final Path INPUT = new Path("/testlazy/input");
@@ -133,7 +132,7 @@ public class TestLazyOutput {
}
}
- @Test
+
public void testLazyOutput() throws Exception {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
index 20d0173cc81..fb9e8fcce3a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
@@ -17,6 +17,16 @@
*/
package org.apache.hadoop.mapred;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.concurrent.TimeoutException;
+
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -26,21 +36,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.Text;
-import org.junit.After;
-import org.junit.Test;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.concurrent.TimeoutException;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
@SuppressWarnings("deprecation")
-public class TestMRCJCFileInputFormat {
+public class TestMRCJCFileInputFormat extends TestCase {
Configuration conf = new Configuration();
MiniDFSCluster dfs = null;
@@ -52,7 +50,6 @@ public class TestMRCJCFileInputFormat {
.build();
}
- @Test
public void testLocality() throws Exception {
JobConf job = new JobConf(conf);
dfs = newDFSCluster(job);
@@ -112,7 +109,6 @@ public class TestMRCJCFileInputFormat {
DFSTestUtil.waitReplication(fs, path, replication);
}
- @Test
public void testNumInputs() throws Exception {
JobConf job = new JobConf(conf);
dfs = newDFSCluster(job);
@@ -161,7 +157,6 @@ public class TestMRCJCFileInputFormat {
}
}
- @Test
public void testMultiLevelInput() throws Exception {
JobConf job = new JobConf(conf);
@@ -200,7 +195,6 @@ public class TestMRCJCFileInputFormat {
}
@SuppressWarnings("rawtypes")
- @Test
public void testLastInputSplitAtSplitBoundary() throws Exception {
FileInputFormat fif = new FileInputFormatForTest(1024l * 1024 * 1024,
128l * 1024 * 1024);
@@ -214,7 +208,6 @@ public class TestMRCJCFileInputFormat {
}
@SuppressWarnings("rawtypes")
- @Test
public void testLastInputSplitExceedingSplitBoundary() throws Exception {
FileInputFormat fif = new FileInputFormatForTest(1027l * 1024 * 1024,
128l * 1024 * 1024);
@@ -228,7 +221,6 @@ public class TestMRCJCFileInputFormat {
}
@SuppressWarnings("rawtypes")
- @Test
public void testLastInputSplitSingleSplit() throws Exception {
FileInputFormat fif = new FileInputFormatForTest(100l * 1024 * 1024,
128l * 1024 * 1024);
@@ -313,7 +305,7 @@ public class TestMRCJCFileInputFormat {
DFSTestUtil.waitReplication(fileSys, name, replication);
}
- @After
+ @Override
public void tearDown() throws Exception {
if (dfs != null) {
dfs.shutdown();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
index 74b6d77f6a0..3b86f81cc23 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
@@ -18,25 +18,18 @@
package org.apache.hadoop.mapred;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.JobStatus;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
+import java.io.*;
import java.net.URI;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
-public class TestMRCJCFileOutputCommitter {
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapred.JobContextImpl;
+import org.apache.hadoop.mapred.TaskAttemptContextImpl;
+import org.apache.hadoop.mapreduce.JobStatus;
+
+public class TestMRCJCFileOutputCommitter extends TestCase {
private static Path outDir = new Path(
System.getProperty("test.build.data", "/tmp"), "output");
@@ -74,7 +67,6 @@ public class TestMRCJCFileOutputCommitter {
}
@SuppressWarnings("unchecked")
- @Test
public void testCommitter() throws Exception {
JobConf job = new JobConf();
setConfForFileOutputCommitter(job);
@@ -116,7 +108,6 @@ public class TestMRCJCFileOutputCommitter {
FileUtil.fullyDelete(new File(outDir.toString()));
}
- @Test
public void testAbort() throws IOException {
JobConf job = new JobConf();
setConfForFileOutputCommitter(job);
@@ -170,7 +161,6 @@ public class TestMRCJCFileOutputCommitter {
}
}
- @Test
public void testFailAbort() throws IOException {
JobConf job = new JobConf();
job.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
index 39438ed753c..1fe549b179f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
@@ -23,6 +23,8 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
@@ -42,8 +44,6 @@ import org.apache.hadoop.mapreduce.split.JobSplit.SplitMetaInfo;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitIndex;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
/**
* Validates map phase progress.
@@ -59,7 +59,7 @@ import static org.junit.Assert.assertTrue;
* once mapTask.run() is finished. Sort phase progress in map task is not
* validated here.
*/
-public class TestMapProgress {
+public class TestMapProgress extends TestCase {
public static final Log LOG = LogFactory.getLog(TestMapProgress.class);
private static String TEST_ROOT_DIR;
static {
@@ -220,8 +220,7 @@ public class TestMapProgress {
/**
* Validates map phase progress after each record is processed by map task
* using custom task reporter.
- */
- @Test
+ */
public void testMapProgress() throws Exception {
JobConf job = new JobConf();
fs = FileSystem.getLocal(job);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
index a9e7f64c0b8..e19ff589fa4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
@@ -44,8 +44,8 @@ import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.mapred.Task.TaskReporter;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+
+import junit.framework.TestCase;
@SuppressWarnings(value={"unchecked", "deprecation"})
/**
@@ -56,7 +56,7 @@ import static org.junit.Assert.assertEquals;
* framework's merge on the reduce side will merge the partitions created to
* generate the final output which is sorted on the key.
*/
-public class TestMerge {
+public class TestMerge extends TestCase {
private static final int NUM_HADOOP_DATA_NODES = 2;
// Number of input files is same as the number of mappers.
private static final int NUM_MAPPERS = 10;
@@ -69,7 +69,6 @@ public class TestMerge {
// Where output goes.
private static final Path OUTPUT = new Path("/testplugin/output");
- @Test
public void testMerge() throws Exception {
MiniDFSCluster dfsCluster = null;
MiniMRClientCluster mrCluster = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java
index b608d756a49..8b7b8f51b96 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java
@@ -18,16 +18,14 @@
package org.apache.hadoop.mapred;
-import org.junit.Test;
-
import java.io.IOException;
+import junit.framework.TestCase;
/**
* A Unit-test to test bringup and shutdown of Mini Map-Reduce Cluster.
*/
-public class TestMiniMRBringup {
+public class TestMiniMRBringup extends TestCase {
- @Test
public void testBringUp() throws IOException {
MiniMRCluster mr = null;
try {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java
index 3f64f7a35b9..45879aff623 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java
@@ -18,23 +18,20 @@
package org.apache.hadoop.mapred;
+import java.io.*;
+import junit.framework.TestCase;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.MRCaching.TestResult;
import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
/**
* A JUnit test to test caching with DFS
*
*/
@Ignore
-public class TestMiniMRDFSCaching {
+public class TestMiniMRDFSCaching extends TestCase {
- @Test
public void testWithDFS() throws IOException {
MiniMRCluster mr = null;
MiniDFSCluster dfs = null;
@@ -73,4 +70,9 @@ public class TestMiniMRDFSCaching {
}
}
}
+
+ public static void main(String[] argv) throws Exception {
+ TestMiniMRDFSCaching td = new TestMiniMRDFSCaching();
+ td.testWithDFS();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
index 1bd29542fcd..49825e99f57 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java
@@ -21,17 +21,17 @@ import java.io.IOException;
import java.util.BitSet;
import java.util.HashMap;
import java.util.Random;
+
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-public class TestMultiFileInputFormat {
+public class TestMultiFileInputFormat extends TestCase{
private static JobConf job = new JobConf();
@@ -79,8 +79,7 @@ public class TestMultiFileInputFormat {
FileInputFormat.setInputPaths(job, multiFileDir);
return multiFileDir;
}
-
- @Test
+
public void testFormat() throws IOException {
LOG.info("Test started");
LOG.info("Max split count = " + MAX_SPLIT_COUNT);
@@ -123,8 +122,7 @@ public class TestMultiFileInputFormat {
}
LOG.info("Test Finished");
}
-
- @Test
+
public void testFormatWithLessPathsThanSplits() throws Exception {
MultiFileInputFormat format = new DummyMultiFileInputFormat();
FileSystem fs = FileSystem.getLocal(job);
@@ -137,4 +135,9 @@ public class TestMultiFileInputFormat {
initFiles(fs, 2, 500);
assertEquals(2, format.getSplits(job, 4).length);
}
+
+ public static void main(String[] args) throws Exception{
+ TestMultiFileInputFormat test = new TestMultiFileInputFormat();
+ test.testFormat();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
index 5bb336e4e81..16ff6af9271 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
@@ -27,19 +27,16 @@ import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
+import junit.framework.TestCase;
+
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
/**
*
* test MultiFileSplit class
*/
-public class TestMultiFileSplit {
+public class TestMultiFileSplit extends TestCase{
- @Test
public void testReadWrite() throws Exception {
MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {new Path("/test/path/1"), new Path("/test/path/2")}, new long[] {100,200});
@@ -73,7 +70,6 @@ public class TestMultiFileSplit {
* test method getLocations
* @throws IOException
*/
- @Test
public void testgetLocations() throws IOException{
JobConf job= new JobConf();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
index 7e8dfef03f1..294723a9c87 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
@@ -17,6 +17,10 @@
*/
package org.apache.hadoop.mapred;
+import java.io.IOException;
+
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -28,17 +32,12 @@ import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.junit.Ignore;
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.junit.Assert.assertEquals;
/**
* This test checks whether the task caches are created and used properly.
*/
@Ignore
-public class TestMultipleLevelCaching {
+public class TestMultipleLevelCaching extends TestCase {
private static final int MAX_LEVEL = 5;
final Path inDir = new Path("/cachetesting");
final Path outputPath = new Path("/output");
@@ -72,7 +71,6 @@ public class TestMultipleLevelCaching {
return rack.toString();
}
- @Test
public void testMultiLevelCaching() throws Exception {
for (int i = 1 ; i <= MAX_LEVEL; ++i) {
testCachingAtLevel(i);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
index b5047fc8331..14c097d77e1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
@@ -18,19 +18,15 @@
package org.apache.hadoop.mapred;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
-import org.junit.Test;
+import java.io.*;
+import junit.framework.TestCase;
-import java.io.File;
-import java.io.IOException;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import org.apache.hadoop.mapred.lib.*;
-public class TestMultipleTextOutputFormat {
+public class TestMultipleTextOutputFormat extends TestCase {
private static JobConf defaultConf = new JobConf();
private static FileSystem localFs = null;
@@ -87,8 +83,7 @@ public class TestMultipleTextOutputFormat {
writeData(rw);
rw.close(null);
}
-
- @Test
+
public void testFormat() throws Exception {
JobConf job = new JobConf();
job.set(JobContext.TASK_ATTEMPT_ID, attempt);
@@ -150,4 +145,8 @@ public class TestMultipleTextOutputFormat {
//System.out.printf("File_2 output: %s\n", output);
assertEquals(output, expectedOutput.toString());
}
+
+ public static void main(String[] args) throws Exception {
+ new TestMultipleTextOutputFormat().testFormat();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
index 767459f88b4..586df38dcfc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
@@ -19,18 +19,17 @@
package org.apache.hadoop.mapred;
import org.apache.hadoop.mapreduce.TaskCounter;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
public class TestReduceFetch extends TestReduceFetchFromPartialMem {
+ static {
+ setSuite(TestReduceFetch.class);
+ }
+
/**
* Verify that all segments are read from disk
* @throws Exception might be thrown
*/
- @Test
public void testReduceFromDisk() throws Exception {
final int MAP_TASKS = 8;
JobConf job = mrCluster.createJobConf();
@@ -54,7 +53,6 @@ public class TestReduceFetch extends TestReduceFetchFromPartialMem {
* Verify that no segment hits disk.
* @throws Exception might be thrown
*/
- @Test
public void testReduceFromMem() throws Exception {
final int MAP_TASKS = 3;
JobConf job = mrCluster.createJobConf();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
index 9b04f64ac60..3a1a275ab91 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
@@ -18,6 +18,10 @@
package org.apache.hadoop.mapred;
+import junit.extensions.TestSetup;
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -26,9 +30,7 @@ import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.TaskCounter;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.hadoop.mapreduce.MRConfig;
import java.io.DataInput;
import java.io.DataOutput;
@@ -37,27 +39,34 @@ import java.util.Arrays;
import java.util.Formatter;
import java.util.Iterator;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-public class TestReduceFetchFromPartialMem {
+public class TestReduceFetchFromPartialMem extends TestCase {
protected static MiniMRCluster mrCluster = null;
protected static MiniDFSCluster dfsCluster = null;
+ protected static TestSuite mySuite;
- @Before
- public void setUp() throws Exception {
- Configuration conf = new Configuration();
- dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
- mrCluster = new MiniMRCluster(2,
- dfsCluster.getFileSystem().getUri().toString(), 1);
+ protected static void setSuite(Class<? extends TestCase> klass) {
+ mySuite = new TestSuite(klass);
}
- @After
- public void tearDown() throws Exception {
- if (dfsCluster != null) { dfsCluster.shutdown(); }
- if (mrCluster != null) { mrCluster.shutdown(); }
+ static {
+ setSuite(TestReduceFetchFromPartialMem.class);
+ }
+
+ public static Test suite() {
+ TestSetup setup = new TestSetup(mySuite) {
+ protected void setUp() throws Exception {
+ Configuration conf = new Configuration();
+ dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+ mrCluster = new MiniMRCluster(2,
+ dfsCluster.getFileSystem().getUri().toString(), 1);
+ }
+ protected void tearDown() throws Exception {
+ if (dfsCluster != null) { dfsCluster.shutdown(); }
+ if (mrCluster != null) { mrCluster.shutdown(); }
+ }
+ };
+ return setup;
}
private static final String tagfmt = "%04d";
@@ -69,7 +78,6 @@ public class TestReduceFetchFromPartialMem {
}
/** Verify that at least one segment does not hit disk */
- @Test
public void testReduceFromPartialMem() throws Exception {
final int MAP_TASKS = 7;
JobConf job = mrCluster.createJobConf();
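
The suite() method above wraps the whole TestSuite in a junit.extensions.TestSetup so the mini clusters start once per suite, where the JUnit 4 @Before/@After version restarted them for every test method. A condensed, self-contained sketch of the decorator, with a cheap stand-in for the clusters:

import junit.extensions.TestSetup;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

// Sketch: TestSetup's setUp/tearDown bracket the whole suite once, unlike
// TestCase.setUp which runs before every test method. The StringBuilder
// stands in for the MiniDFS/MiniMR clusters started above.
public class OneTimeFixtureTest extends TestCase {
  private static StringBuilder resource;

  public static Test suite() {
    return new TestSetup(new TestSuite(OneTimeFixtureTest.class)) {
      protected void setUp() {    // runs once, before the first test
        resource = new StringBuilder("ready");
      }
      protected void tearDown() { // runs once, after the last test
        resource = null;
      }
    };
  }

  public void testFixtureIsShared() {
    assertEquals("ready", resource.toString());
  }
}
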
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java
index 69546a6cba2..43fd94871a2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java
@@ -17,6 +17,10 @@
*/
package org.apache.hadoop.mapred;
+import java.io.IOException;
+
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
@@ -26,17 +30,11 @@ import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.Progressable;
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
/**
* This test exercises the ValueIterator.
*/
-public class TestReduceTask {
+public class TestReduceTask extends TestCase {
static class NullProgress implements Progressable {
public void progress() { }
@@ -121,10 +119,9 @@ public class TestReduceTask {
}
assertEquals(vals.length, i);
// make sure we have progress equal to 1.0
- assertEquals(1.0f, rawItr.getProgress().get(),0.0000);
+ assertEquals(1.0f, rawItr.getProgress().get());
}
- @Test
public void testValueIterator() throws Exception {
Path tmpDir = new Path("build/test/test.reduce.task");
Configuration conf = new Configuration();
@@ -132,8 +129,7 @@ public class TestReduceTask {
runValueIterator(tmpDir, testCase, conf, null);
}
}
-
- @Test
+
public void testValueIteratorWithCompression() throws Exception {
Path tmpDir = new Path("build/test/test.reduce.task.compression");
Configuration conf = new Configuration();
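
The assertEquals change above is worth flagging: junit.framework.Assert has no two-argument float overload, so assertEquals(1.0f, ...) autoboxes to assertEquals(Object, Object) and demands exact equality. That is safe for a progress value that ends at exactly 1.0f, but not for computed floats in general. A small sketch of the distinction, with illustrative values:

import junit.framework.TestCase;

// Sketch: exact vs. delta-based float assertions under JUnit 3.
public class FloatAssertSketchTest extends TestCase {

  public void testExactEqualityIsFineForExactValues() {
    float progress = 1.0f; // exactly representable, as in TestReduceTask
    assertEquals(Float.valueOf(1.0f), Float.valueOf(progress)); // boxed, exact
  }

  public void testDeltaFormForComputedValues() {
    float sum = 0f;
    for (int i = 0; i < 10; i++) {
      sum += 0.1f; // accumulates rounding error; ends near 1.0000001f
    }
    assertEquals(1.0f, sum, 1e-6f); // three-argument form tolerates it
  }
}
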
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java
index 64b0983a5d6..b8be7400070 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java
@@ -18,26 +18,19 @@
package org.apache.hadoop.mapred;
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
import java.io.IOException;
import java.util.Random;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
-public class TestSequenceFileAsBinaryInputFormat {
+import junit.framework.TestCase;
+import org.apache.commons.logging.*;
+
+public class TestSequenceFileAsBinaryInputFormat extends TestCase {
private static final Log LOG = FileInputFormat.LOG;
private static final int RECORDS = 10000;
- @Test
public void testBinary() throws IOException {
JobConf job = new JobConf();
FileSystem fs = FileSystem.getLocal(job);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
index 03dc6a69003..abe21f223ef 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
@@ -18,35 +18,24 @@
package org.apache.hadoop.mapred;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.DoubleWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.junit.Test;
-
import java.io.IOException;
import java.util.Random;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
-public class TestSequenceFileAsBinaryOutputFormat {
+import junit.framework.TestCase;
+import org.apache.commons.logging.*;
+
+public class TestSequenceFileAsBinaryOutputFormat extends TestCase {
private static final Log LOG =
LogFactory.getLog(TestSequenceFileAsBinaryOutputFormat.class.getName());
+
private static final int RECORDS = 10000;
// A random task attempt id for testing.
private static final String attempt = "attempt_200707121733_0001_m_000000_0";
- @Test
public void testBinary() throws IOException {
JobConf job = new JobConf();
FileSystem fs = FileSystem.getLocal(job);
@@ -140,8 +129,7 @@ public class TestSequenceFileAsBinaryOutputFormat {
assertEquals("Some records not found", RECORDS, count);
}
- @Test
- public void testSequenceOutputClassDefaultsToMapRedOutputClass()
+ public void testSequenceOutputClassDefaultsToMapRedOutputClass()
throws IOException {
JobConf job = new JobConf();
FileSystem fs = FileSystem.getLocal(job);
@@ -175,7 +163,6 @@ public class TestSequenceFileAsBinaryOutputFormat {
job));
}
- @Test
public void testcheckOutputSpecsForbidRecordCompression() throws IOException {
JobConf job = new JobConf();
FileSystem fs = FileSystem.getLocal(job);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java
index d4e5e17e11f..4cfd59af745 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java
@@ -18,29 +18,22 @@
package org.apache.hadoop.mapred;
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
-import java.util.BitSet;
-import java.util.Random;
+import org.apache.commons.logging.*;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
-public class TestSequenceFileAsTextInputFormat {
+public class TestSequenceFileAsTextInputFormat extends TestCase {
private static final Log LOG = FileInputFormat.LOG;
private static int MAX_LENGTH = 10000;
private static Configuration conf = new Configuration();
- @Test
public void testFormat() throws Exception {
JobConf job = new JobConf(conf);
FileSystem fs = FileSystem.getLocal(conf);
@@ -119,4 +112,8 @@ public class TestSequenceFileAsTextInputFormat {
}
}
+
+ public static void main(String[] args) throws Exception {
+ new TestSequenceFileAsTextInputFormat().testFormat();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
index 93f21ce9e49..e50c396a434 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
@@ -18,21 +18,17 @@
package org.apache.hadoop.mapred;
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
-import java.io.IOException;
-import java.util.Random;
+import org.apache.commons.logging.*;
-import static org.junit.Assert.assertEquals;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
-public class TestSequenceFileInputFilter {
+public class TestSequenceFileInputFilter extends TestCase {
private static final Log LOG = FileInputFormat.LOG;
private static final int MAX_LENGTH = 15000;
@@ -101,8 +97,7 @@ public class TestSequenceFileInputFilter {
}
return count;
}
-
- @Test
+
public void testRegexFilter() throws Exception {
// set the filter class
LOG.info("Testing Regex Filter with patter: \\A10*");
@@ -126,7 +121,6 @@ public class TestSequenceFileInputFilter {
fs.delete(inDir, true);
}
- @Test
public void testPercentFilter() throws Exception {
LOG.info("Testing Percent Filter with frequency: 1000");
// set the filter class
@@ -153,8 +147,7 @@ public class TestSequenceFileInputFilter {
// clean up
fs.delete(inDir, true);
}
-
- @Test
+
public void testMD5Filter() throws Exception {
// set the filter class
LOG.info("Testing MD5 Filter with frequency: 1000");
@@ -175,4 +168,9 @@ public class TestSequenceFileInputFilter {
// clean up
fs.delete(inDir, true);
}
+
+ public static void main(String[] args) throws Exception {
+ TestSequenceFileInputFilter filter = new TestSequenceFileInputFilter();
+ filter.testRegexFilter();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
index 338e91d4d35..575ed532545 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
@@ -18,28 +18,22 @@
package org.apache.hadoop.mapred;
-import org.apache.commons.logging.Log;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.junit.Test;
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
-import java.util.BitSet;
-import java.util.Random;
+import org.apache.commons.logging.*;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
-public class TestSequenceFileInputFormat {
+public class TestSequenceFileInputFormat extends TestCase {
private static final Log LOG = FileInputFormat.LOG;
private static int MAX_LENGTH = 10000;
private static Configuration conf = new Configuration();
- @Test
public void testFormat() throws Exception {
JobConf job = new JobConf(conf);
FileSystem fs = FileSystem.getLocal(conf);
@@ -116,4 +110,8 @@ public class TestSequenceFileInputFormat {
}
}
+
+ public static void main(String[] args) throws Exception {
+ new TestSequenceFileInputFormat().testFormat();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
index 82d1d2d09a1..ad4d4ce17a9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
@@ -17,20 +17,18 @@
*/
package org.apache.hadoop.mapred;
+import java.util.Iterator;
+
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.SortedRanges.Range;
-import org.junit.Test;
-import java.util.Iterator;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestSortedRanges {
- private static final Log LOG =
+public class TestSortedRanges extends TestCase {
+ private static final Log LOG =
LogFactory.getLog(TestSortedRanges.class);
-
- @Test
+
public void testAdd() {
SortedRanges sr = new SortedRanges();
sr.add(new Range(2,9));
@@ -68,8 +66,7 @@ public class TestSortedRanges {
assertEquals(77, it.next().longValue());
}
-
- @Test
+
public void testRemove() {
SortedRanges sr = new SortedRanges();
sr.add(new Range(2,19));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
index b9e32759fa4..426686f9bb5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
@@ -18,6 +18,12 @@
package org.apache.hadoop.mapred;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.net.URI;
+
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -28,20 +34,14 @@ import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.util.Progressable;
-import org.junit.Test;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.net.URI;
-
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
/**
* A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795).
*/
-public class TestSpecialCharactersInOutputPath {
+public class TestSpecialCharactersInOutputPath extends TestCase {
private static final Log LOG =
LogFactory.getLog(TestSpecialCharactersInOutputPath.class.getName());
@@ -96,8 +96,7 @@ public class TestSpecialCharactersInOutputPath {
LOG.info("job is complete: " + runningJob.isSuccessful());
return (runningJob.isSuccessful());
}
-
- @Test
+
public void testJobWithDFS() throws IOException {
String namenode = null;
MiniDFSCluster dfs = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
index 8a83e8153e3..12568d09175 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
@@ -19,18 +19,14 @@ package org.apache.hadoop.mapred;
import java.util.Map;
+import junit.framework.TestCase;
+
import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
import org.apache.hadoop.mapred.StatisticsCollector.Stat;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
-public class TestStatisticsCollector {
+public class TestStatisticsCollector extends TestCase {
@SuppressWarnings("rawtypes")
- @Test
public void testMovingWindow() throws Exception {
StatisticsCollector collector = new StatisticsCollector(1);
TimeWindow window = new TimeWindow("test", 6, 2);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
index 2d67edc581a..3c2cf215fb3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
@@ -17,15 +17,6 @@
*/
package org.apache.hadoop.mapred;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.lib.IdentityMapper;
-import org.apache.hadoop.mapred.lib.IdentityReducer;
-import org.junit.Test;
-
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
@@ -35,10 +26,18 @@ import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
-public class TestUserDefinedCounters {
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+
+public class TestUserDefinedCounters extends TestCase {
+
private static String TEST_ROOT_DIR =
new File(System.getProperty("test.build.data", "/tmp")).toURI()
.toString().replace(' ', '+')
@@ -76,7 +75,6 @@ public class TestUserDefinedCounters {
wr.close();
}
- @Test
public void testMapReduceJob() throws Exception {
JobConf conf = new JobConf(TestUserDefinedCounters.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java
index 82c68db30c5..2c0cedcbb30 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java
@@ -18,6 +18,12 @@
package org.apache.hadoop.mapred;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
@@ -25,15 +31,8 @@ import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.util.GenericsUtil;
-import org.junit.Test;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
-import static org.junit.Assert.assertTrue;
-
-public class TestWritableJobConf {
+public class TestWritableJobConf extends TestCase {
private static final Configuration CONF = new Configuration();
@@ -79,17 +78,15 @@ public class TestWritableJobConf {
}
}
- assertTrue(map1.equals(map2));
+ assertEquals(map1, map2);
}
- @Test
public void testEmptyConfiguration() throws Exception {
JobConf conf = new JobConf();
Configuration deser = serDeser(conf);
assertEquals(conf, deser);
}
- @Test
public void testNonEmptyConfiguration() throws Exception {
JobConf conf = new JobConf();
conf.set("a", "A");
@@ -98,7 +95,6 @@ public class TestWritableJobConf {
assertEquals(conf, deser);
}
- @Test
public void testConfigurationWithDefaults() throws Exception {
JobConf conf = new JobConf(false);
conf.set("a", "A");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
index abf2e72e0d1..0e340428214 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
@@ -18,10 +18,6 @@
package org.apache.hadoop.mapred;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
@@ -42,6 +38,8 @@ import java.security.PrivilegedExceptionAction;
import java.util.List;
import java.util.Map;
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -115,7 +113,7 @@ import org.mockito.stubbing.Answer;
* Test YarnRunner and make sure the client side plugin works
* fine
*/
-public class TestYARNRunner {
+public class TestYARNRunner extends TestCase {
private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
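
TestYARNRunner keeps all of its Mockito machinery while switching base class: mock(), when() and doAnswer() are runner-agnostic, so only the assertion imports had to move. A self-contained sketch of Mockito inside a JUnit 3 TestCase, where the Greeter interface is invented for illustration:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import junit.framework.TestCase;

// Sketch: Mockito stubbing inside a JUnit 3 TestCase. Mockito does not
// depend on the JUnit 4 runner, so extends-TestCase conversions leave
// mock-based tests intact. Greeter is a made-up interface, not YARN API.
public class MockInsideTestCaseTest extends TestCase {

  interface Greeter {
    String greet(String name);
  }

  public void testStubbedCall() {
    Greeter greeter = mock(Greeter.class);
    when(greeter.greet("world")).thenReturn("hello, world");

    assertEquals("hello, world", greeter.greet("world"));
    verify(greeter).greet("world");
  }
}
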
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java
index a3066765ec0..15cea69dab2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java
@@ -22,6 +22,11 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+import junit.extensions.TestSetup;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
@@ -49,27 +54,23 @@ import org.apache.hadoop.mapred.Utils;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
-public class TestDatamerge {
+public class TestDatamerge extends TestCase {
private static MiniDFSCluster cluster = null;
-
- @Before
- public void setUp() throws Exception {
- Configuration conf = new Configuration();
- cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
- }
- @After
- public void tearDown() throws Exception {
- if (cluster != null) {
- cluster.shutdown();
- }
+ public static Test suite() {
+ TestSetup setup = new TestSetup(new TestSuite(TestDatamerge.class)) {
+ protected void setUp() throws Exception {
+ Configuration conf = new Configuration();
+ cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+ }
+ protected void tearDown() throws Exception {
+ if (cluster != null) {
+ cluster.shutdown();
+ }
+ }
+ };
+ return setup;
}
private static SequenceFile.Writer[] createWriters(Path testdir,
@@ -245,22 +246,18 @@ public class TestDatamerge {
base.getFileSystem(job).delete(base, true);
}
- @Test
public void testSimpleInnerJoin() throws Exception {
joinAs("inner", InnerJoinChecker.class);
}
- @Test
public void testSimpleOuterJoin() throws Exception {
joinAs("outer", OuterJoinChecker.class);
}
- @Test
public void testSimpleOverride() throws Exception {
joinAs("override", OverrideChecker.class);
}
- @Test
public void testNestedJoin() throws Exception {
// outer(inner(S1,...,Sn),outer(S1,...Sn))
final int SOURCES = 3;
@@ -353,7 +350,6 @@ public class TestDatamerge {
}
- @Test
public void testEmptyJoin() throws Exception {
JobConf job = new JobConf();
Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
index 56871550dc9..e421ede9827 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
@@ -26,6 +26,8 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
+import junit.framework.TestCase;
+
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -34,12 +36,8 @@ import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-public class TestTupleWritable {
+public class TestTupleWritable extends TestCase {
private TupleWritable makeTuple(Writable[] writs) {
Writable[] sub1 = { writs[1], writs[2] };
@@ -102,7 +100,6 @@ public class TestTupleWritable {
return i;
}
- @Test
public void testIterable() throws Exception {
Random r = new Random();
Writable[] writs = {
@@ -124,7 +121,6 @@ public class TestTupleWritable {
verifIter(writs, t, 0);
}
- @Test
public void testNestedIterable() throws Exception {
Random r = new Random();
Writable[] writs = {
@@ -143,7 +139,6 @@ public class TestTupleWritable {
assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
- @Test
public void testWritable() throws Exception {
Random r = new Random();
Writable[] writs = {
@@ -167,7 +162,6 @@ public class TestTupleWritable {
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
- @Test
public void testWideWritable() throws Exception {
Writable[] manyWrits = makeRandomWritables(131);
@@ -186,8 +180,7 @@ public class TestTupleWritable {
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
assertEquals("All tuple data has not been read from the stream",-1,in.read());
}
-
- @Test
+
public void testWideWritable2() throws Exception {
Writable[] manyWrits = makeRandomWritables(71);
@@ -209,7 +202,6 @@ public class TestTupleWritable {
* Tests a tuple writable with more than 64 values and the values set written
* spread far apart.
*/
- @Test
public void testSparseWideWritable() throws Exception {
Writable[] manyWrits = makeRandomWritables(131);
@@ -228,7 +220,7 @@ public class TestTupleWritable {
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
assertEquals("All tuple data has not been read from the stream",-1,in.read());
}
- @Test
+
public void testWideTuple() throws Exception {
Text emptyText = new Text("Should be empty");
Writable[] values = new Writable[64];
@@ -248,7 +240,7 @@ public class TestTupleWritable {
}
}
}
- @Test
+
public void testWideTuple2() throws Exception {
Text emptyText = new Text("Should be empty");
Writable[] values = new Writable[64];
@@ -272,7 +264,6 @@ public class TestTupleWritable {
/**
* Tests that we can write more than 64 values.
*/
- @Test
public void testWideTupleBoundary() throws Exception {
Text emptyText = new Text("Should not be set written");
Writable[] values = new Writable[65];
@@ -296,7 +287,6 @@ public class TestTupleWritable {
/**
* Tests compatibility with pre-0.21 versions of TupleWritable
*/
- @Test
public void testPreVersion21Compatibility() throws Exception {
Writable[] manyWrits = makeRandomWritables(64);
PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
@@ -314,7 +304,7 @@ public class TestTupleWritable {
assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
assertEquals("All tuple data has not been read from the stream",-1,in.read());
}
- @Test
+
public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
Writable[] manyWrits = new Writable[0];
PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
index ae5572f5dcd..3ca175a5049 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
@@ -21,6 +21,8 @@ import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import junit.framework.TestCase;
+
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
@@ -33,16 +35,13 @@ import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-public class TestWrappedRecordReaderClassloader {
+public class TestWrappedRecordReaderClassloader extends TestCase {
/**
* Tests the class loader set by {@link JobConf#setClassLoader(ClassLoader)}
* is inherited by any {@link WrappedRecordReader}s created by
* {@link CompositeRecordReader}
*/
- @Test
public void testClassLoader() throws Exception {
JobConf job = new JobConf();
Fake_ClassLoader classLoader = new Fake_ClassLoader();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
index b916026272e..8bd855433ea 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapred.lib;
import java.io.DataOutputStream;
import java.io.IOException;
+import junit.framework.TestCase;
+
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -30,12 +32,9 @@ import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-public class TestDelegatingInputFormat {
- @Test
+public class TestDelegatingInputFormat extends TestCase {
+
public void testSplitting() throws Exception {
JobConf conf = new JobConf();
MiniDFSCluster dfs = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
index 388de0fb88d..db9c219e9c1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
@@ -20,14 +20,13 @@ package org.apache.hadoop.mapred.lib;
import java.io.*;
import java.util.*;
+import junit.framework.TestCase;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-public class TestLineInputFormat {
+public class TestLineInputFormat extends TestCase {
private static int MAX_LENGTH = 200;
private static JobConf defaultConf = new JobConf();
@@ -44,7 +43,7 @@ public class TestLineInputFormat {
private static Path workDir =
new Path(new Path(System.getProperty("test.build.data", "."), "data"),
"TestLineInputFormat");
- @Test
+
public void testFormat() throws Exception {
JobConf job = new JobConf();
Path file = new Path(workDir, "test.txt");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
index 115a6f70d08..3a9cb9ec337 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
@@ -36,6 +36,7 @@ import static org.junit.Assert.assertEquals;
* @see TestDelegatingInputFormat
*/
public class TestMultipleInputs {
+
@Test
public void testAddInputPathWithFormat() {
final JobConf conf = new JobConf();
@@ -48,6 +49,7 @@ public class TestMultipleInputs {
assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
.getClass());
}
+
@Test
public void testAddInputPathWithMapper() {
final JobConf conf = new JobConf();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
index f33f83cb6c1..6da96ce22bd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
@@ -22,14 +22,13 @@ import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.lib.*;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import junit.framework.TestCase;
import java.io.*;
import java.util.*;
import java.text.NumberFormat;
-public class TestAggregates {
+public class TestAggregates extends TestCase {
private static NumberFormat idFormat = NumberFormat.getInstance();
static {
@@ -37,7 +36,7 @@ public class TestAggregates {
idFormat.setGroupingUsed(false);
}
- @Test
+
public void testAggregates() throws Exception {
launch();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
index 203da4e0b7c..968bb066565 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.mapred.lib.db;
import java.io.IOException;
+import junit.framework.TestCase;
+
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobConf;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-public class TestConstructQuery {
+public class TestConstructQuery extends TestCase {
+
private String[] fieldNames = new String[] { "id", "name", "value" };
private String[] nullFieldNames = new String[] { null, null, null };
private String expected = "INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);";
@@ -33,15 +33,15 @@ public class TestConstructQuery {
private DBOutputFormat<DBWritable, NullWritable> format
= new DBOutputFormat<DBWritable, NullWritable>();
- @Test
- public void testConstructQuery() {
+
+ public void testConstructQuery() {
String actual = format.constructQuery("hadoop_output", fieldNames);
assertEquals(expected, actual);
-
+
actual = format.constructQuery("hadoop_output", nullFieldNames);
assertEquals(nullExpected, actual);
}
- @Test
+
public void testSetOutput() throws IOException {
JobConf job = new JobConf();
DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
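
The two expected constants in this test pin down the shape constructQuery must produce: a parenthesized column list when field names are known, and bare VALUES placeholders when they are all null. A rough re-derivation of that shape for illustration; this is not Hadoop's actual DBOutputFormat implementation:

// Sketch of the query shape the test above expects (illustrative only).
public class InsertQuerySketch {

  static String constructQuery(String table, String[] fieldNames) {
    StringBuilder q = new StringBuilder("INSERT INTO ").append(table);
    if (fieldNames.length > 0 && fieldNames[0] != null) {
      q.append(" (");
      for (int i = 0; i < fieldNames.length; i++) {
        q.append(fieldNames[i]);
        if (i != fieldNames.length - 1) q.append(",");
      }
      q.append(")");
    }
    q.append(" VALUES (");
    for (int i = 0; i < fieldNames.length; i++) {
      q.append("?");
      if (i != fieldNames.length - 1) q.append(",");
    }
    q.append(");");
    return q.toString();
  }

  public static void main(String[] args) {
    // Prints: INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);
    System.out.println(constructQuery("hadoop_output",
        new String[] { "id", "name", "value" }));
    // With null field names, only the placeholders remain.
    System.out.println(constructQuery("hadoop_output",
        new String[] { null, null, null }));
  }
}
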
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
index 34b1d75dfed..dd7817d65b5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
@@ -44,13 +44,10 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
+import junit.framework.TestCase;
@Ignore
-public class TestPipes {
+public class TestPipes extends TestCase {
private static final Log LOG =
LogFactory.getLog(TestPipes.class.getName());
@@ -69,7 +66,7 @@ public class TestPipes {
fs.delete(p, true);
assertFalse("output not cleaned up", fs.exists(p));
}
- @Test
+
public void testPipes() throws IOException {
if (System.getProperty("compile.c++") == null) {
LOG.info("compile.c++ is not defined, so skipping TestPipes");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
index 8177ecd405b..29640c8854b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
@@ -17,42 +17,36 @@
*/
package org.apache.hadoop.mapreduce;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.fs.*;
import org.apache.hadoop.mapred.LocalJobRunner;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
+
import org.junit.Test;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
/**
* Stress tests for the LocalJobRunner
*/
-public class TestLocalRunner {
+public class TestLocalRunner extends TestCase {
private static final Log LOG = LogFactory.getLog(TestLocalRunner.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
index c3746cfb71d..bd78b15be92 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
@@ -17,19 +17,6 @@
*/
package org.apache.hadoop.mapreduce;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.mapreduce.tools.CLI;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.junit.Test;
-
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -42,11 +29,19 @@ import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import org.junit.Assert;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.mapreduce.tools.CLI;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
/**
Tests the CLI class. The CLI class implements the Tool interface.
@@ -101,7 +96,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
throw new IOException();
}
}
- @Test
+
public void testJobSubmissionSpecsAndFiles() throws Exception {
Configuration conf = createJobConf();
Job job = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(),
@@ -125,7 +120,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
/**
* main test method
*/
- @Test
+
public void testJobClient() throws Exception {
Configuration conf = createJobConf();
Job job = runJob(conf);
@@ -178,7 +173,8 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
String answer = new String(out.toByteArray(), "UTF-8");
- assertTrue(answer.contains("Killed task " + taid + " by failing it"));
+ Assert
+ .assertTrue(answer.contains("Killed task " + taid + " by failing it"));
}
/**
@@ -196,7 +192,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
String answer = new String(out.toByteArray(), "UTF-8");
- assertTrue(answer.contains("Killed task " + taid));
+ Assert.assertTrue(answer.contains("Killed task " + taid));
}
/**
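
The switch to Assert.assertTrue above is deliberate: once TestMRJobClient inherits from ClusterMapReduceTestCase (a junit.framework.TestCase), an unqualified assertTrue binds to the inherited JUnit 3 method, so the patch qualifies the calls it wants resolved against org.junit.Assert. A minimal sketch of the two families coexisting, with hypothetical names:

import junit.framework.TestCase;
import org.junit.Assert;

// Sketch: inside a TestCase subclass, the bare assertTrue resolves to the
// inherited junit.framework method; qualifying with org.junit.Assert, as
// TestMRJobClient now does, selects the JUnit 4 version explicitly.
public class QualifiedAssertTest extends TestCase {

  public void testBothAssertFamilies() {
    assertTrue("inherited JUnit 3 assert", 1 + 1 == 2);
    Assert.assertTrue("explicit JUnit 4 assert", "abc".contains("b"));
  }
}
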
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
index a69e06eacd9..1e4f4de9f93 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
@@ -25,6 +25,8 @@ import java.io.Writer;
import java.util.Arrays;
import java.util.List;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -40,16 +42,13 @@ import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
/**
* A JUnit test to test the Map-Reduce framework's feature to create part
* files only if there is an explicit output.collect. This helps in preventing
* 0 byte files
*/
-public class TestMapReduceLazyOutput {
+public class TestMapReduceLazyOutput extends TestCase {
private static final int NUM_HADOOP_SLAVES = 3;
private static final int NUM_MAPS_PER_NODE = 2;
private static final Path INPUT = new Path("/testlazy/input");
@@ -123,7 +122,7 @@ public class TestMapReduceLazyOutput {
}
}
- @Test
+
public void testLazyOutput() throws Exception {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
index b757fb2c34f..5cf08991869 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
@@ -27,6 +27,8 @@ import java.io.Writer;
import java.util.ArrayList;
import java.util.StringTokenizer;
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -41,15 +43,12 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
/**
* A JUnit test to test the Map-Reduce framework's support for the
* "mark-reset" functionality in Reduce Values Iterator
*/
-public class TestValueIterReset {
+public class TestValueIterReset extends TestCase {
private static final int NUM_MAPS = 1;
private static final int NUM_TESTS = 4;
private static final int NUM_VALUES = 40;
@@ -519,7 +518,6 @@ public class TestValueIterReset {
}
}
- @Test
public void testValueIterReset() {
try {
Configuration conf = new Configuration();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
index 308b7775a67..4d84fa9e108 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.mapreduce;
-import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -27,6 +26,7 @@ import static org.mockito.Mockito.doNothing;
import java.io.IOException;
import java.nio.ByteBuffer;
+import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
@@ -44,7 +44,8 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.junit.Test;
-public class TestYarnClientProtocolProvider {
+public class TestYarnClientProtocolProvider extends TestCase {
+
private static final RecordFactory recordFactory = RecordFactoryProvider.
getRecordFactory(null);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
index 789ed98193e..f24dffe2655 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
@@ -18,24 +18,22 @@
package org.apache.hadoop.mapreduce.lib.aggregate;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapred.Utils;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.junit.Test;
+import junit.framework.TestCase;
+import java.io.*;
import java.text.NumberFormat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-public class TestMapReduceAggregates {
+public class TestMapReduceAggregates extends TestCase {
private static NumberFormat idFormat = NumberFormat.getInstance();
static {
@@ -43,7 +41,7 @@ public class TestMapReduceAggregates {
idFormat.setGroupingUsed(false);
}
- @Test
+
public void testAggregates() throws Exception {
launch();
}
@@ -124,4 +122,11 @@ public class TestMapReduceAggregates {
fs.delete(OUTPUT_DIR, true);
fs.delete(INPUT_DIR, true);
}
+
+ /**
+ * Launches all the tasks in order.
+ */
+ public static void main(String[] argv) throws Exception {
+ launch();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java
index 014855f7d6a..bff25d20038 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java
@@ -19,15 +19,14 @@ package org.apache.hadoop.mapreduce.lib.db;
import java.io.IOException;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-
-public class TestDBOutputFormat {
+public class TestDBOutputFormat extends TestCase {
+
private String[] fieldNames = new String[] { "id", "name", "value" };
private String[] nullFieldNames = new String[] { null, null, null };
private String expected = "INSERT INTO hadoop_output " +
@@ -36,17 +35,15 @@ public class TestDBOutputFormat {
private DBOutputFormat format
= new DBOutputFormat();
-
- @Test
- public void testConstructQuery() {
+
+ public void testConstructQuery() {
String actual = format.constructQuery("hadoop_output", fieldNames);
assertEquals(expected, actual);
actual = format.constructQuery("hadoop_output", nullFieldNames);
assertEquals(nullExpected, actual);
}
-
- @Test
+
public void testSetOutput() throws IOException {
Job job = Job.getInstance(new Configuration());
DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java
index 8b5d907dcdc..e50aba4f462 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java
@@ -17,15 +17,15 @@
*/
package org.apache.hadoop.mapreduce.lib.db;
-import org.junit.Test;
-
+import java.io.IOException;
+import java.math.BigDecimal;
import java.sql.SQLException;
+import java.util.ArrayList;
import java.util.List;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import junit.framework.TestCase;
-public class TestIntegerSplitter {
+public class TestIntegerSplitter extends TestCase {
private long [] toLongArray(List<Long> in) {
long [] out = new long[in.size()];
for (int i = 0; i < in.size(); i++) {
@@ -70,14 +70,12 @@ public class TestIntegerSplitter {
}
}
- @Test
public void testEvenSplits() throws SQLException {
List<Long> splits = new IntegerSplitter().split(10, 0, 100);
long [] expected = { 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
assertLongArrayEquals(expected, toLongArray(splits));
}
- @Test
public void testOddSplits() throws SQLException {
List<Long> splits = new IntegerSplitter().split(10, 0, 95);
long [] expected = { 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 95 };
@@ -85,14 +83,12 @@ public class TestIntegerSplitter {
}
- @Test
public void testSingletonSplit() throws SQLException {
List<Long> splits = new IntegerSplitter().split(1, 5, 5);
long [] expected = { 5, 5 };
assertLongArrayEquals(expected, toLongArray(splits));
}
- @Test
public void testSingletonSplit2() throws SQLException {
// Same test, but overly-high numSplits
List<Long> splits = new IntegerSplitter().split(5, 5, 5);
@@ -100,7 +96,6 @@ public class TestIntegerSplitter {
assertLongArrayEquals(expected, toLongArray(splits));
}
- @Test
public void testTooManySplits() throws SQLException {
List<Long> splits = new IntegerSplitter().split(5, 3, 5);
long [] expected = { 3, 4, 5 };
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java
index e16f4234877..045e3a1b1f6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java
@@ -17,16 +17,15 @@
*/
package org.apache.hadoop.mapreduce.lib.db;
-import org.junit.Test;
-
+import java.io.IOException;
import java.math.BigDecimal;
import java.sql.SQLException;
+import java.util.ArrayList;
import java.util.List;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import junit.framework.TestCase;
-public class TestTextSplitter {
+public class TestTextSplitter extends TestCase {
public String formatArray(Object [] ar) {
StringBuilder sb = new StringBuilder();
@@ -63,56 +62,48 @@ public class TestTextSplitter {
}
}
- @Test
public void testStringConvertEmpty() {
TextSplitter splitter = new TextSplitter();
BigDecimal emptyBigDec = splitter.stringToBigDecimal("");
assertEquals(BigDecimal.ZERO, emptyBigDec);
}
- @Test
public void testBigDecConvertEmpty() {
TextSplitter splitter = new TextSplitter();
String emptyStr = splitter.bigDecimalToString(BigDecimal.ZERO);
assertEquals("", emptyStr);
}
- @Test
public void testConvertA() {
TextSplitter splitter = new TextSplitter();
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("A"));
assertEquals("A", out);
}
- @Test
public void testConvertZ() {
TextSplitter splitter = new TextSplitter();
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("Z"));
assertEquals("Z", out);
}
- @Test
public void testConvertThreeChars() {
TextSplitter splitter = new TextSplitter();
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("abc"));
assertEquals("abc", out);
}
- @Test
public void testConvertStr() {
TextSplitter splitter = new TextSplitter();
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("big str"));
assertEquals("big str", out);
}
- @Test
public void testConvertChomped() {
TextSplitter splitter = new TextSplitter();
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("AVeryLongStringIndeed"));
assertEquals("AVeryLon", out);
}
- @Test
public void testAlphabetSplit() throws SQLException {
// This should give us 25 splits, one per letter.
TextSplitter splitter = new TextSplitter();
@@ -122,7 +113,6 @@ public class TestTextSplitter {
assertArrayEquals(expected, splits.toArray(new String [0]));
}
- @Test
public void testCommonPrefix() throws SQLException {
// Splits between 'Hand' and 'Hardy'
TextSplitter splitter = new TextSplitter();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
index 6f9183ab21b..91070f89c42 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
@@ -18,19 +18,15 @@
package org.apache.hadoop.mapreduce.lib.fieldsel;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
-import org.junit.Test;
+import junit.framework.TestCase;
import java.text.NumberFormat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-public class TestMRFieldSelection {
+public class TestMRFieldSelection extends TestCase {
private static NumberFormat idFormat = NumberFormat.getInstance();
static {
@@ -38,7 +34,6 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
idFormat.setGroupingUsed(false);
}
- @Test
public void testFieldSelection() throws Exception {
launch();
}
@@ -119,4 +114,11 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
System.out.println("ExpectedData:");
System.out.println(expectedOutput.toString());
}
+
+ /**
+ * Launches all the tasks in order.
+ */
+ public static void main(String[] argv) throws Exception {
+ launch();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java
index cbf9d183ef2..f0b3d57486c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java
@@ -18,12 +18,11 @@
package org.apache.hadoop.mapreduce.lib.input;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
@@ -32,18 +31,12 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
-import java.io.IOException;
-import java.util.Random;
+import junit.framework.TestCase;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-public class TestMRSequenceFileAsBinaryInputFormat {
+public class TestMRSequenceFileAsBinaryInputFormat extends TestCase {
private static final int RECORDS = 10000;
- @Test
public void testBinary() throws IOException, InterruptedException {
Job job = Job.getInstance();
FileSystem fs = FileSystem.getLocal(job.getConfiguration());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java
index 335ce050d82..2d03c2dd96a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.mapreduce.lib.input;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
+import java.util.*;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
@@ -33,19 +31,12 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
+import org.apache.hadoop.conf.*;
-import java.util.BitSet;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-
-public class TestMRSequenceFileAsTextInputFormat {
+public class TestMRSequenceFileAsTextInputFormat extends TestCase {
private static int MAX_LENGTH = 10000;
private static Configuration conf = new Configuration();
- @Test
public void testFormat() throws Exception {
Job job = Job.getInstance(conf);
FileSystem fs = FileSystem.getLocal(conf);
@@ -121,4 +112,8 @@ public class TestMRSequenceFileAsTextInputFormat {
}
}
+
+ public static void main(String[] args) throws Exception {
+ new TestMRSequenceFileAsTextInputFormat().testFormat();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java
index 89aa7b23057..edf7e1ad10d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java
@@ -18,14 +18,14 @@
package org.apache.hadoop.mapreduce.lib.input;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.*;
+
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
@@ -34,15 +34,10 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
+import org.apache.hadoop.conf.*;
-import java.io.IOException;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestMRSequenceFileInputFilter {
- private static final Log LOG =
+public class TestMRSequenceFileInputFilter extends TestCase {
+ private static final Log LOG =
LogFactory.getLog(TestMRSequenceFileInputFilter.class.getName());
private static final int MAX_LENGTH = 15000;
@@ -118,8 +113,7 @@ public class TestMRSequenceFileInputFilter {
}
return count;
}
-
- @Test
+
public void testRegexFilter() throws Exception {
// set the filter class
LOG.info("Testing Regex Filter with pattern: \\A10*");
@@ -144,7 +138,6 @@ public class TestMRSequenceFileInputFilter {
fs.delete(inDir, true);
}
- @Test
public void testPercentFilter() throws Exception {
LOG.info("Testing Percent Filter with frequency: 1000");
// set the filter class
@@ -172,8 +165,7 @@ public class TestMRSequenceFileInputFilter {
// clean up
fs.delete(inDir, true);
}
-
- @Test
+
public void testMD5Filter() throws Exception {
// set the filter class
LOG.info("Testing MD5 Filter with frequency: 1000");
@@ -195,4 +187,9 @@ public class TestMRSequenceFileInputFilter {
// clean up
fs.delete(inDir, true);
}
+
+ public static void main(String[] args) throws Exception {
+ TestMRSequenceFileInputFilter filter = new TestMRSequenceFileInputFilter();
+ filter.testRegexFilter();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
index 477866f4e35..7b3878d9475 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
@@ -18,28 +18,17 @@
package org.apache.hadoop.mapreduce.lib.input;
+import java.io.*;
+import java.util.*;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.MapContext;
-import org.apache.hadoop.mapreduce.MapReduceTestUtil;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestNLineInputFormat {
+public class TestNLineInputFormat extends TestCase {
private static int MAX_LENGTH = 200;
private static Configuration conf = new Configuration();
@@ -56,8 +45,7 @@ public class TestNLineInputFormat {
private static Path workDir =
new Path(new Path(System.getProperty("test.build.data", "."), "data"),
"TestNLineInputFormat");
-
- @Test
+
public void testFormat() throws Exception {
Job job = Job.getInstance(conf);
Path file = new Path(workDir, "test.txt");
@@ -128,4 +116,8 @@ public class TestNLineInputFormat {
}
}
}
+
+ public static void main(String[] args) throws Exception {
+ new TestNLineInputFormat().testFormat();
+ }
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java
index 1173ea4fa47..d245bfd6cde 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java
@@ -19,6 +19,11 @@ package org.apache.hadoop.mapreduce.lib.join;
import java.io.IOException;
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+import junit.extensions.TestSetup;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
@@ -32,31 +37,23 @@ import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-public class TestJoinDatamerge {
+public class TestJoinDatamerge extends TestCase {
private static MiniDFSCluster cluster = null;
-
- @BeforeClass
- public static void setUp() throws Exception {
- Configuration conf = new Configuration();
- cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
- }
-
- @AfterClass
- public static void tearDown() throws Exception {
- if (cluster != null) {
- cluster.shutdown();
- }
+ public static Test suite() {
+ TestSetup setup = new TestSetup(new TestSuite(TestJoinDatamerge.class)) {
+ protected void setUp() throws Exception {
+ Configuration conf = new Configuration();
+ cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+ }
+ protected void tearDown() throws Exception {
+ if (cluster != null) {
+ cluster.shutdown();
+ }
+ }
+ };
+ return setup;
}
private static SequenceFile.Writer[] createWriters(Path testdir,
@@ -114,7 +111,7 @@ public class TestJoinDatamerge {
extends Mapper<IntWritable, V, IntWritable, IntWritable>{
protected final static IntWritable one = new IntWritable(1);
int srcs;
-
+
public void setup(Context context) {
srcs = context.getConfiguration().getInt("testdatamerge.sources", 0);
assertTrue("Invalid src count: " + srcs, srcs > 0);
@@ -126,7 +123,7 @@ public class TestJoinDatamerge {
protected final static IntWritable one = new IntWritable(1);
int srcs;
-
+
public void setup(Context context) {
srcs = context.getConfiguration().getInt("testdatamerge.sources", 0);
assertTrue("Invalid src count: " + srcs, srcs > 0);
@@ -275,12 +272,10 @@ public class TestJoinDatamerge {
base.getFileSystem(conf).delete(base, true);
}
- @Test
public void testSimpleInnerJoin() throws Exception {
joinAs("inner", InnerJoinMapChecker.class, InnerJoinReduceChecker.class);
}
- @Test
public void testSimpleOuterJoin() throws Exception {
joinAs("outer", OuterJoinMapChecker.class, OuterJoinReduceChecker.class);
}
@@ -327,13 +322,11 @@ public class TestJoinDatamerge {
}
return product;
}
-
- @Test
+
public void testSimpleOverride() throws Exception {
joinAs("override", OverrideMapChecker.class, OverrideReduceChecker.class);
}
- @Test
public void testNestedJoin() throws Exception {
// outer(inner(S1,...,Sn),outer(S1,...Sn))
final int SOURCES = 3;
@@ -429,7 +422,6 @@ public class TestJoinDatamerge {
}
- @Test
public void testEmptyJoin() throws Exception {
Configuration conf = new Configuration();
Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
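
The suite()/TestSetup construction above is the JUnit 3 replacement for @BeforeClass/@AfterClass: the wrapper's setUp and tearDown run once around the whole suite rather than once per test method. A minimal self-contained sketch, with a plain string standing in for the MiniDFSCluster and hypothetical names:

    import junit.extensions.TestSetup;
    import junit.framework.Test;
    import junit.framework.TestCase;
    import junit.framework.TestSuite;

    public class ExampleSuiteTest extends TestCase {
      private static String sharedResource;   // stand-in for an expensive fixture

      public static Test suite() {
        // The JUnit 3 runner prefers a static suite() method when one exists;
        // TestSetup wraps the suite so these hooks run once, not per test.
        return new TestSetup(new TestSuite(ExampleSuiteTest.class)) {
          protected void setUp() throws Exception { sharedResource = "up"; }
          protected void tearDown() throws Exception { sharedResource = null; }
        };
      }

      public void testResourceAvailable() {
        assertNotNull(sharedResource);
      }
    }
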
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java
index b6e76069d95..151bc875ad3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java
@@ -20,6 +20,11 @@ package org.apache.hadoop.mapreduce.lib.join;
import java.io.IOException;
import java.util.List;
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+import junit.extensions.TestSetup;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -31,14 +36,8 @@ import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-public class TestJoinProperties {
+public class TestJoinProperties extends TestCase {
private static MiniDFSCluster cluster = null;
final static int SOURCES = 3;
@@ -47,19 +46,21 @@ public class TestJoinProperties {
static Path[] src;
static Path base;
- @BeforeClass
- public static void setUp() throws Exception {
- Configuration conf = new Configuration();
- cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
- base = cluster.getFileSystem().makeQualified(new Path("/nested"));
- src = generateSources(conf);
- }
-
- @AfterClass
- public static void tearDown() throws Exception {
- if (cluster != null) {
- cluster.shutdown();
- }
+ public static Test suite() {
+ TestSetup setup = new TestSetup(new TestSuite(TestJoinProperties.class)) {
+ protected void setUp() throws Exception {
+ Configuration conf = new Configuration();
+ cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+ base = cluster.getFileSystem().makeQualified(new Path("/nested"));
+ src = generateSources(conf);
+ }
+ protected void tearDown() throws Exception {
+ if (cluster != null) {
+ cluster.shutdown();
+ }
+ }
+ };
+ return setup;
}
// Sources from 0 to srcs-2 have IntWritable key and IntWritable value
@@ -232,7 +233,6 @@ public class TestJoinProperties {
}
// outer(outer(A, B), C) == outer(A,outer(B, C)) == outer(A, B, C)
- @Test
public void testOuterAssociativity() throws Exception {
Configuration conf = new Configuration();
testExpr1(conf, "outer", TestType.OUTER_ASSOCIATIVITY, 33);
@@ -241,7 +241,6 @@ public class TestJoinProperties {
}
// inner(inner(A, B), C) == inner(A,inner(B, C)) == inner(A, B, C)
- @Test
public void testInnerAssociativity() throws Exception {
Configuration conf = new Configuration();
testExpr1(conf, "inner", TestType.INNER_ASSOCIATIVITY, 2);
@@ -250,7 +249,6 @@ public class TestJoinProperties {
}
// override(inner(A, B), A) == A
- @Test
public void testIdentity() throws Exception {
Configuration conf = new Configuration();
testExpr4(conf);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
index 093da266b95..d35941fc884 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
@@ -24,6 +24,8 @@ import java.io.DataOutputStream;
import java.util.Arrays;
import java.util.Random;
+import junit.framework.TestCase;
+
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
@@ -31,13 +33,8 @@ import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-public class TestJoinTupleWritable {
+public class TestJoinTupleWritable extends TestCase {
private TupleWritable makeTuple(Writable[] writs) {
Writable[] sub1 = { writs[1], writs[2] };
@@ -100,7 +97,6 @@ public class TestJoinTupleWritable {
return i;
}
- @Test
public void testIterable() throws Exception {
Random r = new Random();
Writable[] writs = {
@@ -122,7 +118,6 @@ public class TestJoinTupleWritable {
verifIter(writs, t, 0);
}
- @Test
public void testNestedIterable() throws Exception {
Random r = new Random();
Writable[] writs = {
@@ -141,7 +136,6 @@ public class TestJoinTupleWritable {
assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
- @Test
public void testWritable() throws Exception {
Random r = new Random();
Writable[] writs = {
@@ -165,7 +159,6 @@ public class TestJoinTupleWritable {
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
- @Test
public void testWideWritable() throws Exception {
Writable[] manyWrits = makeRandomWritables(131);
@@ -185,8 +178,7 @@ public class TestJoinTupleWritable {
assertEquals("All tuple data has not been read from the stream",
-1, in.read());
}
-
- @Test
+
public void testWideWritable2() throws Exception {
Writable[] manyWrits = makeRandomWritables(71);
@@ -209,7 +201,6 @@ public class TestJoinTupleWritable {
* Tests a tuple writable with more than 64 values whose set positions are
* spread far apart.
*/
- @Test
public void testSparseWideWritable() throws Exception {
Writable[] manyWrits = makeRandomWritables(131);
@@ -229,8 +220,7 @@ public class TestJoinTupleWritable {
assertEquals("All tuple data has not been read from the stream",
-1, in.read());
}
-
- @Test
+
public void testWideTuple() throws Exception {
Text emptyText = new Text("Should be empty");
Writable[] values = new Writable[64];
@@ -251,8 +241,7 @@ public class TestJoinTupleWritable {
}
}
}
-
- @Test
+
public void testWideTuple2() throws Exception {
Text emptyText = new Text("Should be empty");
Writable[] values = new Writable[64];
@@ -277,7 +266,6 @@ public class TestJoinTupleWritable {
/**
* Tests that we can write more than 64 values.
*/
- @Test
public void testWideTupleBoundary() throws Exception {
Text emptyText = new Text("Should not be set written");
Writable[] values = new Writable[65];
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
index 680e246b4e3..36cf1872ad4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
@@ -17,32 +17,23 @@
*/
package org.apache.hadoop.mapreduce.lib.join;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-
-public class TestWrappedRRClassloader {
+public class TestWrappedRRClassloader extends TestCase {
/**
* Tests that the class loader set by
* {@link Configuration#setClassLoader(ClassLoader)}
* is inherited by any {@link WrappedRecordReader}s created by
* {@link CompositeRecordReader}
*/
- @Test
public void testClassLoader() throws Exception {
Configuration conf = new Configuration();
Fake_ClassLoader classLoader = new Fake_ClassLoader();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
index 5a8aeda83be..2e40f72fdd2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
@@ -18,17 +18,12 @@
package org.apache.hadoop.mapreduce.lib.output;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+import java.util.Random;
+
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.DoubleWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.*;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.InvalidJobConfException;
import org.apache.hadoop.mapreduce.InputFormat;
@@ -43,22 +38,16 @@ import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.junit.Test;
-import java.io.IOException;
-import java.util.Random;
+import junit.framework.TestCase;
+import org.apache.commons.logging.*;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-public class TestMRSequenceFileAsBinaryOutputFormat {
+public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
private static final Log LOG =
LogFactory.getLog(TestMRSequenceFileAsBinaryOutputFormat.class.getName());
private static final int RECORDS = 10000;
-
- @Test
+
public void testBinary() throws IOException, InterruptedException {
Configuration conf = new Configuration();
Job job = Job.getInstance(conf);
@@ -155,8 +144,7 @@ public class TestMRSequenceFileAsBinaryOutputFormat {
assertEquals("Some records not found", RECORDS, count);
}
- @Test
- public void testSequenceOutputClassDefaultsToMapRedOutputClass()
+ public void testSequenceOutputClassDefaultsToMapRedOutputClass()
throws IOException {
Job job = Job.getInstance();
// Setting Random class to test getSequenceFileOutput{Key,Value}Class
@@ -184,8 +172,7 @@ public class TestMRSequenceFileAsBinaryOutputFormat {
SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job));
}
- @Test
- public void testcheckOutputSpecsForbidRecordCompression()
+ public void testcheckOutputSpecsForbidRecordCompression()
throws IOException {
Job job = Job.getInstance();
FileSystem fs = FileSystem.getLocal(job.getConfiguration());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java
index f83bc11a216..7be538ecf41 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java
@@ -22,14 +22,11 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BinaryComparable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
-public class TestBinaryPartitioner {
+public class TestBinaryPartitioner extends TestCase {
- @Test
public void testDefaultOffsets() {
Configuration conf = new Configuration();
BinaryPartitioner<?> partitioner =
@@ -53,8 +50,7 @@ public class TestBinaryPartitioner {
partition2 = partitioner.getPartition(key2, null, 10);
assertTrue(partition1 != partition2);
}
-
- @Test
+
public void testCustomOffsets() {
Configuration conf = new Configuration();
BinaryComparable key1 = new BytesWritable(new byte[] { 1, 2, 3, 4, 5 });
@@ -79,8 +75,7 @@ public class TestBinaryPartitioner {
partition2 = partitioner.getPartition(key2, null, 10);
assertEquals(partition1, partition2);
}
-
- @Test
+
public void testLowerBound() {
Configuration conf = new Configuration();
BinaryPartitioner.setLeftOffset(conf, 0);
@@ -92,8 +87,7 @@ public class TestBinaryPartitioner {
int partition2 = partitioner.getPartition(key2, null, 10);
assertTrue(partition1 != partition2);
}
-
- @Test
+
public void testUpperBound() {
Configuration conf = new Configuration();
BinaryPartitioner.setRightOffset(conf, 4);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java
index 4d05d13d445..6bad846f6d3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java
@@ -19,17 +19,14 @@ package org.apache.hadoop.mapreduce.lib.partition;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import junit.framework.TestCase;
-public class TestKeyFieldHelper {
+public class TestKeyFieldHelper extends TestCase {
private static final Log LOG = LogFactory.getLog(TestKeyFieldHelper.class);
/**
* Tests the key-field-helper's parse option.
*/
- @Test
public void testparseOption() throws Exception {
KeyFieldHelper helper = new KeyFieldHelper();
helper.setKeyFieldSeparator("\t");
@@ -215,7 +212,6 @@ public class TestKeyFieldHelper {
/**
* Tests the key-field-helper's getWordLengths.
*/
- @Test
public void testGetWordLengths() throws Exception {
KeyFieldHelper helper = new KeyFieldHelper();
helper.setKeyFieldSeparator("\t");
@@ -274,7 +270,6 @@ public class TestKeyFieldHelper {
/**
* Tests the key-field-helper's getStartOffset/getEndOffset.
*/
- @Test
public void testgetStartEndOffset() throws Exception {
KeyFieldHelper helper = new KeyFieldHelper();
helper.setKeyFieldSeparator("\t");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
index 00b415f32cb..9c2fb48d9bf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
@@ -19,16 +19,14 @@ package org.apache.hadoop.mapreduce.lib.partition;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import junit.framework.TestCase;
-public class TestMRKeyFieldBasedPartitioner {
+public class TestMRKeyFieldBasedPartitioner extends TestCase {
/**
* Tests that key-field-based partitioning works with an empty key.
*/
- @Test
public void testEmptyKey() throws Exception {
int numReducers = 10;
KeyFieldBasedPartitioner<Text, Text> kfbp =
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
index cc217da68fe..a3cd18c4baa 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
@@ -22,6 +22,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -33,11 +35,8 @@ import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-
-public class TestTotalOrderPartitioner {
+public class TestTotalOrderPartitioner extends TestCase {
private static final Text[] splitStrings = new Text[] {
// -inf // 0
@@ -100,7 +99,6 @@ public class TestTotalOrderPartitioner {
return p;
}
- @Test
public void testTotalOrderMemCmp() throws Exception {
TotalOrderPartitioner<Text,NullWritable> partitioner =
new TotalOrderPartitioner<Text,NullWritable>();
@@ -120,7 +118,6 @@ public class TestTotalOrderPartitioner {
}
}
- @Test
public void testTotalOrderBinarySearch() throws Exception {
TotalOrderPartitioner<Text,NullWritable> partitioner =
new TotalOrderPartitioner<Text,NullWritable>();
@@ -153,7 +150,6 @@ public class TestTotalOrderPartitioner {
}
}
- @Test
public void testTotalOrderCustomComparator() throws Exception {
TotalOrderPartitioner<Text,NullWritable> partitioner =
new TotalOrderPartitioner<Text,NullWritable>();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java
index 07b5d8b9f50..e1849a3ce9c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.util;
import java.io.File;
import java.io.IOException;
+import junit.framework.TestCase;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -28,27 +30,20 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
-import org.junit.Before;
import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
/**
* A test for MRAsyncDiskService.
*/
-public class TestMRAsyncDiskService {
+public class TestMRAsyncDiskService extends TestCase {
public static final Log LOG = LogFactory.getLog(TestMRAsyncDiskService.class);
private static String TEST_ROOT_DIR = new Path(System.getProperty(
"test.build.data", "/tmp")).toString();
- @Before
- public void setUp() {
+ @Override
+ protected void setUp() {
FileUtil.fullyDelete(new File(TEST_ROOT_DIR));
}
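
Unlike the suite-level TestSetup used elsewhere in this patch, overriding the protected setUp/tearDown hooks of TestCase, as this hunk does, gives per-test-method fixtures, the JUnit 3 counterpart of @Before/@After. A minimal sketch with hypothetical names, including the customary super calls:

    import java.io.File;
    import junit.framework.TestCase;

    public class ExampleLifecycleTest extends TestCase {
      private File scratch;

      @Override
      protected void setUp() throws Exception {
        super.setUp();                    // framework bookkeeping first
        scratch = File.createTempFile("example", ".tmp");
      }

      @Override
      protected void tearDown() throws Exception {
        scratch.delete();                 // release our fixture first
        super.tearDown();                 // then the framework's teardown
      }

      public void testScratchFileExists() {
        assertTrue(scratch.exists());     // setUp ran before this method
      }
    }
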
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java
index f68cc8310a6..aa769f85974 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2;
+import junit.framework.TestCase;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
@@ -28,25 +29,22 @@ import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import java.net.InetAddress;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
-import java.net.InetAddress;
import java.security.PrivilegedExceptionAction;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-public class TestMiniMRProxyUser {
+public class TestMiniMRProxyUser extends TestCase {
private MiniDFSCluster dfsCluster = null;
private MiniMRCluster mrCluster = null;
-
- @Before
- public void setUp() throws Exception {
+
+ protected void setUp() throws Exception {
+ super.setUp();
if (System.getProperty("hadoop.log.dir") == null) {
System.setProperty("hadoop.log.dir", "/tmp");
}
@@ -93,14 +91,15 @@ public class TestMiniMRProxyUser {
return mrCluster.createJobConf();
}
- @After
- public void tearDown() throws Exception {
+ @Override
+ protected void tearDown() throws Exception {
if (mrCluster != null) {
mrCluster.shutdown();
}
if (dfsCluster != null) {
dfsCluster.shutdown();
}
+ super.tearDown();
}
private void mrRun() throws Exception {
@@ -126,13 +125,11 @@ public class TestMiniMRProxyUser {
assertTrue(runJob.isComplete());
assertTrue(runJob.isSuccessful());
}
-
- @Test
+
public void __testCurrentUser() throws Exception {
mrRun();
}
- @Test
public void testValidProxyUser() throws Exception {
UserGroupInformation ugi = UserGroupInformation.createProxyUser("u1", UserGroupInformation.getLoginUser());
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@@ -145,7 +142,6 @@ public class TestMiniMRProxyUser {
});
}
- @Test
public void ___testInvalidProxyUser() throws Exception {
UserGroupInformation ugi = UserGroupInformation.createProxyUser("u2", UserGroupInformation.getLoginUser());
ugi.doAs(new PrivilegedExceptionAction<Void>() {
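
Because JUnit 3 selects tests purely by the "test" name prefix, the underscore-prefixed methods in this file (__testCurrentUser, ___testInvalidProxyUser) stay compiled but are never run; that is how a test is disabled here without JUnit 4's @Ignore. A minimal sketch of the effect, with hypothetical names:

    import junit.framework.TestCase;

    public class ExampleDisabledTest extends TestCase {
      public void testEnabled() { }      // runs: name starts with "test"
      public void __testDisabled() { }   // skipped: prefix breaks the naming pattern
    }
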
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java
index e90c509d7a8..b6947f3fc48 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2;
+import junit.framework.TestCase;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
@@ -27,22 +28,17 @@ import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
import java.io.IOException;
import java.net.InetAddress;
-import static org.junit.Assert.assertNull;
-
-public class TestNonExistentJob {
+public class TestNonExistentJob extends TestCase {
private MiniDFSCluster dfsCluster = null;
private MiniMRCluster mrCluster = null;
- @Before
- public void setUp() throws Exception {
+ protected void setUp() throws Exception {
+ super.setUp();
if (System.getProperty("hadoop.log.dir") == null) {
System.setProperty("hadoop.log.dir", "/tmp");
}
@@ -82,17 +78,17 @@ public class TestNonExistentJob {
return mrCluster.createJobConf();
}
- @After
- public void tearDown() throws Exception {
+ @Override
+ protected void tearDown() throws Exception {
if (mrCluster != null) {
mrCluster.shutdown();
}
if (dfsCluster != null) {
dfsCluster.shutdown();
}
+ super.tearDown();
}
- @Test
public void testGetInvalidJob() throws Exception {
RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0"));
assertNull(runJob);
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
index 860fb89cfcf..7b7901faad1 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
@@ -42,11 +42,6 @@ import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SkipBadRecords;
import org.apache.hadoop.mapred.Utils;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
public class TestStreamingBadRecords extends ClusterMapReduceTestCase
{
@@ -73,8 +68,7 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
utilTest.redirectIfAntJunit();
}
- @Before
- public void setUp() throws Exception {
+ protected void setUp() throws Exception {
Properties props = new Properties();
props.setProperty(JTConfig.JT_RETIREJOBS, "false");
props.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "false");
@@ -248,7 +242,6 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
}
*/
- @Test
public void testNoOp() {
// Added to avoid warnings when running this disabled test
}