Revert "MAPREDUCE-6520. Migrate MR Client test cases part 1."

This reverts commit 70e232f36e.
Author: Akira Ajisaka, 2016-03-11 22:55:01 +09:00
Parent: 70e232f36e
Commit: 097a03fb6d
25 changed files with 68 additions and 286 deletions
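The per-file diffs below all move in the same direction: the JUnit 4 constructs introduced by the original migration (@Test, @Before/@After, static org.junit.Assert imports) are removed, and the tests return to the JUnit 3 style in which classes extend junit.framework.TestCase, directly or via HadoopTestCase. For orientation, here is a minimal hypothetical test in the JUnit 4 style that this revert strips out; the class and its contents are illustrative only, not part of the patch:

    import static org.junit.Assert.assertEquals;

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class TestExample {

      private StringBuilder buffer;

      @Before                 // replaces overriding TestCase.setUp()
      public void setUp() {
        buffer = new StringBuilder();
      }

      @After                  // replaces overriding TestCase.tearDown()
      public void tearDown() {
        buffer = null;
      }

      @Test                   // JUnit 4 finds tests by annotation, not by method name
      public void testAppend() {
        buffer.append("ab");
        assertEquals("ab", buffer.toString());
      }
    }

In the restored JUnit 3 style the same class would extend junit.framework.TestCase, override protected setUp()/tearDown() and chain to super, and rely on the test* naming convention plus the assert methods inherited from TestCase.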


@@ -17,30 +17,17 @@
*/
package org.apache.hadoop.conf;
-import org.apache.hadoop.fs.FileUtil;
+import org.junit.Assert;
+import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.mapred.TextOutputFormat;
-import org.apache.hadoop.mapred.Utils;
-import org.junit.Test;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import java.io.*;
/**
* This testcase tests that a JobConf without default values submits jobs
@@ -53,7 +40,6 @@ public TestNoDefaultsJobConf() throws IOException {
super(HadoopTestCase.CLUSTER_MR, HadoopTestCase.DFS_FS, 1, 1);
}
-@Test
public void testNoDefaults() throws Exception {
JobConf configuration = new JobConf();
assertTrue(configuration.get("hadoop.tmp.dir", null) != null);


@@ -18,12 +18,13 @@
package org.apache.hadoop.mapred;
-import org.apache.hadoop.fs.FileSystem;
+import junit.framework.TestCase;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.After;
-import org.junit.Before;
-import java.io.File;
import java.io.IOException;
/**
@@ -37,7 +38,7 @@
* Job Configurations should be created using a configuration returned by the
* 'createJobConf()' method.
*/
-public abstract class HadoopTestCase {
+public abstract class HadoopTestCase extends TestCase {
public static final int LOCAL_MR = 1;
public static final int CLUSTER_MR = 2;
public static final int LOCAL_FS = 4;
@@ -139,8 +140,8 @@ public boolean isLocalFS() {
*
* @throws Exception
*/
-@Before
-public void setUp() throws Exception {
+protected void setUp() throws Exception {
+super.setUp();
if (localFS) {
fileSystem = FileSystem.getLocal(new JobConf());
}
@@ -163,8 +164,7 @@ public void setUp() throws Exception {
*
* @throws Exception
*/
-@After
-public void tearDown() throws Exception {
+protected void tearDown() throws Exception {
try {
if (mrCluster != null) {
mrCluster.shutdown();
@@ -181,6 +181,7 @@ public void tearDown() throws Exception {
catch (Exception ex) {
System.out.println(ex);
}
+super.tearDown();
}
/**

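With this change HadoopTestCase is again a junit.framework.TestCase, so every test that extends it picks up the JUnit 3 lifecycle: protected setUp()/tearDown() overrides that chain to super, and test methods discovered by the test* naming convention. A minimal hypothetical subclass in the restored style (the test body is invented for illustration; the constructor arguments and createJobConf() come from HadoopTestCase itself):

    import java.io.IOException;

    import org.apache.hadoop.mapred.HadoopTestCase;
    import org.apache.hadoop.mapred.JobConf;

    public class TestLocalRunnerExample extends HadoopTestCase {

      public TestLocalRunnerExample() throws IOException {
        // Local job runner and local file system, one tracker and one data node.
        super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
      }

      @Override
      protected void setUp() throws Exception {
        super.setUp();      // HadoopTestCase brings up the (local) cluster here
      }

      @Override
      protected void tearDown() throws Exception {
        super.tearDown();   // and tears it down here
      }

      // No @Test annotation: JUnit 3 runs this because of the "test" prefix.
      public void testJobConfIsPopulated() throws Exception {
        JobConf conf = createJobConf();
        assertNotNull(conf.get("hadoop.tmp.dir"));
      }
    }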

@@ -34,13 +34,6 @@
import java.io.IOException;
import java.io.DataOutputStream;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import org.junit.Before;
-import org.junit.After;
-import org.junit.Test;
/**
* Base class to test Job end notification in local and cluster mode.
*
@@ -147,19 +140,17 @@ protected JobConf createJobConf() {
return conf;
}
-@Before
-public void setUp() throws Exception {
+protected void setUp() throws Exception {
super.setUp();
startHttpServer();
}
-@After
-public void tearDown() throws Exception {
+protected void tearDown() throws Exception {
stopHttpServer();
super.tearDown();
}
-@Test
public void testMR() throws Exception {
System.out.println(launchWordCount(this.createJobConf(),


@@ -30,16 +30,12 @@
import java.io.OutputStream;
import java.util.Iterator;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
public class TestFileOutputFormat extends HadoopTestCase {
public TestFileOutputFormat() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
-@Test
public void testCustomFile() throws Exception {
Path inDir = new Path("testing/fileoutputformat/input");
Path outDir = new Path("testing/fileoutputformat/output");


@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.mapred;
+import java.io.File;
+import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
@@ -24,16 +27,6 @@
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.mapred.SortedRanges.Range;
import org.apache.hadoop.mapreduce.TaskType;
-import org.junit.After;
-import org.junit.Test;
-import java.io.File;
-import java.io.IOException;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
public class TestTaskCommit extends HadoopTestCase {
Path rootDir =
@@ -83,13 +76,12 @@ public TestTaskCommit() throws IOException {
super(LOCAL_MR, LOCAL_FS, 1, 1);
}
-@After
+@Override
public void tearDown() throws Exception {
super.tearDown();
FileUtil.fullyDelete(new File(rootDir.toString()));
}
-@Test
public void testCommitFail() throws IOException {
final Path inDir = new Path(rootDir, "./input");
final Path outDir = new Path(rootDir, "./output");
@@ -190,7 +182,6 @@ public ProtocolSignature getProtocolSignature(String protocol,
*
* @throws Exception
*/
-@Test
public void testTaskCleanupDoesNotCommit() throws Exception {
// Mimic a job with a special committer that does not cleanup
// files when a task fails.
@@ -237,27 +228,23 @@ public void testTaskCleanupDoesNotCommit() throws Exception {
assertTrue("Task did not succeed", umbilical.taskDone);
}
-@Test
public void testCommitRequiredForMapTask() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
assertTrue("MapTask should need commit", testTask.isCommitRequired());
}
-@Test
public void testCommitRequiredForReduceTask() throws Exception {
Task testTask = createDummyTask(TaskType.REDUCE);
assertTrue("ReduceTask should need commit", testTask.isCommitRequired());
}
-@Test
public void testCommitNotRequiredForJobSetup() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
testTask.setJobSetupTask();
assertFalse("Job setup task should not need commit",
testTask.isCommitRequired());
}
-@Test
public void testCommitNotRequiredForJobCleanup() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
testTask.setJobCleanupTask();
@@ -265,7 +252,6 @@ public void testCommitNotRequiredForJobCleanup() throws Exception {
testTask.isCommitRequired());
}
-@Test
public void testCommitNotRequiredForTaskCleanup() throws Exception {
Task testTask = createDummyTask(TaskType.REDUCE);
testTask.setTaskCleanupTask();


@@ -27,8 +27,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobConf;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
/**
* HadoopTestCase that tests the local job runner.
@@ -61,7 +59,6 @@ public TestLocalJobControl() throws IOException {
* object. Finally, it creates a thread to run the JobControl object and
* monitors/reports the job states.
*/
-@Test
public void testLocalJobControlDataCopy() throws Exception {
FileSystem fs = FileSystem.get(createJobConf());

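The javadoc above describes the pattern TestLocalJobControl exercises: chain dependent jobs with the old mapred jobcontrol API and drive the JobControl object from its own thread until everything finishes. A rough sketch of that pattern, under the assumption that the Job(JobConf, ArrayList) constructor and the addDependingJob/allFinished/stop methods behave as in the classic API; this is illustrative, not code taken from the test:

    import java.util.ArrayList;

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.jobcontrol.Job;
    import org.apache.hadoop.mapred.jobcontrol.JobControl;

    public class JobControlSketch {
      public static void runCopyChain(JobConf firstCopy, JobConf secondCopy)
          throws Exception {
        Job first = new Job(firstCopy, new ArrayList<Job>());
        Job second = new Job(secondCopy, new ArrayList<Job>());
        second.addDependingJob(first);       // second starts only after first succeeds

        JobControl control = new JobControl("data-copy-group");
        control.addJob(first);
        control.addJob(second);

        // JobControl is a Runnable; run it in its own thread and poll its state.
        Thread runner = new Thread(control);
        runner.start();
        while (!control.allFinished()) {
          Thread.sleep(500);
        }
        control.stop();
      }
    }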

@@ -21,29 +21,12 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reducer;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.RunningJob;
-import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.mapred.TextOutputFormat;
-import org.junit.Test;
+import org.apache.hadoop.mapred.*;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Iterator;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
public class TestChainMapReduce extends HadoopTestCase {
private static Path getFlagDir(boolean local) {
@@ -84,7 +67,6 @@ public TestChainMapReduce() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
-@Test
public void testChain() throws Exception {
Path inDir = new Path("testing/chain/input");
Path outDir = new Path("testing/chain/output");


@@ -36,8 +36,6 @@
import org.apache.hadoop.mapred.Utils;
import org.junit.After;
import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
import java.io.BufferedReader;
import java.io.File;


@@ -17,6 +17,11 @@
*/
package org.apache.hadoop.mapred.lib;
+import java.io.IOException;
+import java.util.Map;
+import junit.framework.TestCase;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
@@ -25,19 +30,12 @@
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
-import org.junit.Test;
-import java.io.IOException;
-import java.util.Map;
-import static org.junit.Assert.assertEquals;
/**
* @see TestDelegatingInputFormat
*/
-public class TestMultipleInputs {
+public class TestMultipleInputs extends TestCase {
-@Test
public void testAddInputPathWithFormat() {
final JobConf conf = new JobConf();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class);
@@ -50,7 +48,6 @@ public void testAddInputPathWithFormat() {
.getClass());
}
-@Test
public void testAddInputPathWithMapper() {
final JobConf conf = new JobConf();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,


@@ -24,23 +24,7 @@
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
-import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reducer;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.RunningJob;
-import org.apache.hadoop.mapred.SequenceFileOutputFormat;
-import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.mapred.TextOutputFormat;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.hadoop.mapred.*;
import java.io.BufferedReader;
import java.io.DataOutputStream;
@@ -48,23 +32,17 @@
import java.io.InputStreamReader;
import java.util.Iterator;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
public class TestMultipleOutputs extends HadoopTestCase {
public TestMultipleOutputs() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
-@Test
public void testWithoutCounters() throws Exception {
_testMultipleOutputs(false);
_testMOWithJavaSerialization(false);
}
-@Test
public void testWithCounters() throws Exception {
_testMultipleOutputs(true);
_testMOWithJavaSerialization(true);
@@ -84,7 +62,6 @@ private Path getDir(Path dir) {
return dir;
}
-@Before
public void setUp() throws Exception {
super.setUp();
Path rootDir = getDir(ROOT_DIR);
@@ -98,7 +75,6 @@ public void setUp() throws Exception {
}
}
-@After
public void tearDown() throws Exception {
Path rootDir = getDir(ROOT_DIR);

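TestMultipleOutputs (and its new-API sibling TestMRMultipleOutputs further down) drives the MultipleOutputs helper with and without per-output counters, writing to named outputs called "text" and "sequence". A small sketch of the driver-side configuration that style of test relies on; the job wiring around it is omitted and the method is hypothetical:

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.SequenceFileOutputFormat;
    import org.apache.hadoop.mapred.TextOutputFormat;
    import org.apache.hadoop.mapred.lib.MultipleOutputs;

    public class MultipleOutputsSketch {
      // Declare the extra named outputs the map/reduce code may write to,
      // and (optionally) enable a counter per named output.
      public static void configure(JobConf conf) {
        MultipleOutputs.addNamedOutput(conf, "text",
            TextOutputFormat.class, LongWritable.class, Text.class);
        MultipleOutputs.addNamedOutput(conf, "sequence",
            SequenceFileOutputFormat.class, LongWritable.class, Text.class);
        MultipleOutputs.setCountersEnabled(conf, true);
      }
    }

Inside the mapper or reducer, the task then instantiates MultipleOutputs from the JobConf, writes through getCollector("text", reporter), and closes it when done; the counters toggle is what the testWithCounters/testWithoutCounters pair exercises.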

@@ -22,45 +22,26 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reducer;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.RunningJob;
-import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;
-import org.junit.Test;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Iterator;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
public class TestMultithreadedMapRunner extends HadoopTestCase {
public TestMultithreadedMapRunner() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
-@Test
public void testOKRun() throws Exception {
run(false, false);
}
-@Test
public void testIOExRun() throws Exception {
run(true, false);
}
-@Test
public void testRuntimeExRun() throws Exception {
run(false, true);
}


@@ -30,13 +30,6 @@
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobConf;
import org.apache.log4j.Level;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
public class TestChild extends HadoopTestCase {
private static String TEST_ROOT_DIR =
@@ -152,8 +145,7 @@ private Job submitAndValidateJob(JobConf conf, int numMaps, int numReds,
+ numPartFiles, list.length == numPartFiles);
return job;
}
-@Test
public void testChild() throws Exception {
try {
submitAndValidateJob(createJobConf(), 1, 1, true);


@@ -30,9 +30,6 @@
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
@Ignore
public class TestNoJobSetupCleanup extends HadoopTestCase {
private static String TEST_ROOT_DIR =
@@ -71,8 +68,7 @@ private Job submitAndValidateJob(Configuration conf, int numMaps, int numReds)
+ numPartFiles, list.length == numPartFiles);
return job;
}
-@Test
public void testNoJobSetupCleanup() throws Exception {
try {
Configuration conf = createJobConf();


@@ -33,8 +33,6 @@
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
/**
* Tests context api and {@link StatusReporter#getProgress()} via


@@ -29,10 +29,6 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
/**
* Tests error conditions in ChainMapper/ChainReducer.
@@ -55,7 +51,6 @@ public TestChainErrors() throws IOException {
*
* @throws Exception
*/
-@Test
public void testChainSubmission() throws Exception {
Configuration conf = createJobConf();
@@ -94,7 +89,6 @@ public void testChainSubmission() throws Exception {
*
* @throws Exception
*/
-@Test
public void testChainFail() throws Exception {
Configuration conf = createJobConf();
@@ -120,7 +114,6 @@ public void testChainFail() throws Exception {
*
* @throws Exception
*/
-@Test
public void testReducerFail() throws Exception {
Configuration conf = createJobConf();
@@ -146,7 +139,6 @@ public void testReducerFail() throws Exception {
*
* @throws Exception
*/
-@Test
public void testChainMapNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";
@@ -171,7 +163,6 @@ public void testChainMapNoOuptut() throws Exception {
*
* @throws Exception
*/
-@Test
public void testChainReduceNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";


@@ -30,11 +30,6 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
-import org.junit.Test;
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
public class TestMapReduceChain extends HadoopTestCase {
@@ -68,7 +63,6 @@ public TestMapReduceChain() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
-@Test
public void testChain() throws Exception {
Path inDir = new Path(localPathRoot, "testing/chain/input");
Path outDir = new Path(localPathRoot, "testing/chain/output");


@@ -26,9 +26,6 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper;
import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
import java.io.IOException;
@@ -45,7 +42,6 @@ public TestSingleElementChain() throws IOException {
}
// test chain mapper and reducer by adding single mapper and reducer to chain
-@Test
public void testNoChain() throws Exception {
Path inDir = new Path(localPathRoot, "testing/chain/input");
Path outDir = new Path(localPathRoot, "testing/chain/output");

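TestSingleElementChain builds a "chain" of exactly one TokenCounterMapper and one IntSumReducer through the new-API ChainMapper/ChainReducer helpers, which is effectively a word count. A sketch of that wiring, as a reference for the chain tests in this patch; the job setup shown here is an illustration under assumed defaults, not code taken from the test:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.chain.ChainMapper;
    import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;
    import org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper;
    import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;

    public class SingleElementChainSketch {
      // Builds a one-mapper/one-reducer chain: a TokenCounterMapper feeding
      // an IntSumReducer, i.e. plain word count expressed as a chain.
      public static Job buildJob(Configuration conf) throws Exception {
        Job job = Job.getInstance(conf, "single-element-chain");

        ChainMapper.addMapper(job, TokenCounterMapper.class,
            LongWritable.class, Text.class,      // input key/value of this mapper
            Text.class, IntWritable.class,       // its output key/value
            new Configuration(false));
        ChainReducer.setReducer(job, IntSumReducer.class,
            Text.class, IntWritable.class,       // reducer input key/value
            Text.class, IntWritable.class,       // reducer output key/value
            new Configuration(false));
        return job;
      }
    }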

@@ -18,40 +18,25 @@
package org.apache.hadoop.mapreduce.lib.db;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.TaskCounter;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.StringUtils;
-import org.hsqldb.server.Server;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import java.sql.*;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.sql.Connection;
-import java.sql.Date;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
//import org.apache.hadoop.examples.DBCountPageView;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.HadoopTestCase;
+import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.lib.db.*;
+import org.apache.hadoop.mapreduce.lib.input.*;
+import org.apache.hadoop.mapreduce.lib.output.*;
+import org.apache.hadoop.util.StringUtils;
+import org.hsqldb.server.Server;
/**
* Test aspects of DataDrivenDBInputFormat
@@ -124,13 +109,11 @@ private void initialize(String driverClassName, String url)
createConnection(driverClassName, url);
}
-@Before
public void setUp() throws Exception {
initialize(DRIVER_CLASS, DB_URL);
super.setUp();
}
-@After
public void tearDown() throws Exception {
super.tearDown();
shutdown();
@@ -187,7 +170,6 @@ public void map(Object k, Object v, Context c)
}
}
-@Test
public void testDateSplits() throws Exception {
Statement s = connection.createStatement();
final String DATE_TABLE = "datetable";


@@ -38,8 +38,6 @@
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Before;
import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
/**
* @see TestDelegatingInputFormat
@@ -141,7 +139,7 @@ public void testDoMultipleInputs() throws IOException {
assertTrue(output.readLine().equals("e 2"));
}
-@Test
+@SuppressWarnings("unchecked")
public void testAddInputPathWithFormat() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class);
@@ -154,7 +152,7 @@ public void testAddInputPathWithFormat() throws IOException {
.getClass());
}
-@Test
+@SuppressWarnings("unchecked")
public void testAddInputPathWithMapper() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,


@@ -33,9 +33,6 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
/**
* This class performs unit test for Job/JobControl classes.
@@ -123,8 +120,7 @@ private void waitTillAllFinished(JobControl theControl) {
} catch (Exception e) {}
}
}
-@Test
public void testJobControlWithFailJob() throws Exception {
LOG.info("Starting testJobControlWithFailJob");
Configuration conf = createJobConf();
@@ -148,7 +144,6 @@ public void testJobControlWithFailJob() throws Exception {
theControl.stop();
}
-@Test
public void testJobControlWithKillJob() throws Exception {
LOG.info("Starting testJobControlWithKillJob");
@@ -187,7 +182,6 @@ public void testJobControlWithKillJob() throws Exception {
theControl.stop();
}
-@Test
public void testJobControl() throws Exception {
LOG.info("Starting testJobControl");


@@ -23,33 +23,23 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.MapReduceTestUtil;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.junit.Test;
+import org.apache.hadoop.mapreduce.*;
import java.io.IOException;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
public class TestMultithreadedMapper extends HadoopTestCase {
public TestMultithreadedMapper() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
-@Test
public void testOKRun() throws Exception {
run(false, false);
}
-@Test
public void testIOExRun() throws Exception {
run(true, false);
}
-@Test
public void testRuntimeExRun() throws Exception {
run(false, true);
}


@@ -33,11 +33,6 @@
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
/**
* A JUnit test to test Map-Reduce job committer.
@@ -59,15 +54,15 @@ public TestJobOutputCommitter() throws IOException {
private FileSystem fs;
private Configuration conf = null;
-@Before
-public void setUp() throws Exception {
+@Override
+protected void setUp() throws Exception {
super.setUp();
conf = createJobConf();
fs = getFileSystem();
}
-@After
-public void tearDown() throws Exception {
+@Override
+protected void tearDown() throws Exception {
fs.delete(new Path(TEST_ROOT_DIR), true);
super.tearDown();
}
@@ -224,7 +219,6 @@ private void testKilledJob(String fileName,
*
* @throws Exception
*/
-@Test
public void testDefaultCleanupAndAbort() throws Exception {
// check with a successful job
testSuccessfulJob(FileOutputCommitter.SUCCEEDED_FILE_NAME,
@@ -244,7 +238,6 @@ public void testDefaultCleanupAndAbort() throws Exception {
*
* @throws Exception
*/
-@Test
public void testCustomAbort() throws Exception {
// check with a successful job
testSuccessfulJob(FileOutputCommitter.SUCCEEDED_FILE_NAME,
@@ -271,7 +264,6 @@ public void testCustomAbort() throws Exception {
* compatibility testing.
* @throws Exception
*/
-@Test
public void testCustomCleanup() throws Exception {
// check with a successful job
testSuccessfulJob(CUSTOM_CLEANUP_FILE_NAME,


@@ -27,36 +27,23 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapreduce.CounterGroup;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.MapReduceTestUtil;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.hadoop.mapreduce.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
public class TestMRMultipleOutputs extends HadoopTestCase {
public TestMRMultipleOutputs() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
-@Test
public void testWithoutCounters() throws Exception {
_testMultipleOutputs(false);
_testMOWithJavaSerialization(false);
}
-@Test
public void testWithCounters() throws Exception {
_testMultipleOutputs(true);
_testMOWithJavaSerialization(true);
@@ -70,7 +57,6 @@ public void testWithCounters() throws Exception {
private static String TEXT = "text";
private static String SEQUENCE = "sequence";
-@Before
public void setUp() throws Exception {
super.setUp();
Configuration conf = createJobConf();
@@ -78,7 +64,6 @@ public void setUp() throws Exception {
fs.delete(ROOT_DIR, true);
}
-@After
public void tearDown() throws Exception {
Configuration conf = createJobConf();
FileSystem fs = FileSystem.get(conf);


@@ -18,6 +18,8 @@
package org.apache.hadoop.mapreduce.lib.partition;
+import java.io.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
@@ -30,15 +32,6 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
-import org.junit.Test;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
@@ -52,8 +45,8 @@ public TestMRKeyFieldBasedComparator() throws IOException {
conf = createJobConf();
conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
}
private void testComparator(String keySpec, int expect)
throws Exception {
String root = System.getProperty("test.build.data", "/tmp");
Path inDir = new Path(root, "test_cmp/in");
@@ -100,8 +93,7 @@ private void testComparator(String keySpec, int expect)
reader.close();
}
}
-@Test
public void testBasicUnixComparator() throws Exception {
testComparator("-k1,1n", 1);
testComparator("-k2,2n", 1);
@@ -125,7 +117,7 @@ public void testBasicUnixComparator() throws Exception {
byte[] line1_bytes = line1.getBytes();
byte[] line2_bytes = line2.getBytes();
-private void testWithoutMRJob(String keySpec, int expect) throws Exception {
+public void testWithoutMRJob(String keySpec, int expect) throws Exception {
KeyFieldBasedComparator<Void, Void> keyFieldCmp =
new KeyFieldBasedComparator<Void, Void>();
conf.set("mapreduce.partition.keycomparator.options", keySpec);


@@ -27,12 +27,6 @@
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
public class TestTeraSort extends HadoopTestCase {
private static Log LOG = LogFactory.getLog(TestTeraSort.class);
@@ -41,8 +35,7 @@ public TestTeraSort()
super(LOCAL_MR, LOCAL_FS, 1, 1);
}
-@After
-public void tearDown() throws Exception {
+protected void tearDown() throws Exception {
getFileSystem().delete(new Path(TEST_DIR), true);
super.tearDown();
}
@@ -83,7 +76,6 @@ private void runTeraValidator(Configuration job,
assertEquals(ToolRunner.run(job, new TeraValidate(), svArgs), 0);
}
-@Test
public void testTeraSort() throws Exception {
// Run TeraGen to generate input for 'terasort'
runTeraGen(createJobConf(), SORT_INPUT_PATH);
@@ -112,7 +104,6 @@ public void testTeraSort() throws Exception {
TERA_OUTPUT_PATH);
}
-@Test
public void testTeraSortWithLessThanTwoArgs() throws Exception {
String[] args = new String[1];
assertEquals(new TeraSort().run(args), 2);