MAPREDUCE-6520. Migrate MR Client test cases part 1.

(cherry picked from commit ef68b44195)
(cherry picked from commit 7d665f0a98)
Author: Akira Ajisaka
Date:   2016-03-11 22:20:55 +09:00
parent 0d3272b54e
commit 70e232f36e
25 changed files with 283 additions and 65 deletions
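
The diffs below all apply one mechanical JUnit 3 to JUnit 4 migration: drop the junit.framework.TestCase inheritance, annotate lifecycle methods with @Before/@After (widening them from protected to public, which JUnit 4 requires), mark each test method with @Test instead of relying on name-based test* discovery, and replace wildcard and junit.framework imports with explicit org.junit imports plus static org.junit.Assert imports. A minimal sketch of the target shape (ExampleTest and its fixture are illustrative, not part of the patch):

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class ExampleTest {                  // JUnit 3: extends TestCase

  private StringBuilder fixture;

  @Before                                   // JUnit 3: protected void setUp()
  public void setUp() {
    fixture = new StringBuilder("hadoop");
  }

  @After                                    // JUnit 3: protected void tearDown()
  public void tearDown() {
    fixture = null;
  }

  @Test                                     // JUnit 3: found via the test* naming convention
  public void testFixture() {
    assertTrue(fixture.length() > 0);
    assertEquals("hadoop", fixture.toString());
  }
}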


@@ -17,17 +17,30 @@
*/
package org.apache.hadoop.conf;
import org.junit.Assert;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.Utils;
import org.junit.Test;
import java.io.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* This testcase tests that a JobConf without default values submits jobs
@@ -40,6 +53,7 @@ public class TestNoDefaultsJobConf extends HadoopTestCase {
super(HadoopTestCase.CLUSTER_MR, HadoopTestCase.DFS_FS, 1, 1);
}
@Test
public void testNoDefaults() throws Exception {
JobConf configuration = new JobConf();
assertTrue(configuration.get("hadoop.tmp.dir", null) != null);


@@ -18,13 +18,12 @@
package org.apache.hadoop.mapred;
import junit.framework.TestCase;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapreduce.MRConfig;
import org.junit.After;
import org.junit.Before;
import java.io.File;
import java.io.IOException;
/**
@@ -38,7 +37,7 @@ import java.io.IOException;
* Job Configurations should be created using a configuration returned by the
* 'createJobConf()' method.
*/
public abstract class HadoopTestCase extends TestCase {
public abstract class HadoopTestCase {
public static final int LOCAL_MR = 1;
public static final int CLUSTER_MR = 2;
public static final int LOCAL_FS = 4;
@@ -140,8 +139,8 @@ public abstract class HadoopTestCase extends TestCase {
*
* @throws Exception
*/
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
if (localFS) {
fileSystem = FileSystem.getLocal(new JobConf());
}
@@ -164,7 +163,8 @@
*
* @throws Exception
*/
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
try {
if (mrCluster != null) {
mrCluster.shutdown();
@@ -181,7 +181,6 @@
catch (Exception ex) {
System.out.println(ex);
}
super.tearDown();
}
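
HadoopTestCase itself keeps the setUp/tearDown names but now carries @Before/@After and no longer extends junit.framework.TestCase. Because the subclasses in this patch override methods of the same name, JUnit 4 invokes only the subclass version, so each override has to chain to super.setUp()/super.tearDown() explicitly; that is the pattern repeated in NotificationTestCase, TestTaskCommit, and the other files below. A hypothetical subclass sketching the shape (ExampleMiniClusterTest is not part of the patch):

import java.io.IOException;

import org.apache.hadoop.mapred.HadoopTestCase;
import org.junit.After;
import org.junit.Before;

public class ExampleMiniClusterTest extends HadoopTestCase {

  public ExampleMiniClusterTest() throws IOException {
    super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
  }

  @Before
  public void setUp() throws Exception {
    super.setUp();      // bring up the MR runner and file system first
    // per-test fixture setup goes here
  }

  @After
  public void tearDown() throws Exception {
    // per-test cleanup goes here
    super.tearDown();   // shut the cluster and file system down last
  }
}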
/**


@@ -34,6 +34,13 @@ import javax.servlet.ServletException;
import java.io.IOException;
import java.io.DataOutputStream;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.After;
import org.junit.Test;
/**
* Base class to test Job end notification in local and cluster mode.
*
@@ -140,17 +147,19 @@ public abstract class NotificationTestCase extends HadoopTestCase {
return conf;
}
protected void setUp() throws Exception {
@Before
public void setUp() throws Exception {
super.setUp();
startHttpServer();
}
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
stopHttpServer();
super.tearDown();
}
@Test
public void testMR() throws Exception {
System.out.println(launchWordCount(this.createJobConf(),


@@ -30,12 +30,16 @@ import java.io.IOException;
import java.io.OutputStream;
import java.util.Iterator;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
public class TestFileOutputFormat extends HadoopTestCase {
public TestFileOutputFormat() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
@Test
public void testCustomFile() throws Exception {
Path inDir = new Path("testing/fileoutputformat/input");
Path outDir = new Path("testing/fileoutputformat/output");


@@ -17,9 +17,6 @@
*/
package org.apache.hadoop.mapred;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
@@ -27,6 +24,16 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.mapred.SortedRanges.Range;
import org.apache.hadoop.mapreduce.TaskType;
import org.junit.After;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestTaskCommit extends HadoopTestCase {
Path rootDir =
@@ -76,12 +83,13 @@ public class TestTaskCommit extends HadoopTestCase {
super(LOCAL_MR, LOCAL_FS, 1, 1);
}
@Override
@After
public void tearDown() throws Exception {
super.tearDown();
FileUtil.fullyDelete(new File(rootDir.toString()));
}
@Test
public void testCommitFail() throws IOException {
final Path inDir = new Path(rootDir, "./input");
final Path outDir = new Path(rootDir, "./output");
@@ -182,6 +190,7 @@ public class TestTaskCommit extends HadoopTestCase {
*
* @throws Exception
*/
@Test
public void testTaskCleanupDoesNotCommit() throws Exception {
// Mimic a job with a special committer that does not cleanup
// files when a task fails.
@@ -228,23 +237,27 @@ public class TestTaskCommit extends HadoopTestCase {
assertTrue("Task did not succeed", umbilical.taskDone);
}
@Test
public void testCommitRequiredForMapTask() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
assertTrue("MapTask should need commit", testTask.isCommitRequired());
}
@Test
public void testCommitRequiredForReduceTask() throws Exception {
Task testTask = createDummyTask(TaskType.REDUCE);
assertTrue("ReduceTask should need commit", testTask.isCommitRequired());
}
@Test
public void testCommitNotRequiredForJobSetup() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
testTask.setJobSetupTask();
assertFalse("Job setup task should not need commit",
testTask.isCommitRequired());
}
@Test
public void testCommitNotRequiredForJobCleanup() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
testTask.setJobCleanupTask();
@@ -252,6 +265,7 @@ public class TestTaskCommit extends HadoopTestCase {
testTask.isCommitRequired());
}
@Test
public void testCommitNotRequiredForTaskCleanup() throws Exception {
Task testTask = createDummyTask(TaskType.REDUCE);
testTask.setTaskCleanupTask();


@@ -27,6 +27,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobConf;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* HadoopTestCase that tests the local job runner.
@@ -59,6 +61,7 @@ public class TestLocalJobControl extends HadoopTestCase {
* object. Finally, it creates a thread to run the JobControl object and
* monitors/reports the job states.
*/
@Test
public void testLocalJobControlDataCopy() throws Exception {
FileSystem fs = FileSystem.get(createJobConf());


@@ -21,12 +21,29 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.junit.Test;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Iterator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestChainMapReduce extends HadoopTestCase {
private static Path getFlagDir(boolean local) {
@@ -67,6 +84,7 @@ public class TestChainMapReduce extends HadoopTestCase {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
@Test
public void testChain() throws Exception {
Path inDir = new Path("testing/chain/input");
Path outDir = new Path("testing/chain/output");


@@ -36,6 +36,8 @@ import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.Utils;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.BufferedReader;
import java.io.File;


@@ -17,11 +17,6 @@
*/
package org.apache.hadoop.mapred.lib;
import java.io.IOException;
import java.util.Map;
import junit.framework.TestCase;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
@@ -30,12 +25,19 @@ import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.junit.Test;
import java.io.IOException;
import java.util.Map;
import static org.junit.Assert.assertEquals;
/**
* @see TestDelegatingInputFormat
*/
public class TestMultipleInputs extends TestCase {
public class TestMultipleInputs {
@Test
public void testAddInputPathWithFormat() {
final JobConf conf = new JobConf();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class);
@@ -48,6 +50,7 @@ public class TestMultipleInputs extends TestCase {
.getClass());
}
@Test
public void testAddInputPathWithMapper() {
final JobConf conf = new JobConf();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,


@@ -24,7 +24,23 @@ import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.DataOutputStream;
@@ -32,17 +48,23 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Iterator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class TestMultipleOutputs extends HadoopTestCase {
public TestMultipleOutputs() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
@Test
public void testWithoutCounters() throws Exception {
_testMultipleOutputs(false);
_testMOWithJavaSerialization(false);
}
@Test
public void testWithCounters() throws Exception {
_testMultipleOutputs(true);
_testMOWithJavaSerialization(true);
@@ -62,6 +84,7 @@ public class TestMultipleOutputs extends HadoopTestCase {
return dir;
}
@Before
public void setUp() throws Exception {
super.setUp();
Path rootDir = getDir(ROOT_DIR);
@@ -75,6 +98,7 @@
}
}
@After
public void tearDown() throws Exception {
Path rootDir = getDir(ROOT_DIR);


@@ -22,26 +22,45 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;
import org.junit.Test;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Iterator;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class TestMultithreadedMapRunner extends HadoopTestCase {
public TestMultithreadedMapRunner() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
@Test
public void testOKRun() throws Exception {
run(false, false);
}
@Test
public void testIOExRun() throws Exception {
run(true, false);
}
@Test
public void testRuntimeExRun() throws Exception {
run(false, true);
}


@@ -30,6 +30,13 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobConf;
import org.apache.log4j.Level;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class TestChild extends HadoopTestCase {
private static String TEST_ROOT_DIR =
@@ -145,7 +152,8 @@
+ numPartFiles, list.length == numPartFiles);
return job;
}
@Test
public void testChild() throws Exception {
try {
submitAndValidateJob(createJobConf(), 1, 1, true);


@@ -30,6 +30,9 @@ import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
@Ignore
public class TestNoJobSetupCleanup extends HadoopTestCase {
private static String TEST_ROOT_DIR =
@@ -68,7 +71,8 @@ public class TestNoJobSetupCleanup extends HadoopTestCase {
+ numPartFiles, list.length == numPartFiles);
return job;
}
@Test
public void testNoJobSetupCleanup() throws Exception {
try {
Configuration conf = createJobConf();


@@ -33,6 +33,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Tests context api and {@link StatusReporter#getProgress()} via


@@ -29,6 +29,10 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
/**
* Tests error conditions in ChainMapper/ChainReducer.
@@ -51,6 +55,7 @@ public class TestChainErrors extends HadoopTestCase {
*
* @throws Exception
*/
@Test
public void testChainSubmission() throws Exception {
Configuration conf = createJobConf();
@@ -89,6 +94,7 @@
*
* @throws Exception
*/
@Test
public void testChainFail() throws Exception {
Configuration conf = createJobConf();
@@ -114,6 +120,7 @@
*
* @throws Exception
*/
@Test
public void testReducerFail() throws Exception {
Configuration conf = createJobConf();
@@ -139,6 +146,7 @@
*
* @throws Exception
*/
@Test
public void testChainMapNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";
@@ -163,6 +171,7 @@
*
* @throws Exception
*/
@Test
public void testChainReduceNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";


@@ -30,6 +30,11 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Test;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class TestMapReduceChain extends HadoopTestCase {
@@ -63,6 +68,7 @@
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
@Test
public void testChain() throws Exception {
Path inDir = new Path(localPathRoot, "testing/chain/input");
Path outDir = new Path(localPathRoot, "testing/chain/output");


@@ -26,6 +26,9 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper;
import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
@@ -42,6 +45,7 @@ public class TestSingleElementChain extends HadoopTestCase {
}
// test chain mapper and reducer by adding single mapper and reducer to chain
@Test
public void testNoChain() throws Exception {
Path inDir = new Path(localPathRoot, "testing/chain/input");
Path outDir = new Path(localPathRoot, "testing/chain/output");


@@ -18,25 +18,40 @@
package org.apache.hadoop.mapreduce.lib.db;
import java.sql.*;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
//import org.apache.hadoop.examples.DBCountPageView;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.db.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.StringUtils;
import org.hsqldb.server.Server;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
//import org.apache.hadoop.examples.DBCountPageView;
/**
* Test aspects of DataDrivenDBInputFormat
@@ -109,11 +124,13 @@ public class TestDataDrivenDBInputFormat extends HadoopTestCase {
createConnection(driverClassName, url);
}
@Before
public void setUp() throws Exception {
initialize(DRIVER_CLASS, DB_URL);
super.setUp();
}
@After
public void tearDown() throws Exception {
super.tearDown();
shutdown();
@@ -170,6 +187,7 @@ public class TestDataDrivenDBInputFormat extends HadoopTestCase {
}
}
@Test
public void testDateSplits() throws Exception {
Statement s = connection.createStatement();
final String DATE_TABLE = "datetable";


@@ -38,6 +38,8 @@ import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @see TestDelegatingInputFormat
@@ -139,7 +141,7 @@ public class TestMultipleInputs extends HadoopTestCase {
assertTrue(output.readLine().equals("e 2"));
}
@SuppressWarnings("unchecked")
@Test
public void testAddInputPathWithFormat() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class);
@@ -152,7 +154,7 @@
.getClass());
}
@SuppressWarnings("unchecked")
@Test
public void testAddInputPathWithMapper() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,


@@ -33,6 +33,9 @@ import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
/**
* This class performs unit test for Job/JobControl classes.
@@ -120,7 +123,8 @@ public class TestMapReduceJobControl extends HadoopTestCase {
} catch (Exception e) {}
}
}
@Test
public void testJobControlWithFailJob() throws Exception {
LOG.info("Starting testJobControlWithFailJob");
Configuration conf = createJobConf();
@@ -144,6 +148,7 @@
theControl.stop();
}
@Test
public void testJobControlWithKillJob() throws Exception {
LOG.info("Starting testJobControlWithKillJob");
@@ -182,6 +187,7 @@
theControl.stop();
}
@Test
public void testJobControl() throws Exception {
LOG.info("Starting testJobControl");


@@ -23,23 +23,33 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class TestMultithreadedMapper extends HadoopTestCase {
public TestMultithreadedMapper() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
@Test
public void testOKRun() throws Exception {
run(false, false);
}
@Test
public void testIOExRun() throws Exception {
run(true, false);
}
@Test
public void testRuntimeExRun() throws Exception {
run(false, true);
}


@@ -33,6 +33,11 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
/**
* A JUnit test to test Map-Reduce job committer.
@@ -54,15 +59,15 @@ public class TestJobOutputCommitter extends HadoopTestCase {
private FileSystem fs;
private Configuration conf = null;
@Override
protected void setUp() throws Exception {
@Before
public void setUp() throws Exception {
super.setUp();
conf = createJobConf();
fs = getFileSystem();
}
@Override
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
fs.delete(new Path(TEST_ROOT_DIR), true);
super.tearDown();
}
@@ -219,6 +224,7 @@ public class TestJobOutputCommitter extends HadoopTestCase {
*
* @throws Exception
*/
@Test
public void testDefaultCleanupAndAbort() throws Exception {
// check with a successful job
testSuccessfulJob(FileOutputCommitter.SUCCEEDED_FILE_NAME,
@@ -238,6 +244,7 @@
*
* @throws Exception
*/
@Test
public void testCustomAbort() throws Exception {
// check with a successful job
testSuccessfulJob(FileOutputCommitter.SUCCEEDED_FILE_NAME,
@@ -264,6 +271,7 @@
* compatibility testing.
* @throws Exception
*/
@Test
public void testCustomCleanup() throws Exception {
// check with a successful job
testSuccessfulJob(CUSTOM_CLEANUP_FILE_NAME,


@@ -27,23 +27,36 @@ import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class TestMRMultipleOutputs extends HadoopTestCase {
public TestMRMultipleOutputs() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}
@Test
public void testWithoutCounters() throws Exception {
_testMultipleOutputs(false);
_testMOWithJavaSerialization(false);
}
@Test
public void testWithCounters() throws Exception {
_testMultipleOutputs(true);
_testMOWithJavaSerialization(true);
@@ -57,6 +70,7 @@ public class TestMRMultipleOutputs extends HadoopTestCase {
private static String TEXT = "text";
private static String SEQUENCE = "sequence";
@Before
public void setUp() throws Exception {
super.setUp();
Configuration conf = createJobConf();
@@ -64,6 +78,7 @@
fs.delete(ROOT_DIR, true);
}
@After
public void tearDown() throws Exception {
Configuration conf = createJobConf();
FileSystem fs = FileSystem.get(conf);


@@ -18,8 +18,6 @@
package org.apache.hadoop.mapreduce.lib.partition;
import java.io.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
@@ -32,6 +30,15 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
@@ -45,8 +52,8 @@ public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
conf = createJobConf();
conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
}
private void testComparator(String keySpec, int expect)
throws Exception {
String root = System.getProperty("test.build.data", "/tmp");
Path inDir = new Path(root, "test_cmp/in");
@@ -93,7 +100,8 @@ public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
reader.close();
}
}
@Test
public void testBasicUnixComparator() throws Exception {
testComparator("-k1,1n", 1);
testComparator("-k2,2n", 1);
@@ -117,7 +125,7 @@ public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
byte[] line1_bytes = line1.getBytes();
byte[] line2_bytes = line2.getBytes();
public void testWithoutMRJob(String keySpec, int expect) throws Exception {
private void testWithoutMRJob(String keySpec, int expect) throws Exception {
KeyFieldBasedComparator<Void, Void> keyFieldCmp =
new KeyFieldBasedComparator<Void, Void>();
conf.set("mapreduce.partition.keycomparator.options", keySpec);
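
A related detail in this file: the parameterized helper testWithoutMRJob is narrowed from public to private. Under JUnit 3 the public test* naming convention signalled "this is a test"; with annotation-based discovery only @Test methods run, so helpers no longer need to be public, and private states the intent plainly. A small illustrative sketch (TestHelperVisibility is hypothetical, not from the patch):

import org.junit.Test;

import static org.junit.Assert.assertEquals;

public class TestHelperVisibility {

  // Despite the test* name, JUnit 4 never runs this method: it carries no
  // @Test annotation, and keeping it private makes that unmistakable.
  private void testComparatorFor(String keySpec, int expect) {
    assertEquals(expect, keySpec.startsWith("-k") ? 1 : 0);
  }

  @Test
  public void testBasicSpecs() {
    testComparatorFor("-k1,1n", 1);   // helpers are invoked explicitly from real tests
  }
}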


@@ -27,6 +27,12 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class TestTeraSort extends HadoopTestCase {
private static Log LOG = LogFactory.getLog(TestTeraSort.class);
@@ -35,7 +41,8 @@ public class TestTeraSort extends HadoopTestCase {
super(LOCAL_MR, LOCAL_FS, 1, 1);
}
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
getFileSystem().delete(new Path(TEST_DIR), true);
super.tearDown();
}
@@ -76,6 +83,7 @@
assertEquals(ToolRunner.run(job, new TeraValidate(), svArgs), 0);
}
@Test
public void testTeraSort() throws Exception {
// Run TeraGen to generate input for 'terasort'
runTeraGen(createJobConf(), SORT_INPUT_PATH);
@@ -104,6 +112,7 @@
TERA_OUTPUT_PATH);
}
@Test
public void testTeraSortWithLessThanTwoArgs() throws Exception {
String[] args = new String[1];
assertEquals(new TeraSort().run(args), 2);