MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.
parent 2e1d0ff4e9
commit 2c268cc936
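Every file touched below follows the same JUnit 3 to JUnit 4 migration: drop the `junit.framework.TestCase` base class, mark test methods with `org.junit.Test`, move fixture hooks to `@Before`/`@After`, and pull assertions in as static imports from `org.junit.Assert`. A minimal before/after sketch of that pattern, using a hypothetical FooTest rather than any class from this patch:

    // JUnit 3 style (before): the runner discovers public test* methods
    // on any subclass of junit.framework.TestCase.
    import junit.framework.TestCase;

    public class FooTest extends TestCase {
      public void testAdd() {
        assertEquals(4, 2 + 2);   // assertEquals inherited from TestCase
      }
    }

    // JUnit 4 style (after): no base class; @Test marks test methods and
    // assertions come from org.junit.Assert via static imports.
    import org.junit.Test;
    import static org.junit.Assert.assertEquals;

    public class FooTest {
      @Test
      public void testAdd() {
        assertEquals(4, 2 + 2);
      }
    }
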
DFSCIOTest.java

@@ -28,8 +28,6 @@ import java.io.PrintStream;
 import java.util.Date;
 import java.util.StringTokenizer;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -39,6 +37,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.junit.Ignore;
+import org.junit.Test;
 
 /**
  * Distributed i/o benchmark.
@@ -68,7 +67,7 @@ import org.junit.Ignore;
  * </ul>
  */
 @Ignore
-public class DFSCIOTest extends TestCase {
+public class DFSCIOTest {
   // Constants
   private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
   private static final int TEST_TYPE_READ = 0;
@@ -98,6 +97,7 @@ public class DFSCIOTest extends TestCase {
    *
    * @throws Exception
    */
+  @Test
   public void testIOs() throws Exception {
     testIOs(10, 10);
   }

TestFileSystem.java

@@ -34,8 +34,6 @@ import java.util.HashMap;
 import java.net.InetSocketAddress;
 import java.net.URI;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -50,8 +48,15 @@ import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
 
-public class TestFileSystem extends TestCase {
+public class TestFileSystem {
   private static final Log LOG = FileSystem.LOG;
 
   private static Configuration conf = new Configuration();
@@ -66,6 +71,7 @@ public class TestFileSystem extends TestCase {
   private static Path READ_DIR = new Path(ROOT, "fs_read");
   private static Path DATA_DIR = new Path(ROOT, "fs_data");
 
+  @Test
   public void testFs() throws Exception {
     testFs(10 * MEGA, 100, 0);
   }
@@ -90,6 +96,7 @@ public class TestFileSystem extends TestCase {
     fs.delete(READ_DIR, true);
   }
 
+  @Test
   public static void testCommandFormat() throws Exception {
     // This should go to TestFsShell.java when it is added.
     CommandFormat cf;
@@ -488,6 +495,7 @@ public class TestFileSystem extends TestCase {
     }
   }
 
+  @Test
   public void testFsCache() throws Exception {
     {
       long now = System.currentTimeMillis();
@@ -561,6 +569,7 @@ public class TestFileSystem extends TestCase {
         + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
   }
 
+  @Test
   public void testFsClose() throws Exception {
     {
       Configuration conf = new Configuration();
@@ -569,6 +578,7 @@ public class TestFileSystem extends TestCase {
     }
   }
 
+  @Test
   public void testFsShutdownHook() throws Exception {
     final Set<FileSystem> closed = Collections.synchronizedSet(new HashSet<FileSystem>());
     Configuration conf = new Configuration();
@@ -600,7 +610,7 @@ public class TestFileSystem extends TestCase {
     assertTrue(closed.contains(fsWithoutAuto));
   }
 
+  @Test
   public void testCacheKeysAreCaseInsensitive()
     throws Exception
   {

TestJHLA.java

@@ -23,19 +23,18 @@ import java.io.FileOutputStream;
 import java.io.OutputStreamWriter;
 import java.io.File;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Test;
 
 /**
  * Test Job History Log Analyzer.
 *
 * @see JHLogAnalyzer
 */
-public class TestJHLA extends TestCase {
+public class TestJHLA {
   private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
   private String historyLog = System.getProperty("test.build.data",
       "build/test/data") + "/history/test.log";
@@ -133,6 +132,7 @@ public class TestJHLA extends TestCase {
   /**
    * Run log analyzer in test mode for file test.log.
    */
+  @Test
   public void testJHLA() {
     String[] args = {"-test", historyLog, "-jobDelimiter", ".!!FILE=.*!!"};
     JHLogAnalyzer.main(args);

TestSequenceFileMergeProgress.java

@@ -32,21 +32,25 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.mapred.*;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.*;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
-public class TestSequenceFileMergeProgress extends TestCase {
+public class TestSequenceFileMergeProgress {
   private static final Log LOG = FileInputFormat.LOG;
   private static final int RECORDS = 10000;
 
+  @Test
   public void testMergeProgressWithNoCompression() throws IOException {
     runTest(SequenceFile.CompressionType.NONE);
   }
 
+  @Test
   public void testMergeProgressWithRecordCompression() throws IOException {
     runTest(SequenceFile.CompressionType.RECORD);
   }
 
+  @Test
   public void testMergeProgressWithBlockCompression() throws IOException {
     runTest(SequenceFile.CompressionType.BLOCK);
   }
@@ -92,7 +96,7 @@ public class TestSequenceFileMergeProgress extends TestCase {
       count++;
     }
     assertEquals(RECORDS, count);
-    assertEquals(1.0f, rIter.getProgress().get());
+    assertEquals(1.0f, rIter.getProgress().get(), 0.0000);
   }
 
 }

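The one non-mechanical change above is the extra `0.0000` argument: JUnit 4's `Assert.assertEquals` for `float`/`double` expects a delta (allowed difference), and the two-argument floating-point overload is deprecated. A small illustration of the same idea, not taken from this patch:

    import org.junit.Test;
    import static org.junit.Assert.assertEquals;

    public class ProgressAssertionExample {
      @Test
      public void floatComparisonNeedsDelta() {
        float progress = 1.0f;
        // JUnit 4: compare floats with an explicit tolerance; a delta of
        // 0.0000 (as in the patch) means the values must match exactly.
        assertEquals(1.0f, progress, 0.0000f);
      }
    }
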
ClusterMapReduceTestCase.java

@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.After;
+import org.junit.Before;
 
 import java.io.IOException;
 import java.util.Map;
@@ -41,7 +42,7 @@ import java.util.Properties;
  * <p/>
 * The DFS filesystem is formated before the testcase starts and after it ends.
 */
-public abstract class ClusterMapReduceTestCase extends TestCase {
+public abstract class ClusterMapReduceTestCase {
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
 
@@ -50,9 +51,8 @@ public abstract class ClusterMapReduceTestCase extends TestCase {
    *
    * @throws Exception
    */
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
 
     startCluster(true, null);
   }
@@ -139,9 +139,9 @@ public abstract class ClusterMapReduceTestCase extends TestCase {
    *
    * @throws Exception
    */
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     stopCluster();
-    super.tearDown();
   }
 
   /**

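In JUnit 3 the fixture hooks were the inherited `setUp()`/`tearDown()` overrides, which is why the old code called `super.setUp()` and `super.tearDown()`. In JUnit 4 any public no-arg method annotated with `@Before` or `@After` runs around each test, so the super calls disappear, as the hunks above show. A small sketch of the annotated lifecycle (hypothetical class, not part of the patch):

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;
    import static org.junit.Assert.assertNotNull;

    public class LifecycleExample {
      private Object resource;

      @Before               // runs before every @Test method
      public void setUp() {
        resource = new Object();
      }

      @After                // runs after every @Test method, even on failure
      public void tearDown() {
        resource = null;
      }

      @Test
      public void resourceIsAvailable() {
        assertNotNull(resource);
      }
    }
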
TestAuditLogger.java

@@ -28,13 +28,13 @@ import org.apache.hadoop.ipc.TestRPC.TestImpl;
 import org.apache.hadoop.ipc.TestRPC.TestProtocol;
 import org.apache.hadoop.mapred.AuditLogger.Keys;
 import org.apache.hadoop.net.NetUtils;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 /**
  * Tests {@link AuditLogger}.
 */
-public class TestAuditLogger extends TestCase {
+public class TestAuditLogger {
   private static final String USER = "test";
   private static final String OPERATION = "oper";
   private static final String TARGET = "tgt";
@@ -44,6 +44,7 @@ public class TestAuditLogger extends TestCase {
   /**
    * Test the AuditLog format with key-val pair.
    */
+  @Test
   public void testKeyValLogFormat() {
     StringBuilder actLog = new StringBuilder();
     StringBuilder expLog = new StringBuilder();
@@ -114,6 +115,7 @@ public class TestAuditLogger extends TestCase {
   /**
    * Test {@link AuditLogger} without IP set.
    */
+  @Test
   public void testAuditLoggerWithoutIP() throws Exception {
     // test without ip
     testSuccessLogFormat(false);
@@ -137,6 +139,7 @@ public class TestAuditLogger extends TestCase {
   /**
    * Test {@link AuditLogger} with IP set.
    */
+  @Test
   public void testAuditLoggerWithIP() throws Exception {
     Configuration conf = new Configuration();
     // start the IPC server

TestBadRecords.java

@@ -40,6 +40,11 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.junit.Ignore;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNotNull;
 @Ignore
 public class TestBadRecords extends ClusterMapReduceTestCase {
 
@@ -207,6 +212,7 @@ public class TestBadRecords extends ClusterMapReduceTestCase {
     return processed;
   }
 
+  @Test
   public void testBadMapRed() throws Exception {
     JobConf conf = createJobConf();
     conf.setMapperClass(BadMapper.class);

TestClusterMapReduceTestCase.java

@@ -29,6 +29,12 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertFalse;
 public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
   public void _testMapReduce(boolean restart) throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
@@ -85,14 +91,17 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
 
   }
 
+  @Test
   public void testMapReduce() throws Exception {
     _testMapReduce(false);
   }
 
+  @Test
   public void testMapReduceRestarting() throws Exception {
     _testMapReduce(true);
   }
 
+  @Test
   public void testDFSRestart() throws Exception {
     Path file = new Path(getInputDir(), "text.txt");
     OutputStream os = getFileSystem().create(file);
@@ -109,6 +118,7 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
 
   }
 
+  @Test
   public void testMRConfig() throws Exception {
     JobConf conf = createJobConf();
     assertNull(conf.get("xyz"));

TestCollect.java

@@ -21,15 +21,15 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.junit.Test;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 
 /**
  * TestCollect checks if the collect can handle simultaneous invocations.
 */
-public class TestCollect extends TestCase
+public class TestCollect
 {
   final static Path OUTPUT_DIR = new Path("build/test/test.collect.output");
   static final int NUM_FEEDERS = 10;
@@ -127,7 +127,7 @@ public class TestCollect extends TestCase
     conf.setNumMapTasks(1);
     conf.setNumReduceTasks(1);
   }
+  @Test
   public void testCollect() throws IOException {
     JobConf conf = new JobConf();
     configure(conf);
@@ -144,9 +144,5 @@ public class TestCollect extends TestCase
     fs.delete(OUTPUT_DIR, true);
   }
   }
-
-  public static void main(String[] args) throws IOException {
-    new TestCollect().testCollect();
-  }
 }
 

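TestCollect (and, further down, TestMiniMRDFSCaching and TestMultiFileInputFormat) also loses its ad-hoc `main()` driver: with JUnit 4 the class is simply run through a JUnit runner, so the hand-written entry point is redundant. If a command-line run were ever needed again, something like the following would do the same job; `JUnitCore` is standard JUnit 4 API, but this snippet is illustrative and not part of the patch:

    import org.junit.runner.JUnitCore;
    import org.junit.runner.Result;
    import org.junit.runner.notification.Failure;

    public class RunTestCollect {
      public static void main(String[] args) {
        // Programmatic equivalent of "run this test class under JUnit 4".
        Result result = JUnitCore.runClasses(org.apache.hadoop.mapred.TestCollect.class);
        for (Failure failure : result.getFailures()) {
          System.out.println(failure.toString());
        }
        System.out.println("Success: " + result.wasSuccessful());
      }
    }
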
TestCommandLineJobSubmission.java

@@ -21,21 +21,21 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
 /**
  * check for the job submission options of
 * -libjars -files -archives
 */
 @Ignore
-public class TestCommandLineJobSubmission extends TestCase {
+public class TestCommandLineJobSubmission {
   // Input output paths for this..
   // these are all dummy and does not test
   // much in map reduce except for the command line
@@ -43,6 +43,7 @@ public class TestCommandLineJobSubmission extends TestCase {
   static final Path input = new Path("/test/input/");
   static final Path output = new Path("/test/output");
   File buildDir = new File(System.getProperty("test.build.data", "/tmp"));
+  @Test
   public void testJobShell() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

TestFieldSelection.java

@@ -23,11 +23,12 @@ import org.apache.hadoop.mapred.lib.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper;
 import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
-import junit.framework.TestCase;
 import java.text.NumberFormat;
 
-public class TestFieldSelection extends TestCase {
+public class TestFieldSelection {
 
 private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -35,6 +36,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     idFormat.setGroupingUsed(false);
   }
 
+  @Test
   public void testFieldSelection() throws Exception {
     launch();
   }

TestFileInputFormatPathFilter.java

@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
 import java.io.Writer;
@@ -30,7 +32,7 @@ import java.io.OutputStreamWriter;
 import java.util.Set;
 import java.util.HashSet;
 
-public class TestFileInputFormatPathFilter extends TestCase {
+public class TestFileInputFormatPathFilter {
 
   public static class DummyFileInputFormat extends FileInputFormat {
 
@@ -55,12 +57,12 @@ public class TestFileInputFormatPathFilter extends TestCase {
       new Path(new Path(System.getProperty("test.build.data", "."), "data"),
           "TestFileInputFormatPathFilter");
 
+  @Before
   public void setUp() throws Exception {
     tearDown();
     localFs.mkdirs(workDir);
   }
+  @After
   public void tearDown() throws Exception {
     if (localFs.exists(workDir)) {
       localFs.delete(workDir, true);
@@ -129,18 +131,19 @@ public class TestFileInputFormatPathFilter extends TestCase {
     assertEquals(createdFiles, computedFiles);
   }
 
+  @Test
   public void testWithoutPathFilterWithoutGlob() throws Exception {
     _testInputFiles(false, false);
   }
+  @Test
   public void testWithoutPathFilterWithGlob() throws Exception {
     _testInputFiles(false, true);
   }
+  @Test
   public void testWithPathFilterWithoutGlob() throws Exception {
     _testInputFiles(true, false);
   }
+  @Test
   public void testWithPathFilterWithGlob() throws Exception {
     _testInputFiles(true, true);
   }

TestGetSplitHosts.java

@@ -20,10 +20,11 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.net.NetworkTopology;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
-public class TestGetSplitHosts extends TestCase {
+public class TestGetSplitHosts {
+  @Test
   public void testGetSplitHosts() throws Exception {
 
     int numBlocks = 3;

TestIFileStreams.java

@@ -21,11 +21,12 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
+import org.junit.Test;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertEquals;
 
-import junit.framework.TestCase;
-
-public class TestIFileStreams extends TestCase {
+public class TestIFileStreams {
+  @Test
   public void testIFileStream() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -42,7 +43,7 @@ public class TestIFileStreams extends TestCase {
     }
     ifis.close();
   }
+  @Test
   public void testBadIFileStream() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -73,7 +74,7 @@ public class TestIFileStreams extends TestCase {
     }
     fail("Did not detect bad data in checksum");
   }
+  @Test
   public void testBadLength() throws Exception {
     final int DLEN = 100;
     DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);

TestInputPath.java

@@ -17,14 +17,15 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
-public class TestInputPath extends TestCase {
+public class TestInputPath {
+  @Test
   public void testInputPath() throws Exception {
     JobConf jobConf = new JobConf();
     Path workingDir = jobConf.getWorkingDirectory();

TestJavaSerialization.java

@@ -26,8 +26,6 @@ import java.io.Writer;
 import java.util.Iterator;
 import java.util.StringTokenizer;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -36,8 +34,11 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
 
-public class TestJavaSerialization extends TestCase {
+public class TestJavaSerialization {
 
   private static String TEST_ROOT_DIR =
       new File(System.getProperty("test.build.data", "/tmp")).toURI()
@@ -90,7 +91,7 @@ public class TestJavaSerialization extends TestCase {
     wr.write("b a\n");
     wr.close();
   }
+  @Test
   public void testMapReduceJob() throws Exception {
 
     JobConf conf = new JobConf(TestJavaSerialization.class);
@@ -149,6 +150,7 @@ public class TestJavaSerialization extends TestCase {
    * coupled to Writable types, if so, the job will fail.
    *
    */
+  @Test
   public void testWriteToSequencefile() throws Exception {
     JobConf conf = new JobConf(TestJavaSerialization.class);
     conf.setJobName("JavaSerialization");

TestJobName.java

@@ -29,8 +29,13 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 public class TestJobName extends ClusterMapReduceTestCase {
 
+  @Test
   public void testComplexName() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
         "text.txt"));
@@ -65,6 +70,7 @@ public class TestJobName extends ClusterMapReduceTestCase {
     reader.close();
   }
 
+  @Test
   public void testComplexNameWithRegex() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
         "text.txt"));

TestJobSysDirWithDFS.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -32,11 +30,15 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 
 /**
  * A JUnit test to test Job System Directory with Mini-DFS.
 */
-public class TestJobSysDirWithDFS extends TestCase {
+public class TestJobSysDirWithDFS {
   private static final Log LOG =
     LogFactory.getLog(TestJobSysDirWithDFS.class.getName());
 
@@ -115,7 +117,7 @@ public class TestJobSysDirWithDFS extends TestCase {
     // between Job Client & Job Tracker
     assertTrue(result.job.isSuccessful());
   }
+  @Test
   public void testWithDFS() throws IOException {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

TestKeyValueTextInputFormat.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
 
 import java.io.*;
 import java.util.*;
-import junit.framework.TestCase;
 
 import org.apache.commons.logging.*;
 import org.apache.hadoop.fs.*;
@@ -28,8 +27,11 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.compress.*;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 
-public class TestKeyValueTextInputFormat extends TestCase {
+public class TestKeyValueTextInputFormat {
   private static final Log LOG =
     LogFactory.getLog(TestKeyValueTextInputFormat.class.getName());
 
@@ -47,7 +49,7 @@ public class TestKeyValueTextInputFormat extends TestCase {
   private static Path workDir =
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestKeyValueTextInputFormat");
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     Path file = new Path(workDir, "test.txt");
@@ -134,7 +136,7 @@ public class TestKeyValueTextInputFormat extends TestCase {
                 (str.getBytes("UTF-8")),
                 defaultConf);
   }
+  @Test
   public void testUTF8() throws Exception {
     LineReader in = null;
 
@@ -153,7 +155,7 @@ public class TestKeyValueTextInputFormat extends TestCase {
       }
     }
   }
+  @Test
   public void testNewLines() throws Exception {
     LineReader in = null;
     try {
@@ -219,7 +221,8 @@ public class TestKeyValueTextInputFormat extends TestCase {
   /**
    * Test using the gzip codec for reading
    */
-  public static void testGzip() throws IOException {
+  @Test
+  public void testGzip() throws IOException {
     JobConf job = new JobConf();
     CompressionCodec gzip = new GzipCodec();
     ReflectionUtils.setConf(gzip, job);

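The `testGzip` change above is worth noting: besides gaining `@Test`, the method drops `static`, because the standard JUnit 4 runner instantiates the test class and rejects `@Test` methods that are static, non-public, non-void, or take arguments. A minimal hedged example of the required method shape (not from this patch):

    import org.junit.Test;
    import static org.junit.Assert.assertEquals;

    public class MethodShapeExample {
      // Valid JUnit 4 test: public, non-static, void, no arguments.
      @Test
      public void validTest() {
        assertEquals(2, 1 + 1);
      }

      // A static method like the old "public static void testGzip()" would
      // make the runner fail with "Method ... should not be static".
      // @Test
      // public static void invalidTest() { }
    }
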
TestLazyOutput.java

@@ -35,14 +35,15 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.lib.LazyOutputFormat;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
 /**
  * A JUnit test to test the Map-Reduce framework's feature to create part
 * files only if there is an explicit output.collect. This helps in preventing
 * 0 byte files
 */
-public class TestLazyOutput extends TestCase {
+public class TestLazyOutput {
   private static final int NUM_HADOOP_SLAVES = 3;
   private static final int NUM_MAPS_PER_NODE = 2;
   private static final Path INPUT = new Path("/testlazy/input");
@@ -132,7 +133,7 @@ public class TestLazyOutput extends TestCase {
     }
   }
 
+  @Test
   public void testLazyOutput() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

TestMRCJCFileInputFormat.java

@@ -17,16 +17,6 @@
  */
 package org.apache.hadoop.mapred;
 
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.concurrent.TimeoutException;
-
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -36,9 +26,21 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
+import org.junit.After;
+import org.junit.Test;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.concurrent.TimeoutException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 @SuppressWarnings("deprecation")
-public class TestMRCJCFileInputFormat extends TestCase {
+public class TestMRCJCFileInputFormat {
 
   Configuration conf = new Configuration();
   MiniDFSCluster dfs = null;
@@ -50,6 +52,7 @@ public class TestMRCJCFileInputFormat extends TestCase {
       .build();
   }
 
+  @Test
   public void testLocality() throws Exception {
     JobConf job = new JobConf(conf);
     dfs = newDFSCluster(job);
@@ -109,6 +112,7 @@ public class TestMRCJCFileInputFormat extends TestCase {
     DFSTestUtil.waitReplication(fs, path, replication);
   }
 
+  @Test
   public void testNumInputs() throws Exception {
     JobConf job = new JobConf(conf);
     dfs = newDFSCluster(job);
@@ -157,6 +161,7 @@ public class TestMRCJCFileInputFormat extends TestCase {
     }
   }
 
+  @Test
   public void testMultiLevelInput() throws Exception {
     JobConf job = new JobConf(conf);
 
@@ -195,6 +200,7 @@ public class TestMRCJCFileInputFormat extends TestCase {
   }
 
   @SuppressWarnings("rawtypes")
+  @Test
   public void testLastInputSplitAtSplitBoundary() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(1024l * 1024 * 1024,
         128l * 1024 * 1024);
@@ -208,6 +214,7 @@ public class TestMRCJCFileInputFormat extends TestCase {
   }
 
   @SuppressWarnings("rawtypes")
+  @Test
   public void testLastInputSplitExceedingSplitBoundary() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(1027l * 1024 * 1024,
         128l * 1024 * 1024);
@@ -221,6 +228,7 @@ public class TestMRCJCFileInputFormat extends TestCase {
   }
 
   @SuppressWarnings("rawtypes")
+  @Test
   public void testLastInputSplitSingleSplit() throws Exception {
     FileInputFormat fif = new FileInputFormatForTest(100l * 1024 * 1024,
         128l * 1024 * 1024);
@@ -305,7 +313,7 @@ public class TestMRCJCFileInputFormat extends TestCase {
     DFSTestUtil.waitReplication(fileSys, name, replication);
   }
 
-  @Override
+  @After
   public void tearDown() throws Exception {
     if (dfs != null) {
       dfs.shutdown();

TestMRCJCFileOutputCommitter.java

@@ -18,18 +18,25 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RawLocalFileSystem;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.JobStatus;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
 import java.net.URI;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.JobContextImpl;
-import org.apache.hadoop.mapred.TaskAttemptContextImpl;
-import org.apache.hadoop.mapreduce.JobStatus;
-
-public class TestMRCJCFileOutputCommitter extends TestCase {
+public class TestMRCJCFileOutputCommitter {
   private static Path outDir = new Path(
      System.getProperty("test.build.data", "/tmp"), "output");
 
@@ -67,6 +74,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
   }
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testCommitter() throws Exception {
     JobConf job = new JobConf();
     setConfForFileOutputCommitter(job);
@@ -108,6 +116,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testAbort() throws IOException {
     JobConf job = new JobConf();
     setConfForFileOutputCommitter(job);
@@ -161,6 +170,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
     }
   }
 
+  @Test
   public void testFailAbort() throws IOException {
     JobConf job = new JobConf();
     job.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");

TestMapProgress.java

@@ -22,8 +22,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,6 +38,8 @@ import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.split.JobSplitWriter;
 import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Validates map phase progress.
@@ -55,7 +55,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  * once mapTask.run() is finished. Sort phase progress in map task is not
 * validated here.
 */
-public class TestMapProgress extends TestCase {
+public class TestMapProgress {
   public static final Log LOG = LogFactory.getLog(TestMapProgress.class);
   private static String TEST_ROOT_DIR;
   static {
@@ -235,6 +235,7 @@ public class TestMapProgress extends TestCase {
    * Validates map phase progress after each record is processed by map task
    * using custom task reporter.
    */
+  @Test
   public void testMapProgress() throws Exception {
     JobConf job = new JobConf();
     fs = FileSystem.getLocal(job);

TestMerge.java

@@ -44,8 +44,8 @@ import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 
 import org.apache.hadoop.mapred.Task.TaskReporter;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 @SuppressWarnings(value={"unchecked", "deprecation"})
 /**
@@ -56,7 +56,7 @@ import junit.framework.TestCase;
  * framework's merge on the reduce side will merge the partitions created to
 * generate the final output which is sorted on the key.
 */
-public class TestMerge extends TestCase {
+public class TestMerge {
   private static final int NUM_HADOOP_DATA_NODES = 2;
   // Number of input files is same as the number of mappers.
   private static final int NUM_MAPPERS = 10;
@@ -69,6 +69,7 @@ public class TestMerge extends TestCase {
   // Where output goes.
   private static final Path OUTPUT = new Path("/testplugin/output");
 
+  @Test
   public void testMerge() throws Exception {
     MiniDFSCluster dfsCluster = null;
     MiniMRClientCluster mrCluster = null;

TestMiniMRBringup.java

@@ -18,14 +18,16 @@
 
 package org.apache.hadoop.mapred;
 
+import org.junit.Test;
+
 import java.io.IOException;
-import junit.framework.TestCase;
 
 /**
  * A Unit-test to test bringup and shutdown of Mini Map-Reduce Cluster.
 */
-public class TestMiniMRBringup extends TestCase {
+public class TestMiniMRBringup {
 
+  @Test
   public void testBringUp() throws IOException {
     MiniMRCluster mr = null;
     try {

@ -18,20 +18,23 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.io.*;
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
import org.apache.hadoop.hdfs.MiniDFSCluster;
|
import org.apache.hadoop.hdfs.MiniDFSCluster;
|
||||||
import org.apache.hadoop.fs.FileSystem;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
import org.apache.hadoop.mapred.MRCaching.TestResult;
|
import org.apache.hadoop.mapred.MRCaching.TestResult;
|
||||||
import org.junit.Ignore;
|
import org.junit.Ignore;
|
||||||
|
import org.junit.Test;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A JUnit test to test caching with DFS
|
* A JUnit test to test caching with DFS
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
@Ignore
|
@Ignore
|
||||||
public class TestMiniMRDFSCaching extends TestCase {
|
public class TestMiniMRDFSCaching {
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testWithDFS() throws IOException {
|
public void testWithDFS() throws IOException {
|
||||||
MiniMRCluster mr = null;
|
MiniMRCluster mr = null;
|
||||||
MiniDFSCluster dfs = null;
|
MiniDFSCluster dfs = null;
|
||||||
|
@ -70,9 +73,4 @@ public class TestMiniMRDFSCaching extends TestCase {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] argv) throws Exception {
|
|
||||||
TestMiniMRDFSCaching td = new TestMiniMRDFSCaching();
|
|
||||||
td.testWithDFS();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
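Several of these classes carried a public static void main that instantiated the test and called one method by hand, a JUnit 3-era convenience that annotation-driven discovery makes redundant, so the hunks above simply delete it. If a command-line entry point is still wanted, JUnit 4's own runner can be invoked programmatically instead; a hedged sketch (the helper class is hypothetical and only placed next to the test for illustration):

package org.apache.hadoop.mapred;  // assumed location, alongside the test class

import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;

public class RunSingleTest {
  public static void main(String[] args) {
    // Runs every @Test method in the class, which is what the deleted main() approximated.
    Result result = JUnitCore.runClasses(TestMiniMRDFSCaching.class);
    for (Failure failure : result.getFailures()) {
      System.err.println(failure);
    }
    System.exit(result.wasSuccessful() ? 0 : 1);
  }
}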
@ -21,17 +21,17 @@ import java.io.IOException;
|
||||||
import java.util.BitSet;
|
import java.util.BitSet;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.Random;
|
import java.util.Random;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||||
import org.apache.hadoop.fs.FileSystem;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.junit.Test;
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertFalse;
|
||||||
|
|
||||||
public class TestMultiFileInputFormat extends TestCase{
|
public class TestMultiFileInputFormat {
|
||||||
|
|
||||||
private static JobConf job = new JobConf();
|
private static JobConf job = new JobConf();
|
||||||
|
|
||||||
|
@ -80,6 +80,7 @@ public class TestMultiFileInputFormat extends TestCase{
|
||||||
return multiFileDir;
|
return multiFileDir;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testFormat() throws IOException {
|
public void testFormat() throws IOException {
|
||||||
LOG.info("Test started");
|
LOG.info("Test started");
|
||||||
LOG.info("Max split count = " + MAX_SPLIT_COUNT);
|
LOG.info("Max split count = " + MAX_SPLIT_COUNT);
|
||||||
|
@ -123,6 +124,7 @@ public class TestMultiFileInputFormat extends TestCase{
|
||||||
LOG.info("Test Finished");
|
LOG.info("Test Finished");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testFormatWithLessPathsThanSplits() throws Exception {
|
public void testFormatWithLessPathsThanSplits() throws Exception {
|
||||||
MultiFileInputFormat<Text,Text> format = new DummyMultiFileInputFormat();
|
MultiFileInputFormat<Text,Text> format = new DummyMultiFileInputFormat();
|
||||||
FileSystem fs = FileSystem.getLocal(job);
|
FileSystem fs = FileSystem.getLocal(job);
|
||||||
|
@ -135,9 +137,4 @@ public class TestMultiFileInputFormat extends TestCase{
|
||||||
initFiles(fs, 2, 500);
|
initFiles(fs, 2, 500);
|
||||||
assertEquals(2, format.getSplits(job, 4).length);
|
assertEquals(2, format.getSplits(job, 4).length);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] args) throws Exception{
|
|
||||||
TestMultiFileInputFormat test = new TestMultiFileInputFormat();
|
|
||||||
test.testFormat();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,16 +27,19 @@ import java.io.IOException;
|
||||||
import java.io.OutputStream;
|
import java.io.OutputStream;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.io.IOUtils;
|
import org.apache.hadoop.io.IOUtils;
|
||||||
|
import org.junit.Test;
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
* test MultiFileSplit class
|
* test MultiFileSplit class
|
||||||
*/
|
*/
|
||||||
public class TestMultiFileSplit extends TestCase{
|
public class TestMultiFileSplit {
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testReadWrite() throws Exception {
|
public void testReadWrite() throws Exception {
|
||||||
MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {new Path("/test/path/1"), new Path("/test/path/2")}, new long[] {100,200});
|
MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {new Path("/test/path/1"), new Path("/test/path/2")}, new long[] {100,200});
|
||||||
|
|
||||||
|
@ -70,6 +73,7 @@ public class TestMultiFileSplit extends TestCase{
|
||||||
* test method getLocations
|
* test method getLocations
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
|
@Test
|
||||||
public void testgetLocations() throws IOException{
|
public void testgetLocations() throws IOException{
|
||||||
JobConf job= new JobConf();
|
JobConf job= new JobConf();
|
||||||
|
|
||||||
|
|
|
@ -17,10 +17,6 @@
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.fs.FileSystem;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
|
@ -32,12 +28,17 @@ import org.apache.hadoop.mapred.lib.IdentityReducer;
|
||||||
import org.apache.hadoop.mapreduce.JobCounter;
|
import org.apache.hadoop.mapreduce.JobCounter;
|
||||||
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
|
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
|
||||||
import org.junit.Ignore;
|
import org.junit.Ignore;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This test checks whether the task caches are created and used properly.
|
* This test checks whether the task caches are created and used properly.
|
||||||
*/
|
*/
|
||||||
@Ignore
|
@Ignore
|
||||||
public class TestMultipleLevelCaching extends TestCase {
|
public class TestMultipleLevelCaching {
|
||||||
private static final int MAX_LEVEL = 5;
|
private static final int MAX_LEVEL = 5;
|
||||||
final Path inDir = new Path("/cachetesting");
|
final Path inDir = new Path("/cachetesting");
|
||||||
final Path outputPath = new Path("/output");
|
final Path outputPath = new Path("/output");
|
||||||
|
@ -71,6 +72,7 @@ public class TestMultipleLevelCaching extends TestCase {
|
||||||
return rack.toString();
|
return rack.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testMultiLevelCaching() throws Exception {
|
public void testMultiLevelCaching() throws Exception {
|
||||||
for (int i = 1 ; i <= MAX_LEVEL; ++i) {
|
for (int i = 1 ; i <= MAX_LEVEL; ++i) {
|
||||||
testCachingAtLevel(i);
|
testCachingAtLevel(i);
|
||||||
|
|
|
@ -18,15 +18,19 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.io.*;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
import junit.framework.TestCase;
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.*;
|
import java.io.File;
|
||||||
import org.apache.hadoop.io.*;
|
import java.io.IOException;
|
||||||
|
|
||||||
import org.apache.hadoop.mapred.lib.*;
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
|
|
||||||
public class TestMultipleTextOutputFormat extends TestCase {
|
public class TestMultipleTextOutputFormat {
|
||||||
private static JobConf defaultConf = new JobConf();
|
private static JobConf defaultConf = new JobConf();
|
||||||
|
|
||||||
private static FileSystem localFs = null;
|
private static FileSystem localFs = null;
|
||||||
|
@ -84,6 +88,7 @@ public class TestMultipleTextOutputFormat extends TestCase {
|
||||||
rw.close(null);
|
rw.close(null);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testFormat() throws Exception {
|
public void testFormat() throws Exception {
|
||||||
JobConf job = new JobConf();
|
JobConf job = new JobConf();
|
||||||
job.set(JobContext.TASK_ATTEMPT_ID, attempt);
|
job.set(JobContext.TASK_ATTEMPT_ID, attempt);
|
||||||
|
@ -145,8 +150,4 @@ public class TestMultipleTextOutputFormat extends TestCase {
|
||||||
//System.out.printf("File_2 output: %s\n", output);
|
//System.out.printf("File_2 output: %s\n", output);
|
||||||
assertEquals(output, expectedOutput.toString());
|
assertEquals(output, expectedOutput.toString());
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] args) throws Exception {
|
|
||||||
new TestMultipleTextOutputFormat().testFormat();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -19,17 +19,18 @@
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import org.apache.hadoop.mapreduce.TaskCounter;
|
import org.apache.hadoop.mapreduce.TaskCounter;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
public class TestReduceFetch extends TestReduceFetchFromPartialMem {
|
public class TestReduceFetch extends TestReduceFetchFromPartialMem {
|
||||||
|
|
||||||
static {
|
|
||||||
setSuite(TestReduceFetch.class);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Verify that all segments are read from disk
|
* Verify that all segments are read from disk
|
||||||
* @throws Exception might be thrown
|
* @throws Exception might be thrown
|
||||||
*/
|
*/
|
||||||
|
@Test
|
||||||
public void testReduceFromDisk() throws Exception {
|
public void testReduceFromDisk() throws Exception {
|
||||||
final int MAP_TASKS = 8;
|
final int MAP_TASKS = 8;
|
||||||
JobConf job = mrCluster.createJobConf();
|
JobConf job = mrCluster.createJobConf();
|
||||||
|
@ -53,6 +54,7 @@ public class TestReduceFetch extends TestReduceFetchFromPartialMem {
|
||||||
* Verify that no segment hits disk.
|
* Verify that no segment hits disk.
|
||||||
* @throws Exception might be thrown
|
* @throws Exception might be thrown
|
||||||
*/
|
*/
|
||||||
|
@Test
|
||||||
public void testReduceFromMem() throws Exception {
|
public void testReduceFromMem() throws Exception {
|
||||||
final int MAP_TASKS = 3;
|
final int MAP_TASKS = 3;
|
||||||
JobConf job = mrCluster.createJobConf();
|
JobConf job = mrCluster.createJobConf();
|
||||||
|
|
|
@ -18,10 +18,6 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import junit.extensions.TestSetup;
|
|
||||||
import junit.framework.Test;
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
import junit.framework.TestSuite;
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.fs.FileSystem;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
|
@ -30,7 +26,9 @@ import org.apache.hadoop.io.NullWritable;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.io.WritableComparator;
|
import org.apache.hadoop.io.WritableComparator;
|
||||||
import org.apache.hadoop.mapreduce.TaskCounter;
|
import org.apache.hadoop.mapreduce.TaskCounter;
|
||||||
import org.apache.hadoop.mapreduce.MRConfig;
|
import org.junit.After;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import java.io.DataInput;
|
import java.io.DataInput;
|
||||||
import java.io.DataOutput;
|
import java.io.DataOutput;
|
||||||
|
@ -39,35 +37,28 @@ import java.util.Arrays;
|
||||||
import java.util.Formatter;
|
import java.util.Formatter;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
|
|
||||||
public class TestReduceFetchFromPartialMem extends TestCase {
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
|
|
||||||
|
public class TestReduceFetchFromPartialMem {
|
||||||
|
|
||||||
protected static MiniMRCluster mrCluster = null;
|
protected static MiniMRCluster mrCluster = null;
|
||||||
protected static MiniDFSCluster dfsCluster = null;
|
protected static MiniDFSCluster dfsCluster = null;
|
||||||
protected static TestSuite mySuite;
|
|
||||||
|
|
||||||
protected static void setSuite(Class<? extends TestCase> klass) {
|
@Before
|
||||||
mySuite = new TestSuite(klass);
|
public void setUp() throws Exception {
|
||||||
}
|
|
||||||
|
|
||||||
static {
|
|
||||||
setSuite(TestReduceFetchFromPartialMem.class);
|
|
||||||
}
|
|
||||||
|
|
||||||
public static Test suite() {
|
|
||||||
TestSetup setup = new TestSetup(mySuite) {
|
|
||||||
protected void setUp() throws Exception {
|
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
|
dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
|
||||||
mrCluster = new MiniMRCluster(2,
|
mrCluster = new MiniMRCluster(2,
|
||||||
dfsCluster.getFileSystem().getUri().toString(), 1);
|
dfsCluster.getFileSystem().getUri().toString(), 1);
|
||||||
}
|
}
|
||||||
protected void tearDown() throws Exception {
|
|
||||||
|
@After
|
||||||
|
public void tearDown() throws Exception {
|
||||||
if (dfsCluster != null) { dfsCluster.shutdown(); }
|
if (dfsCluster != null) { dfsCluster.shutdown(); }
|
||||||
if (mrCluster != null) { mrCluster.shutdown(); }
|
if (mrCluster != null) { mrCluster.shutdown(); }
|
||||||
}
|
}
|
||||||
};
|
|
||||||
return setup;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static final String tagfmt = "%04d";
|
private static final String tagfmt = "%04d";
|
||||||
private static final String keyfmt = "KEYKEYKEYKEYKEYKEYKE";
|
private static final String keyfmt = "KEYKEYKEYKEYKEYKEYKE";
|
||||||
|
@ -78,6 +69,7 @@ public class TestReduceFetchFromPartialMem extends TestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Verify that at least one segment does not hit disk */
|
/** Verify that at least one segment does not hit disk */
|
||||||
|
@Test
|
||||||
public void testReduceFromPartialMem() throws Exception {
|
public void testReduceFromPartialMem() throws Exception {
|
||||||
final int MAP_TASKS = 7;
|
final int MAP_TASKS = 7;
|
||||||
JobConf job = mrCluster.createJobConf();
|
JobConf job = mrCluster.createJobConf();
|
||||||
|
|
|
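TestReduceFetchFromPartialMem previously built its fixture through junit.framework.TestSuite and junit.extensions.TestSetup, with setUp/tearDown overridden inside the anonymous TestSetup returned from suite(). In JUnit 4 that wiring is replaced by plain lifecycle annotations, as the hunk above shows. A simplified sketch of the annotation-based shape, with the mini-cluster details reduced to a placeholder field rather than the exact code in this patch:

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class PartialMemFetchStyleTest {

  private StringBuilder fixture;   // stands in for the mini DFS/MR clusters

  @Before
  public void setUp() throws Exception {
    // Runs before every @Test method; the old TestSetup.setUp ran once per suite.
    fixture = new StringBuilder("ready");
  }

  @After
  public void tearDown() throws Exception {
    // Runs after every @Test method; mirrors the old TestSetup.tearDown.
    fixture = null;
  }

  @Test
  public void fixtureIsInitialised() {
    assertTrue(fixture.length() > 0);
  }
}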
@ -17,10 +17,6 @@
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.fs.FileSystem;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
import org.apache.hadoop.fs.LocalFileSystem;
|
import org.apache.hadoop.fs.LocalFileSystem;
|
||||||
|
@ -30,11 +26,17 @@ import org.apache.hadoop.io.WritableComparator;
|
||||||
import org.apache.hadoop.io.compress.CompressionCodec;
|
import org.apache.hadoop.io.compress.CompressionCodec;
|
||||||
import org.apache.hadoop.io.compress.DefaultCodec;
|
import org.apache.hadoop.io.compress.DefaultCodec;
|
||||||
import org.apache.hadoop.util.Progressable;
|
import org.apache.hadoop.util.Progressable;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This test exercises the ValueIterator.
|
* This test exercises the ValueIterator.
|
||||||
*/
|
*/
|
||||||
public class TestReduceTask extends TestCase {
|
public class TestReduceTask {
|
||||||
|
|
||||||
static class NullProgress implements Progressable {
|
static class NullProgress implements Progressable {
|
||||||
public void progress() { }
|
public void progress() { }
|
||||||
|
@ -119,9 +121,10 @@ public class TestReduceTask extends TestCase {
|
||||||
}
|
}
|
||||||
assertEquals(vals.length, i);
|
assertEquals(vals.length, i);
|
||||||
// make sure we have progress equal to 1.0
|
// make sure we have progress equal to 1.0
|
||||||
assertEquals(1.0f, rawItr.getProgress().get());
|
assertEquals(1.0f, rawItr.getProgress().get(),0.0000);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testValueIterator() throws Exception {
|
public void testValueIterator() throws Exception {
|
||||||
Path tmpDir = new Path("build/test/test.reduce.task");
|
Path tmpDir = new Path("build/test/test.reduce.task");
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
|
@ -130,6 +133,7 @@ public class TestReduceTask extends TestCase {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testValueIteratorWithCompression() throws Exception {
|
public void testValueIteratorWithCompression() throws Exception {
|
||||||
Path tmpDir = new Path("build/test/test.reduce.task.compression");
|
Path tmpDir = new Path("build/test/test.reduce.task.compression");
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
|
|
|
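One behavioural detail surfaces in TestReduceTask: org.junit.Assert expects an explicit tolerance when comparing floating-point values, so the progress check gains a delta argument. A small self-contained illustration of the tolerance form (the values are invented):

import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class ProgressDeltaTest {

  @Test
  public void floatingPointComparisonNeedsATolerance() {
    float reported = 10.0f / 10.0f;   // stands in for rawItr.getProgress().get()
    // The third argument is the permitted absolute difference; 0.0f demands exact equality.
    assertEquals(1.0f, reported, 0.0f);
  }
}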
@ -18,19 +18,26 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.io.BytesWritable;
|
||||||
|
import org.apache.hadoop.io.DataInputBuffer;
|
||||||
|
import org.apache.hadoop.io.SequenceFile;
|
||||||
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Random;
|
import java.util.Random;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.*;
|
import static org.junit.Assert.assertEquals;
|
||||||
import org.apache.hadoop.io.*;
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
public class TestSequenceFileAsBinaryInputFormat {
|
||||||
import org.apache.commons.logging.*;
|
|
||||||
|
|
||||||
public class TestSequenceFileAsBinaryInputFormat extends TestCase {
|
|
||||||
private static final Log LOG = FileInputFormat.LOG;
|
private static final Log LOG = FileInputFormat.LOG;
|
||||||
private static final int RECORDS = 10000;
|
private static final int RECORDS = 10000;
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testBinary() throws IOException {
|
public void testBinary() throws IOException {
|
||||||
JobConf job = new JobConf();
|
JobConf job = new JobConf();
|
||||||
FileSystem fs = FileSystem.getLocal(job);
|
FileSystem fs = FileSystem.getLocal(job);
|
||||||
|
|
|
@ -18,24 +18,35 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.io.BooleanWritable;
|
||||||
|
import org.apache.hadoop.io.BytesWritable;
|
||||||
|
import org.apache.hadoop.io.DataInputBuffer;
|
||||||
|
import org.apache.hadoop.io.DataOutputBuffer;
|
||||||
|
import org.apache.hadoop.io.DoubleWritable;
|
||||||
|
import org.apache.hadoop.io.FloatWritable;
|
||||||
|
import org.apache.hadoop.io.IntWritable;
|
||||||
|
import org.apache.hadoop.io.SequenceFile.CompressionType;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Random;
|
import java.util.Random;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.*;
|
import static org.junit.Assert.assertEquals;
|
||||||
import org.apache.hadoop.io.*;
|
import static org.junit.Assert.assertTrue;
|
||||||
import org.apache.hadoop.io.SequenceFile.CompressionType;
|
import static org.junit.Assert.fail;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
public class TestSequenceFileAsBinaryOutputFormat {
|
||||||
import org.apache.commons.logging.*;
|
|
||||||
|
|
||||||
public class TestSequenceFileAsBinaryOutputFormat extends TestCase {
|
|
||||||
private static final Log LOG =
|
private static final Log LOG =
|
||||||
LogFactory.getLog(TestSequenceFileAsBinaryOutputFormat.class.getName());
|
LogFactory.getLog(TestSequenceFileAsBinaryOutputFormat.class.getName());
|
||||||
|
|
||||||
private static final int RECORDS = 10000;
|
private static final int RECORDS = 10000;
|
||||||
// A random task attempt id for testing.
|
// A random task attempt id for testing.
|
||||||
private static final String attempt = "attempt_200707121733_0001_m_000000_0";
|
private static final String attempt = "attempt_200707121733_0001_m_000000_0";
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testBinary() throws IOException {
|
public void testBinary() throws IOException {
|
||||||
JobConf job = new JobConf();
|
JobConf job = new JobConf();
|
||||||
FileSystem fs = FileSystem.getLocal(job);
|
FileSystem fs = FileSystem.getLocal(job);
|
||||||
|
@ -129,6 +140,7 @@ public class TestSequenceFileAsBinaryOutputFormat extends TestCase {
|
||||||
assertEquals("Some records not found", RECORDS, count);
|
assertEquals("Some records not found", RECORDS, count);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testSequenceOutputClassDefaultsToMapRedOutputClass()
|
public void testSequenceOutputClassDefaultsToMapRedOutputClass()
|
||||||
throws IOException {
|
throws IOException {
|
||||||
JobConf job = new JobConf();
|
JobConf job = new JobConf();
|
||||||
|
@ -163,6 +175,7 @@ public class TestSequenceFileAsBinaryOutputFormat extends TestCase {
|
||||||
job));
|
job));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testcheckOutputSpecsForbidRecordCompression() throws IOException {
|
public void testcheckOutputSpecsForbidRecordCompression() throws IOException {
|
||||||
JobConf job = new JobConf();
|
JobConf job = new JobConf();
|
||||||
FileSystem fs = FileSystem.getLocal(job);
|
FileSystem fs = FileSystem.getLocal(job);
|
||||||
|
|
|
@ -18,22 +18,29 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.io.*;
|
import org.apache.commons.logging.Log;
|
||||||
import java.util.*;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import junit.framework.TestCase;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.io.IntWritable;
|
||||||
|
import org.apache.hadoop.io.LongWritable;
|
||||||
|
import org.apache.hadoop.io.SequenceFile;
|
||||||
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import org.apache.commons.logging.*;
|
import java.util.BitSet;
|
||||||
|
import java.util.Random;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.*;
|
import static org.junit.Assert.assertEquals;
|
||||||
import org.apache.hadoop.io.*;
|
import static org.junit.Assert.assertFalse;
|
||||||
import org.apache.hadoop.conf.*;
|
|
||||||
|
|
||||||
public class TestSequenceFileAsTextInputFormat extends TestCase {
|
public class TestSequenceFileAsTextInputFormat {
|
||||||
private static final Log LOG = FileInputFormat.LOG;
|
private static final Log LOG = FileInputFormat.LOG;
|
||||||
|
|
||||||
private static int MAX_LENGTH = 10000;
|
private static int MAX_LENGTH = 10000;
|
||||||
private static Configuration conf = new Configuration();
|
private static Configuration conf = new Configuration();
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testFormat() throws Exception {
|
public void testFormat() throws Exception {
|
||||||
JobConf job = new JobConf(conf);
|
JobConf job = new JobConf(conf);
|
||||||
FileSystem fs = FileSystem.getLocal(conf);
|
FileSystem fs = FileSystem.getLocal(conf);
|
||||||
|
@ -112,8 +119,4 @@ public class TestSequenceFileAsTextInputFormat extends TestCase {
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] args) throws Exception {
|
|
||||||
new TestSequenceFileAsTextInputFormat().testFormat();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,17 +18,21 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.io.*;
|
import org.apache.commons.logging.Log;
|
||||||
import java.util.*;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import junit.framework.TestCase;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.io.BytesWritable;
|
||||||
|
import org.apache.hadoop.io.SequenceFile;
|
||||||
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import org.apache.commons.logging.*;
|
import java.io.IOException;
|
||||||
|
import java.util.Random;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.*;
|
import static org.junit.Assert.assertEquals;
|
||||||
import org.apache.hadoop.io.*;
|
|
||||||
import org.apache.hadoop.conf.*;
|
|
||||||
|
|
||||||
public class TestSequenceFileInputFilter extends TestCase {
|
public class TestSequenceFileInputFilter {
|
||||||
private static final Log LOG = FileInputFormat.LOG;
|
private static final Log LOG = FileInputFormat.LOG;
|
||||||
|
|
||||||
private static final int MAX_LENGTH = 15000;
|
private static final int MAX_LENGTH = 15000;
|
||||||
|
@ -98,6 +102,7 @@ public class TestSequenceFileInputFilter extends TestCase {
|
||||||
return count;
|
return count;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testRegexFilter() throws Exception {
|
public void testRegexFilter() throws Exception {
|
||||||
// set the filter class
|
// set the filter class
|
||||||
LOG.info("Testing Regex Filter with patter: \\A10*");
|
LOG.info("Testing Regex Filter with patter: \\A10*");
|
||||||
|
@ -121,6 +126,7 @@ public class TestSequenceFileInputFilter extends TestCase {
|
||||||
fs.delete(inDir, true);
|
fs.delete(inDir, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testPercentFilter() throws Exception {
|
public void testPercentFilter() throws Exception {
|
||||||
LOG.info("Testing Percent Filter with frequency: 1000");
|
LOG.info("Testing Percent Filter with frequency: 1000");
|
||||||
// set the filter class
|
// set the filter class
|
||||||
|
@ -148,6 +154,7 @@ public class TestSequenceFileInputFilter extends TestCase {
|
||||||
fs.delete(inDir, true);
|
fs.delete(inDir, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testMD5Filter() throws Exception {
|
public void testMD5Filter() throws Exception {
|
||||||
// set the filter class
|
// set the filter class
|
||||||
LOG.info("Testing MD5 Filter with frequency: 1000");
|
LOG.info("Testing MD5 Filter with frequency: 1000");
|
||||||
|
@ -168,9 +175,4 @@ public class TestSequenceFileInputFilter extends TestCase {
|
||||||
// clean up
|
// clean up
|
||||||
fs.delete(inDir, true);
|
fs.delete(inDir, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] args) throws Exception {
|
|
||||||
TestSequenceFileInputFilter filter = new TestSequenceFileInputFilter();
|
|
||||||
filter.testRegexFilter();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,22 +18,28 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.io.*;
|
import org.apache.commons.logging.Log;
|
||||||
import java.util.*;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import junit.framework.TestCase;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.io.BytesWritable;
|
||||||
|
import org.apache.hadoop.io.IntWritable;
|
||||||
|
import org.apache.hadoop.io.SequenceFile;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import org.apache.commons.logging.*;
|
import java.util.BitSet;
|
||||||
|
import java.util.Random;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.*;
|
import static org.junit.Assert.assertEquals;
|
||||||
import org.apache.hadoop.io.*;
|
import static org.junit.Assert.assertFalse;
|
||||||
import org.apache.hadoop.conf.*;
|
|
||||||
|
|
||||||
public class TestSequenceFileInputFormat extends TestCase {
|
public class TestSequenceFileInputFormat {
|
||||||
private static final Log LOG = FileInputFormat.LOG;
|
private static final Log LOG = FileInputFormat.LOG;
|
||||||
|
|
||||||
private static int MAX_LENGTH = 10000;
|
private static int MAX_LENGTH = 10000;
|
||||||
private static Configuration conf = new Configuration();
|
private static Configuration conf = new Configuration();
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testFormat() throws Exception {
|
public void testFormat() throws Exception {
|
||||||
JobConf job = new JobConf(conf);
|
JobConf job = new JobConf(conf);
|
||||||
FileSystem fs = FileSystem.getLocal(conf);
|
FileSystem fs = FileSystem.getLocal(conf);
|
||||||
|
@ -110,8 +116,4 @@ public class TestSequenceFileInputFormat extends TestCase {
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void main(String[] args) throws Exception {
|
|
||||||
new TestSequenceFileInputFormat().testFormat();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,18 +17,20 @@
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.util.Iterator;
|
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.mapred.SortedRanges.Range;
|
import org.apache.hadoop.mapred.SortedRanges.Range;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
public class TestSortedRanges extends TestCase {
|
import java.util.Iterator;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
|
||||||
|
public class TestSortedRanges {
|
||||||
private static final Log LOG =
|
private static final Log LOG =
|
||||||
LogFactory.getLog(TestSortedRanges.class);
|
LogFactory.getLog(TestSortedRanges.class);
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testAdd() {
|
public void testAdd() {
|
||||||
SortedRanges sr = new SortedRanges();
|
SortedRanges sr = new SortedRanges();
|
||||||
sr.add(new Range(2,9));
|
sr.add(new Range(2,9));
|
||||||
|
@ -67,6 +69,7 @@ public class TestSortedRanges extends TestCase {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testRemove() {
|
public void testRemove() {
|
||||||
SortedRanges sr = new SortedRanges();
|
SortedRanges sr = new SortedRanges();
|
||||||
sr.add(new Range(2,19));
|
sr.add(new Range(2,19));
|
||||||
|
|
|
@ -18,12 +18,6 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.io.DataOutputStream;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.net.URI;
|
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
@ -34,14 +28,20 @@ import org.apache.hadoop.io.LongWritable;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.mapred.lib.IdentityMapper;
|
import org.apache.hadoop.mapred.lib.IdentityMapper;
|
||||||
import org.apache.hadoop.mapred.lib.IdentityReducer;
|
import org.apache.hadoop.mapred.lib.IdentityReducer;
|
||||||
import org.apache.hadoop.mapreduce.MRConfig;
|
|
||||||
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
|
|
||||||
import org.apache.hadoop.util.Progressable;
|
import org.apache.hadoop.util.Progressable;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
|
import java.io.DataOutputStream;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.net.URI;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795).
|
* A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795).
|
||||||
*/
|
*/
|
||||||
public class TestSpecialCharactersInOutputPath extends TestCase {
|
public class TestSpecialCharactersInOutputPath {
|
||||||
private static final Log LOG =
|
private static final Log LOG =
|
||||||
LogFactory.getLog(TestSpecialCharactersInOutputPath.class.getName());
|
LogFactory.getLog(TestSpecialCharactersInOutputPath.class.getName());
|
||||||
|
|
||||||
|
@ -97,6 +97,7 @@ public class TestSpecialCharactersInOutputPath extends TestCase {
|
||||||
return (runningJob.isSuccessful());
|
return (runningJob.isSuccessful());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testJobWithDFS() throws IOException {
|
public void testJobWithDFS() throws IOException {
|
||||||
String namenode = null;
|
String namenode = null;
|
||||||
MiniDFSCluster dfs = null;
|
MiniDFSCluster dfs = null;
|
||||||
|
|
|
@ -19,14 +19,18 @@ package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
|
import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
|
||||||
import org.apache.hadoop.mapred.StatisticsCollector.Stat;
|
import org.apache.hadoop.mapred.StatisticsCollector.Stat;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
public class TestStatisticsCollector extends TestCase{
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertNotNull;
|
||||||
|
import static org.junit.Assert.assertNull;
|
||||||
|
|
||||||
|
public class TestStatisticsCollector {
|
||||||
|
|
||||||
@SuppressWarnings("rawtypes")
|
@SuppressWarnings("rawtypes")
|
||||||
|
@Test
|
||||||
public void testMovingWindow() throws Exception {
|
public void testMovingWindow() throws Exception {
|
||||||
StatisticsCollector collector = new StatisticsCollector(1);
|
StatisticsCollector collector = new StatisticsCollector(1);
|
||||||
TimeWindow window = new TimeWindow("test", 6, 2);
|
TimeWindow window = new TimeWindow("test", 6, 2);
|
||||||
|
|
|
@ -17,6 +17,15 @@
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.FileUtil;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.io.LongWritable;
|
||||||
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.apache.hadoop.mapred.lib.IdentityMapper;
|
||||||
|
import org.apache.hadoop.mapred.lib.IdentityReducer;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
import java.io.BufferedReader;
|
import java.io.BufferedReader;
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
@ -26,18 +35,10 @@ import java.io.OutputStream;
|
||||||
import java.io.OutputStreamWriter;
|
import java.io.OutputStreamWriter;
|
||||||
import java.io.Writer;
|
import java.io.Writer;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
import org.apache.hadoop.fs.FileSystem;
|
|
||||||
import org.apache.hadoop.fs.FileUtil;
|
|
||||||
import org.apache.hadoop.fs.Path;
|
|
||||||
import org.apache.hadoop.io.LongWritable;
|
|
||||||
import org.apache.hadoop.io.Text;
|
|
||||||
import org.apache.hadoop.mapred.lib.IdentityMapper;
|
|
||||||
import org.apache.hadoop.mapred.lib.IdentityReducer;
|
|
||||||
|
|
||||||
public class TestUserDefinedCounters extends TestCase {
|
|
||||||
|
|
||||||
|
public class TestUserDefinedCounters {
|
||||||
private static String TEST_ROOT_DIR =
|
private static String TEST_ROOT_DIR =
|
||||||
new File(System.getProperty("test.build.data", "/tmp")).toURI()
|
new File(System.getProperty("test.build.data", "/tmp")).toURI()
|
||||||
.toString().replace(' ', '+')
|
.toString().replace(' ', '+')
|
||||||
|
@ -75,6 +76,7 @@ public class TestUserDefinedCounters extends TestCase {
|
||||||
wr.close();
|
wr.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testMapReduceJob() throws Exception {
|
public void testMapReduceJob() throws Exception {
|
||||||
|
|
||||||
JobConf conf = new JobConf(TestUserDefinedCounters.class);
|
JobConf conf = new JobConf(TestUserDefinedCounters.class);
|
||||||
|
|
|
@ -18,12 +18,6 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.Iterator;
|
|
||||||
import java.util.Map;
|
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.io.DataInputBuffer;
|
import org.apache.hadoop.io.DataInputBuffer;
|
||||||
import org.apache.hadoop.io.DataOutputBuffer;
|
import org.apache.hadoop.io.DataOutputBuffer;
|
||||||
|
@ -31,8 +25,15 @@ import org.apache.hadoop.io.serializer.Deserializer;
|
||||||
import org.apache.hadoop.io.serializer.SerializationFactory;
|
import org.apache.hadoop.io.serializer.SerializationFactory;
|
||||||
import org.apache.hadoop.io.serializer.Serializer;
|
import org.apache.hadoop.io.serializer.Serializer;
|
||||||
import org.apache.hadoop.util.GenericsUtil;
|
import org.apache.hadoop.util.GenericsUtil;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
public class TestWritableJobConf extends TestCase {
|
import java.util.HashMap;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
|
public class TestWritableJobConf {
|
||||||
|
|
||||||
private static final Configuration CONF = new Configuration();
|
private static final Configuration CONF = new Configuration();
|
||||||
|
|
||||||
|
@ -78,15 +79,17 @@ public class TestWritableJobConf extends TestCase {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
assertEquals(map1, map2);
|
assertTrue(map1.equals(map2));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testEmptyConfiguration() throws Exception {
|
public void testEmptyConfiguration() throws Exception {
|
||||||
JobConf conf = new JobConf();
|
JobConf conf = new JobConf();
|
||||||
Configuration deser = serDeser(conf);
|
Configuration deser = serDeser(conf);
|
||||||
assertEquals(conf, deser);
|
assertEquals(conf, deser);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testNonEmptyConfiguration() throws Exception {
|
public void testNonEmptyConfiguration() throws Exception {
|
||||||
JobConf conf = new JobConf();
|
JobConf conf = new JobConf();
|
||||||
conf.set("a", "A");
|
conf.set("a", "A");
|
||||||
|
@ -95,6 +98,7 @@ public class TestWritableJobConf extends TestCase {
|
||||||
assertEquals(conf, deser);
|
assertEquals(conf, deser);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testConfigurationWithDefaults() throws Exception {
|
public void testConfigurationWithDefaults() throws Exception {
|
||||||
JobConf conf = new JobConf(false);
|
JobConf conf = new JobConf(false);
|
||||||
conf.set("a", "A");
|
conf.set("a", "A");
|
||||||
|
|
|
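In TestWritableJobConf the map comparison becomes assertTrue(map1.equals(map2)). Functionally this checks the same thing as assertEquals(map1, map2); the main difference is the failure message, since assertEquals prints both values while assertTrue only reports that the condition was false. A tiny illustration of the two styles, using invented configuration data:

import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class MapAssertionStyleTest {

  @Test
  public void bothStylesPassForEqualMaps() {
    Map<String, String> expected = new HashMap<String, String>();
    Map<String, String> actual = new HashMap<String, String>();
    expected.put("mapreduce.job.name", "sample");
    actual.put("mapreduce.job.name", "sample");

    // On failure this prints "expected:<...> but was:<...>".
    assertEquals(expected, actual);
    // On failure this only reports that the condition was false.
    assertTrue(actual.equals(expected));
  }
}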
@ -18,6 +18,10 @@
|
||||||
|
|
||||||
package org.apache.hadoop.mapred;
|
package org.apache.hadoop.mapred;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertFalse;
|
||||||
|
import static org.junit.Assert.assertNotNull;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
import static org.mockito.Matchers.any;
|
import static org.mockito.Matchers.any;
|
||||||
import static org.mockito.Mockito.doAnswer;
|
import static org.mockito.Mockito.doAnswer;
|
||||||
import static org.mockito.Mockito.doReturn;
|
import static org.mockito.Mockito.doReturn;
|
||||||
|
@ -38,8 +42,6 @@ import java.security.PrivilegedExceptionAction;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
@ -113,7 +115,7 @@ import org.mockito.stubbing.Answer;
|
||||||
* Test YarnRunner and make sure the client side plugin works
|
* Test YarnRunner and make sure the client side plugin works
|
||||||
* fine
|
* fine
|
||||||
*/
|
*/
|
||||||
public class TestYARNRunner extends TestCase {
|
public class TestYARNRunner {
|
||||||
private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
|
private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
|
||||||
private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
|
private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
|
||||||
|
|
||||||
|
|
|
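TestYARNRunner keeps its Mockito-based stubbing (any, doAnswer, doReturn) and only swaps the JUnit 3 scaffolding for org.junit annotations and static asserts. For readers less familiar with that combination, a minimal, self-contained sketch of Mockito stubbing inside a JUnit 4 test; the interface here is invented for illustration and stands in for the protocol clients the real test mocks:

import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

public class StubbingStyleTest {

  // Hypothetical collaborator, not part of this patch.
  interface QueueInfoClient {
    int queueCount(String user);
  }

  @Test
  public void stubbedCallReturnsCannedValue() {
    QueueInfoClient client = mock(QueueInfoClient.class);
    when(client.queueCount("alice")).thenReturn(3);

    assertEquals(3, client.queueCount("alice"));
    verify(client).queueCount("alice");
  }
}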
@ -22,11 +22,6 @@ import java.io.DataOutput;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
|
|
||||||
import junit.framework.Test;
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
import junit.framework.TestSuite;
|
|
||||||
import junit.extensions.TestSetup;
|
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.fs.FileStatus;
|
import org.apache.hadoop.fs.FileStatus;
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
|
@ -54,24 +49,28 @@ import org.apache.hadoop.mapred.Utils;
|
||||||
import org.apache.hadoop.mapred.lib.IdentityMapper;
|
import org.apache.hadoop.mapred.lib.IdentityMapper;
|
||||||
import org.apache.hadoop.mapred.lib.IdentityReducer;
|
import org.apache.hadoop.mapred.lib.IdentityReducer;
|
||||||
import org.apache.hadoop.util.ReflectionUtils;
|
import org.apache.hadoop.util.ReflectionUtils;
|
||||||
|
import org.junit.After;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.assertFalse;
|
||||||
|
|
||||||
public class TestDatamerge extends TestCase {
|
public class TestDatamerge {
|
||||||
|
|
||||||
private static MiniDFSCluster cluster = null;
|
private static MiniDFSCluster cluster = null;
|
||||||
public static Test suite() {
|
|
||||||
TestSetup setup = new TestSetup(new TestSuite(TestDatamerge.class)) {
|
@Before
|
||||||
protected void setUp() throws Exception {
|
public void setUp() throws Exception {
|
||||||
Configuration conf = new Configuration();
|
Configuration conf = new Configuration();
|
||||||
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
|
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
|
||||||
}
|
}
|
||||||
protected void tearDown() throws Exception {
|
@After
|
||||||
|
public void tearDown() throws Exception {
|
||||||
if (cluster != null) {
|
if (cluster != null) {
|
||||||
cluster.shutdown();
|
cluster.shutdown();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
|
||||||
return setup;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static SequenceFile.Writer[] createWriters(Path testdir,
|
private static SequenceFile.Writer[] createWriters(Path testdir,
|
||||||
Configuration conf, int srcs, Path[] src) throws IOException {
|
Configuration conf, int srcs, Path[] src) throws IOException {
|
||||||
|
@ -246,18 +245,22 @@ public class TestDatamerge extends TestCase {
|
||||||
base.getFileSystem(job).delete(base, true);
|
base.getFileSystem(job).delete(base, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testSimpleInnerJoin() throws Exception {
|
public void testSimpleInnerJoin() throws Exception {
|
||||||
joinAs("inner", InnerJoinChecker.class);
|
joinAs("inner", InnerJoinChecker.class);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testSimpleOuterJoin() throws Exception {
|
public void testSimpleOuterJoin() throws Exception {
|
||||||
joinAs("outer", OuterJoinChecker.class);
|
joinAs("outer", OuterJoinChecker.class);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testSimpleOverride() throws Exception {
|
public void testSimpleOverride() throws Exception {
|
||||||
joinAs("override", OverrideChecker.class);
|
joinAs("override", OverrideChecker.class);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testNestedJoin() throws Exception {
|
public void testNestedJoin() throws Exception {
|
||||||
// outer(inner(S1,...,Sn),outer(S1,...Sn))
|
// outer(inner(S1,...,Sn),outer(S1,...Sn))
|
||||||
final int SOURCES = 3;
|
final int SOURCES = 3;
|
||||||
|
@ -350,6 +353,7 @@ public class TestDatamerge extends TestCase {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testEmptyJoin() throws Exception {
|
public void testEmptyJoin() throws Exception {
|
||||||
JobConf job = new JobConf();
|
JobConf job = new JobConf();
|
||||||
Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
|
Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
|
||||||
|
|
|
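In TestDatamerge, as in TestReduceFetchFromPartialMem above, the one-shot TestSetup wrapper becomes @Before/@After methods, so the MiniDFSCluster is now started and stopped around every test method rather than once for the class. Where per-class setup is the intent, JUnit 4 also offers @BeforeClass/@AfterClass on static methods; a generic sketch of that shape, with a placeholder resource in place of a real cluster:

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertNotNull;

public class PerClassFixtureStyleTest {

  private static StringBuilder sharedCluster;   // stands in for a MiniDFSCluster

  @BeforeClass
  public static void startCluster() throws Exception {
    // Runs once before any @Test in this class, like the old TestSetup.setUp.
    sharedCluster = new StringBuilder("cluster-up");
  }

  @AfterClass
  public static void stopCluster() throws Exception {
    // Runs once after all @Test methods have finished.
    sharedCluster = null;
  }

  @Test
  public void clusterIsAvailable() {
    assertNotNull(sharedCluster);
  }
}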
@ -26,8 +26,6 @@ import java.io.IOException;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Random;
|
import java.util.Random;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.hadoop.io.BooleanWritable;
|
import org.apache.hadoop.io.BooleanWritable;
|
||||||
import org.apache.hadoop.io.BytesWritable;
|
import org.apache.hadoop.io.BytesWritable;
|
||||||
import org.apache.hadoop.io.FloatWritable;
|
import org.apache.hadoop.io.FloatWritable;
|
||||||
|
@ -36,8 +34,12 @@ import org.apache.hadoop.io.LongWritable;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.io.Writable;
|
import org.apache.hadoop.io.Writable;
|
||||||
import org.apache.hadoop.io.WritableUtils;
|
import org.apache.hadoop.io.WritableUtils;
|
||||||
|
import org.junit.Test;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertFalse;
|
||||||
|
|
||||||
public class TestTupleWritable extends TestCase {
|
public class TestTupleWritable {
|
||||||
|
|
||||||
private TupleWritable makeTuple(Writable[] writs) {
|
private TupleWritable makeTuple(Writable[] writs) {
|
||||||
Writable[] sub1 = { writs[1], writs[2] };
|
Writable[] sub1 = { writs[1], writs[2] };
|
||||||
|
@ -100,6 +102,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
return i;
|
return i;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testIterable() throws Exception {
|
public void testIterable() throws Exception {
|
||||||
Random r = new Random();
|
Random r = new Random();
|
||||||
Writable[] writs = {
|
Writable[] writs = {
|
||||||
|
@ -121,6 +124,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
verifIter(writs, t, 0);
|
verifIter(writs, t, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testNestedIterable() throws Exception {
|
public void testNestedIterable() throws Exception {
|
||||||
Random r = new Random();
|
Random r = new Random();
|
||||||
Writable[] writs = {
|
Writable[] writs = {
|
||||||
|
@ -139,6 +143,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
|
assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testWritable() throws Exception {
|
public void testWritable() throws Exception {
|
||||||
Random r = new Random();
|
Random r = new Random();
|
||||||
Writable[] writs = {
|
Writable[] writs = {
|
||||||
|
@ -162,6 +167,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
|
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testWideWritable() throws Exception {
|
public void testWideWritable() throws Exception {
|
||||||
Writable[] manyWrits = makeRandomWritables(131);
|
Writable[] manyWrits = makeRandomWritables(131);
|
||||||
|
|
||||||
|
@ -181,6 +187,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
assertEquals("All tuple data has not been read from the stream",-1,in.read());
|
assertEquals("All tuple data has not been read from the stream",-1,in.read());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
public void testWideWritable2() throws Exception {
|
public void testWideWritable2() throws Exception {
|
||||||
Writable[] manyWrits = makeRandomWritables(71);
|
Writable[] manyWrits = makeRandomWritables(71);
|
||||||
|
|
||||||
|
@ -202,6 +209,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
* Tests a tuple writable with more than 64 values and the values set written
|
* Tests a tuple writable with more than 64 values and the values set written
|
||||||
* spread far apart.
|
* spread far apart.
|
||||||
*/
|
*/
|
||||||
|
@Test
|
||||||
public void testSparseWideWritable() throws Exception {
|
public void testSparseWideWritable() throws Exception {
|
||||||
Writable[] manyWrits = makeRandomWritables(131);
|
Writable[] manyWrits = makeRandomWritables(131);
|
||||||
|
|
||||||
|
@ -220,7 +228,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
|
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
|
||||||
assertEquals("All tuple data has not been read from the stream",-1,in.read());
|
assertEquals("All tuple data has not been read from the stream",-1,in.read());
|
||||||
}
|
}
|
||||||
|
@Test
|
||||||
public void testWideTuple() throws Exception {
|
public void testWideTuple() throws Exception {
|
||||||
Text emptyText = new Text("Should be empty");
|
Text emptyText = new Text("Should be empty");
|
||||||
Writable[] values = new Writable[64];
|
Writable[] values = new Writable[64];
|
||||||
|
@ -240,7 +248,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@Test
|
||||||
public void testWideTuple2() throws Exception {
|
public void testWideTuple2() throws Exception {
|
||||||
Text emptyText = new Text("Should be empty");
|
Text emptyText = new Text("Should be empty");
|
||||||
Writable[] values = new Writable[64];
|
Writable[] values = new Writable[64];
|
||||||
|
@ -264,6 +272,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
/**
|
/**
|
||||||
* Tests that we can write more than 64 values.
|
* Tests that we can write more than 64 values.
|
||||||
*/
|
*/
|
||||||
|
@Test
|
||||||
public void testWideTupleBoundary() throws Exception {
|
public void testWideTupleBoundary() throws Exception {
|
||||||
Text emptyText = new Text("Should not be set written");
|
Text emptyText = new Text("Should not be set written");
|
||||||
Writable[] values = new Writable[65];
|
Writable[] values = new Writable[65];
|
||||||
|
@ -287,6 +296,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
/**
|
/**
|
||||||
* Tests compatibility with pre-0.21 versions of TupleWritable
|
* Tests compatibility with pre-0.21 versions of TupleWritable
|
||||||
*/
|
*/
|
||||||
|
@Test
|
||||||
public void testPreVersion21Compatibility() throws Exception {
|
public void testPreVersion21Compatibility() throws Exception {
|
||||||
Writable[] manyWrits = makeRandomWritables(64);
|
Writable[] manyWrits = makeRandomWritables(64);
|
||||||
PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
|
PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
|
||||||
|
@ -304,7 +314,7 @@ public class TestTupleWritable extends TestCase {
|
||||||
assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
|
assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
|
||||||
assertEquals("All tuple data has not been read from the stream",-1,in.read());
|
assertEquals("All tuple data has not been read from the stream",-1,in.read());
|
||||||
}
|
}
|
||||||
|
@Test
|
||||||
public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
|
public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
|
||||||
Writable[] manyWrits = new Writable[0];
|
Writable[] manyWrits = new Writable[0];
|
||||||
PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
|
PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
|
||||||
|
|
|
@ -21,8 +21,6 @@ import java.io.DataInput;
|
||||||
import java.io.DataOutput;
|
import java.io.DataOutput;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
import junit.framework.TestCase;
|
|
||||||
|
|
||||||
import org.apache.hadoop.fs.FileSystem;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.io.NullWritable;
|
import org.apache.hadoop.io.NullWritable;
|
||||||
|
@ -35,13 +33,16 @@ import org.apache.hadoop.mapred.JobConfigurable;
|
||||||
import org.apache.hadoop.mapred.RecordReader;
|
import org.apache.hadoop.mapred.RecordReader;
|
||||||
import org.apache.hadoop.mapred.Reporter;
|
import org.apache.hadoop.mapred.Reporter;
|
||||||
import org.apache.hadoop.util.ReflectionUtils;
|
import org.apache.hadoop.util.ReflectionUtils;
|
||||||
|
import org.junit.Test;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
public class TestWrappedRecordReaderClassloader extends TestCase {
|
public class TestWrappedRecordReaderClassloader {
|
||||||
/**
|
/**
|
||||||
* Tests the class loader set by {@link JobConf#setClassLoader(ClassLoader)}
|
* Tests the class loader set by {@link JobConf#setClassLoader(ClassLoader)}
|
||||||
* is inherited by any {@link WrappedRecordReader}s created by
|
* is inherited by any {@link WrappedRecordReader}s created by
|
||||||
* {@link CompositeRecordReader}
|
* {@link CompositeRecordReader}
|
||||||
*/
|
*/
|
||||||
|
@Test
|
||||||
public void testClassLoader() throws Exception {
|
public void testClassLoader() throws Exception {
|
||||||
JobConf job = new JobConf();
|
JobConf job = new JobConf();
|
||||||
Fake_ClassLoader classLoader = new Fake_ClassLoader();
|
Fake_ClassLoader classLoader = new Fake_ClassLoader();
|
||||||
|
|
|
@@ -20,8 +20,6 @@ package org.apache.hadoop.mapred.lib;
 import java.io.DataOutputStream;
 import java.io.IOException;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -32,9 +30,12 @@ import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;

-public class TestDelegatingInputFormat extends TestCase {
+public class TestDelegatingInputFormat {
+  @Test
   public void testSplitting() throws Exception {
     JobConf conf = new JobConf();
     MiniDFSCluster dfs = null;

@@ -20,13 +20,14 @@ package org.apache.hadoop.mapred.lib;

 import java.io.*;
 import java.util.*;
-import junit.framework.TestCase;

 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;

-public class TestLineInputFormat extends TestCase {
+public class TestLineInputFormat {
   private static int MAX_LENGTH = 200;

   private static JobConf defaultConf = new JobConf();
@@ -43,7 +44,7 @@ public class TestLineInputFormat extends TestCase {
   private static Path workDir =
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestLineInputFormat");
-
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     Path file = new Path(workDir, "test.txt");

@@ -36,7 +36,6 @@ import static org.junit.Assert.assertEquals;
  * @see TestDelegatingInputFormat
  */
 public class TestMultipleInputs {
-
   @Test
   public void testAddInputPathWithFormat() {
     final JobConf conf = new JobConf();
@@ -49,7 +48,6 @@ public class TestMultipleInputs {
     assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
        .getClass());
   }
-
   @Test
   public void testAddInputPathWithMapper() {
     final JobConf conf = new JobConf();

@@ -22,13 +22,14 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;

-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import java.text.NumberFormat;

-public class TestAggregates extends TestCase {
+public class TestAggregates {

   private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -36,7 +37,7 @@ public class TestAggregates extends TestCase {
     idFormat.setGroupingUsed(false);
   }

-
+  @Test
   public void testAggregates() throws Exception {
     launch();
   }

@@ -19,13 +19,13 @@ package org.apache.hadoop.mapred.lib.db;

 import java.io.IOException;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;

-public class TestConstructQuery extends TestCase {
+public class TestConstructQuery {

   private String[] fieldNames = new String[] { "id", "name", "value" };
   private String[] nullFieldNames = new String[] { null, null, null };
   private String expected = "INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);";
@@ -33,7 +33,7 @@ public class TestConstructQuery extends TestCase {

   private DBOutputFormat<DBWritable, NullWritable> format
     = new DBOutputFormat<DBWritable, NullWritable>();
-
+  @Test
   public void testConstructQuery() {
     String actual = format.constructQuery("hadoop_output", fieldNames);
     assertEquals(expected, actual);
@@ -41,7 +41,7 @@ public class TestConstructQuery extends TestCase {
     actual = format.constructQuery("hadoop_output", nullFieldNames);
     assertEquals(nullExpected, actual);
   }
-
+  @Test
   public void testSetOutput() throws IOException {
     JobConf job = new JobConf();
     DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);

@@ -44,10 +44,13 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;

-import junit.framework.TestCase;
 @Ignore
-public class TestPipes extends TestCase {
+public class TestPipes {
   private static final Log LOG =
     LogFactory.getLog(TestPipes.class.getName());

@@ -66,7 +69,7 @@ public class TestPipes extends TestCase {
     fs.delete(p, true);
     assertFalse("output not cleaned up", fs.exists(p));
   }
-
+  @Test
   public void testPipes() throws IOException {
     if (System.getProperty("compile.c++") == null) {
       LOG.info("compile.c++ is not defined, so skipping TestPipes");

@@ -17,36 +17,42 @@
  */
 package org.apache.hadoop.mapreduce;

-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.fs.*;
 import org.apache.hadoop.mapred.LocalJobRunner;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.ReflectionUtils;

 import org.junit.Test;
-import junit.framework.TestCase;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;

 /**
  * Stress tests for the LocalJobRunner
  */
-public class TestLocalRunner extends TestCase {
+public class TestLocalRunner {

   private static final Log LOG = LogFactory.getLog(TestLocalRunner.class);


@@ -17,6 +17,23 @@
  */
 package org.apache.hadoop.mapreduce;

+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.mapreduce.tools.CLI;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Test;
+
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -31,23 +48,11 @@ import java.io.PipedOutputStream;
 import java.io.PrintStream;
 import java.util.Arrays;

-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.junit.Assert;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.mapreduce.tools.CLI;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;

 /**
  test CLI class. CLI class implemented the Tool interface.
@@ -103,7 +108,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
       throw new IOException();
     }
   }
-
+  @Test
   public void testJobSubmissionSpecsAndFiles() throws Exception {
     Configuration conf = createJobConf();
     Job job = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(),
@@ -127,7 +132,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
   /**
    * main test method
    */
-
+  @Test
   public void testJobClient() throws Exception {
     Configuration conf = createJobConf();
     Job job = runJob(conf);
@@ -180,8 +185,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {

     runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), "UTF-8");
-    Assert
-        .assertTrue(answer.contains("Killed task " + taid + " by failing it"));
+    assertTrue(answer.contains("Killed task " + taid + " by failing it"));
   }

   /**
@@ -199,7 +203,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {

     runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), "UTF-8");
-    Assert.assertTrue(answer.contains("Killed task " + taid));
+    assertTrue(answer.contains("Killed task " + taid));
   }

   /**
@@ -686,6 +690,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
    * Test -list option displays job name.
    * The name is capped to 20 characters for display.
    */
+  @Test
   public void testJobName() throws Exception {
     Configuration conf = createJobConf();
     CLI jc = createJobClient();

@@ -25,8 +25,6 @@ import java.io.Writer;
 import java.util.Arrays;
 import java.util.List;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -42,13 +40,16 @@ import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;

+import static org.junit.Assert.assertTrue;

 /**
  * A JUnit test to test the Map-Reduce framework's feature to create part
  * files only if there is an explicit output.collect. This helps in preventing
  * 0 byte files
  */
-public class TestMapReduceLazyOutput extends TestCase {
+public class TestMapReduceLazyOutput {
   private static final int NUM_HADOOP_SLAVES = 3;
   private static final int NUM_MAPS_PER_NODE = 2;
   private static final Path INPUT = new Path("/testlazy/input");
@@ -122,7 +123,7 @@ public class TestMapReduceLazyOutput extends TestCase {
     }
   }

-
+  @Test
   public void testLazyOutput() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;

@@ -27,8 +27,6 @@ import java.io.Writer;
 import java.util.ArrayList;
 import java.util.StringTokenizer;

-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -43,12 +41,15 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;

+import static org.junit.Assert.assertTrue;

 /**
  * A JUnit test to test the Map-Reduce framework's support for the
  * "mark-reset" functionality in Reduce Values Iterator
  */
-public class TestValueIterReset extends TestCase {
+public class TestValueIterReset {
   private static final int NUM_MAPS = 1;
   private static final int NUM_TESTS = 4;
   private static final int NUM_VALUES = 40;
@@ -518,6 +519,7 @@ public class TestValueIterReset extends TestCase {
     }
   }

+  @Test
   public void testValueIterReset() {
     try {
       Configuration conf = new Configuration();

@@ -18,6 +18,7 @@

 package org.apache.hadoop.mapreduce;

+import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -26,7 +27,6 @@ import static org.mockito.Mockito.doNothing;

 import java.io.IOException;
 import java.nio.ByteBuffer;
-import junit.framework.TestCase;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -44,8 +44,7 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.junit.Test;

-public class TestYarnClientProtocolProvider extends TestCase {
+public class TestYarnClientProtocolProvider {

   private static final RecordFactory recordFactory = RecordFactoryProvider.
       getRecordFactory(null);


@@ -18,22 +18,24 @@
 package org.apache.hadoop.mapreduce.lib.aggregate;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.Utils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;

-import junit.framework.TestCase;
-import java.io.*;
 import java.text.NumberFormat;

-public class TestMapReduceAggregates extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestMapReduceAggregates {

   private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -41,7 +43,7 @@ public class TestMapReduceAggregates extends TestCase {
     idFormat.setGroupingUsed(false);
   }

-
+  @Test
   public void testAggregates() throws Exception {
     launch();
   }
@@ -122,11 +124,4 @@ public class TestMapReduceAggregates extends TestCase {
     fs.delete(OUTPUT_DIR, true);
     fs.delete(INPUT_DIR, true);
   }
-
-  /**
-   * Launches all the tasks in order.
-   */
-  public static void main(String[] argv) throws Exception {
-    launch();
-  }
 }

@@ -19,14 +19,15 @@ package org.apache.hadoop.mapreduce.lib.db;

 import java.io.IOException;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
+import org.junit.Test;

-public class TestDBOutputFormat extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+public class TestDBOutputFormat {
   private String[] fieldNames = new String[] { "id", "name", "value" };
   private String[] nullFieldNames = new String[] { null, null, null };
   private String expected = "INSERT INTO hadoop_output " +
@@ -36,6 +37,7 @@ public class TestDBOutputFormat extends TestCase {
   private DBOutputFormat<DBWritable, NullWritable> format
     = new DBOutputFormat<DBWritable, NullWritable>();

+  @Test
   public void testConstructQuery() {
     String actual = format.constructQuery("hadoop_output", fieldNames);
     assertEquals(expected, actual);
@@ -44,6 +46,7 @@ public class TestDBOutputFormat extends TestCase {
     assertEquals(nullExpected, actual);
   }

+  @Test
   public void testSetOutput() throws IOException {
     Job job = Job.getInstance(new Configuration());
     DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);

@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.mapreduce.lib.db;

-import java.io.IOException;
-import java.math.BigDecimal;
+import org.junit.Test;
 import java.sql.SQLException;
-import java.util.ArrayList;
 import java.util.List;

-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;

-public class TestIntegerSplitter extends TestCase {
+public class TestIntegerSplitter {
   private long [] toLongArray(List<Long> in) {
     long [] out = new long[in.size()];
     for (int i = 0; i < in.size(); i++) {
@@ -70,12 +70,14 @@ public class TestIntegerSplitter extends TestCase {
     }
   }

+  @Test
   public void testEvenSplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(10, 0, 100);
     long [] expected = { 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
     assertLongArrayEquals(expected, toLongArray(splits));
   }

+  @Test
   public void testOddSplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(10, 0, 95);
     long [] expected = { 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 95 };
@@ -83,12 +85,14 @@ public class TestIntegerSplitter extends TestCase {

   }

+  @Test
   public void testSingletonSplit() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(1, 5, 5);
     long [] expected = { 5, 5 };
     assertLongArrayEquals(expected, toLongArray(splits));
   }

+  @Test
   public void testSingletonSplit2() throws SQLException {
     // Same test, but overly-high numSplits
     List<Long> splits = new IntegerSplitter().split(5, 5, 5);
@@ -96,6 +100,7 @@ public class TestIntegerSplitter extends TestCase {
     assertLongArrayEquals(expected, toLongArray(splits));
   }

+  @Test
   public void testTooManySplits() throws SQLException {
     List<Long> splits = new IntegerSplitter().split(5, 3, 5);
     long [] expected = { 3, 4, 5 };

@@ -17,15 +17,16 @@
  */
 package org.apache.hadoop.mapreduce.lib.db;

-import java.io.IOException;
+import org.junit.Test;

 import java.math.BigDecimal;
 import java.sql.SQLException;
-import java.util.ArrayList;
 import java.util.List;

-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;

-public class TestTextSplitter extends TestCase {
+public class TestTextSplitter {

   public String formatArray(Object [] ar) {
     StringBuilder sb = new StringBuilder();
@@ -62,48 +63,56 @@ public class TestTextSplitter extends TestCase {
     }
   }

+  @Test
   public void testStringConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     BigDecimal emptyBigDec = splitter.stringToBigDecimal("");
     assertEquals(BigDecimal.ZERO, emptyBigDec);
   }

+  @Test
   public void testBigDecConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     String emptyStr = splitter.bigDecimalToString(BigDecimal.ZERO);
     assertEquals("", emptyStr);
   }

+  @Test
   public void testConvertA() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("A"));
     assertEquals("A", out);
   }

+  @Test
   public void testConvertZ() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("Z"));
     assertEquals("Z", out);
   }

+  @Test
   public void testConvertThreeChars() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("abc"));
     assertEquals("abc", out);
   }

+  @Test
   public void testConvertStr() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("big str"));
     assertEquals("big str", out);
   }

+  @Test
   public void testConvertChomped() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("AVeryLongStringIndeed"));
     assertEquals("AVeryLon", out);
   }

+  @Test
   public void testAlphabetSplit() throws SQLException {
     // This should give us 25 splits, one per letter.
     TextSplitter splitter = new TextSplitter();
@@ -113,6 +122,7 @@ public class TestTextSplitter extends TestCase {
     assertArrayEquals(expected, splits.toArray(new String [0]));
   }

+  @Test
   public void testCommonPrefix() throws SQLException {
     // Splits between 'Hand' and 'Hardy'
     TextSplitter splitter = new TextSplitter();

@@ -18,15 +18,19 @@
 package org.apache.hadoop.mapreduce.lib.fieldsel;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.junit.Test;

-import junit.framework.TestCase;
 import java.text.NumberFormat;

-public class TestMRFieldSelection extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestMRFieldSelection {

   private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -34,6 +38,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     idFormat.setGroupingUsed(false);
   }

+  @Test
   public void testFieldSelection() throws Exception {
     launch();
   }
@@ -114,11 +119,4 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     System.out.println("ExpectedData:");
     System.out.println(expectedOutput.toString());
   }
-
-  /**
-   * Launches all the tasks in order.
-   */
-  public static void main(String[] argv) throws Exception {
-    launch();
-  }
 }

@@ -18,11 +18,12 @@

 package org.apache.hadoop.mapreduce.lib.input;

-import java.io.IOException;
-import java.util.Random;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -31,12 +32,18 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.Test;

-import junit.framework.TestCase;
+import java.io.IOException;
+import java.util.Random;

-public class TestMRSequenceFileAsBinaryInputFormat extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestMRSequenceFileAsBinaryInputFormat {
   private static final int RECORDS = 10000;

+  @Test
   public void testBinary() throws IOException, InterruptedException {
     Job job = Job.getInstance();
     FileSystem fs = FileSystem.getLocal(job.getConfiguration());

@@ -18,11 +18,13 @@

 package org.apache.hadoop.mapreduce.lib.input;

-import java.util.*;
-import junit.framework.TestCase;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -31,12 +33,19 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.apache.hadoop.conf.*;
+import org.junit.Test;

-public class TestMRSequenceFileAsTextInputFormat extends TestCase {
+import java.util.BitSet;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+public class TestMRSequenceFileAsTextInputFormat {
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();

+  @Test
   public void testFormat() throws Exception {
     Job job = Job.getInstance(conf);
     FileSystem fs = FileSystem.getLocal(conf);
@@ -112,8 +121,4 @@ public class TestMRSequenceFileAsTextInputFormat extends TestCase {

     }
   }
-
-  public static void main(String[] args) throws Exception {
-    new TestMRSequenceFileAsTextInputFormat().testFormat();
-  }
 }

@@ -18,14 +18,14 @@

 package org.apache.hadoop.mapreduce.lib.input;

-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
-
-import org.apache.commons.logging.*;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -34,9 +34,14 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.apache.hadoop.conf.*;
+import org.junit.Test;

-public class TestMRSequenceFileInputFilter extends TestCase {
+import java.io.IOException;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestMRSequenceFileInputFilter {
   private static final Log LOG =
     LogFactory.getLog(TestMRSequenceFileInputFilter.class.getName());

@@ -114,6 +119,7 @@ public class TestMRSequenceFileInputFilter extends TestCase {
     return count;
   }

+  @Test
   public void testRegexFilter() throws Exception {
     // set the filter class
     LOG.info("Testing Regex Filter with patter: \\A10*");
@@ -138,6 +144,7 @@ public class TestMRSequenceFileInputFilter extends TestCase {
     fs.delete(inDir, true);
   }

+  @Test
   public void testPercentFilter() throws Exception {
     LOG.info("Testing Percent Filter with frequency: 1000");
     // set the filter class
@@ -166,6 +173,7 @@ public class TestMRSequenceFileInputFilter extends TestCase {
     fs.delete(inDir, true);
   }

+  @Test
   public void testMD5Filter() throws Exception {
     // set the filter class
     LOG.info("Testing MD5 Filter with frequency: 1000");
@@ -187,9 +195,4 @@ public class TestMRSequenceFileInputFilter extends TestCase {
     // clean up
     fs.delete(inDir, true);
   }
-
-  public static void main(String[] args) throws Exception {
-    TestMRSequenceFileInputFilter filter = new TestMRSequenceFileInputFilter();
-    filter.testRegexFilter();
-  }
 }

@@ -18,17 +18,28 @@

 package org.apache.hadoop.mapreduce.lib.input;

-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MapContext;
+import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.Test;

-public class TestNLineInputFormat extends TestCase {
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestNLineInputFormat {
   private static int MAX_LENGTH = 200;

   private static Configuration conf = new Configuration();
@@ -46,6 +57,7 @@ public class TestNLineInputFormat extends TestCase {
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestNLineInputFormat");

+  @Test
   public void testFormat() throws Exception {
     Job job = Job.getInstance(conf);
     Path file = new Path(workDir, "test.txt");
@@ -116,8 +128,4 @@ public class TestNLineInputFormat extends TestCase {
       }
     }
   }
-
-  public static void main(String[] args) throws Exception {
-    new TestNLineInputFormat().testFormat();
-  }
 }

@@ -19,11 +19,6 @@ package org.apache.hadoop.mapreduce.lib.join;

 import java.io.IOException;

-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-import junit.extensions.TestSetup;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -37,24 +32,32 @@ import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;

-public class TestJoinDatamerge extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestJoinDatamerge {

   private static MiniDFSCluster cluster = null;
-  public static Test suite() {
-    TestSetup setup = new TestSetup(new TestSuite(TestJoinDatamerge.class)) {
-      protected void setUp() throws Exception {
+
+  @BeforeClass
+  public static void setUp() throws Exception {
     Configuration conf = new Configuration();
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
   }
-      protected void tearDown() throws Exception {
+
+  @AfterClass
+  public static void tearDown() throws Exception {
     if (cluster != null) {
       cluster.shutdown();
     }
   }
-    };
-    return setup;
-  }

   private static SequenceFile.Writer[] createWriters(Path testdir,
       Configuration conf, int srcs, Path[] src) throws IOException {
@@ -272,10 +275,12 @@ public class TestJoinDatamerge extends TestCase {
     base.getFileSystem(conf).delete(base, true);
   }

+  @Test
   public void testSimpleInnerJoin() throws Exception {
     joinAs("inner", InnerJoinMapChecker.class, InnerJoinReduceChecker.class);
   }

+  @Test
   public void testSimpleOuterJoin() throws Exception {
     joinAs("outer", OuterJoinMapChecker.class, OuterJoinReduceChecker.class);
   }
@@ -323,10 +328,12 @@ public class TestJoinDatamerge extends TestCase {
     return product;
   }

+  @Test
   public void testSimpleOverride() throws Exception {
     joinAs("override", OverrideMapChecker.class, OverrideReduceChecker.class);
   }

+  @Test
   public void testNestedJoin() throws Exception {
     // outer(inner(S1,...,Sn),outer(S1,...Sn))
     final int SOURCES = 3;
@@ -422,6 +429,7 @@ public class TestJoinDatamerge extends TestCase {

   }

+  @Test
   public void testEmptyJoin() throws Exception {
     Configuration conf = new Configuration();
     Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));

@@ -20,11 +20,6 @@ package org.apache.hadoop.mapreduce.lib.join;
 import java.io.IOException;
 import java.util.List;
 
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-import junit.extensions.TestSetup;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -36,8 +31,14 @@ import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
 
-public class TestJoinProperties extends TestCase {
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestJoinProperties {
 
   private static MiniDFSCluster cluster = null;
   final static int SOURCES = 3;
@@ -46,22 +47,20 @@ public class TestJoinProperties extends TestCase {
   static Path[] src;
   static Path base;
 
-  public static Test suite() {
-    TestSetup setup = new TestSetup(new TestSuite(TestJoinProperties.class)) {
-      protected void setUp() throws Exception {
+  @BeforeClass
+  public static void setUp() throws Exception {
     Configuration conf = new Configuration();
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
     base = cluster.getFileSystem().makeQualified(new Path("/nested"));
     src = generateSources(conf);
   }
-      protected void tearDown() throws Exception {
+
+  @AfterClass
+  public static void tearDown() throws Exception {
     if (cluster != null) {
       cluster.shutdown();
     }
   }
-    };
-    return setup;
-  }
 
   // Sources from 0 to srcs-2 have IntWritable key and IntWritable value
   // src-1 source has IntWritable key and LongWritable value.
@@ -233,6 +232,7 @@ public class TestJoinProperties extends TestCase {
   }
 
   // outer(outer(A, B), C) == outer(A,outer(B, C)) == outer(A, B, C)
+  @Test
   public void testOuterAssociativity() throws Exception {
     Configuration conf = new Configuration();
     testExpr1(conf, "outer", TestType.OUTER_ASSOCIATIVITY, 33);
@@ -241,6 +241,7 @@ public class TestJoinProperties extends TestCase {
   }
 
   // inner(inner(A, B), C) == inner(A,inner(B, C)) == inner(A, B, C)
+  @Test
   public void testInnerAssociativity() throws Exception {
     Configuration conf = new Configuration();
     testExpr1(conf, "inner", TestType.INNER_ASSOCIATIVITY, 2);
@@ -249,6 +250,7 @@ public class TestJoinProperties extends TestCase {
   }
 
   // override(inner(A, B), A) == A
+  @Test
   public void testIdentity() throws Exception {
     Configuration conf = new Configuration();
     testExpr4(conf);
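The hunk above replaces the JUnit 3 suite()/TestSetup wrapper with class-level JUnit 4 fixtures. A minimal sketch of that pattern, assuming a made-up shared resource in place of the MiniDFSCluster:

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertNotNull;

// Hypothetical example class; the StringBuilder stands in for the mini cluster.
public class ClassFixtureExample {
  private static StringBuilder sharedResource;

  @BeforeClass
  public static void setUp() {
    // Runs once before any test in the class, like TestSetup.setUp() did.
    sharedResource = new StringBuilder("ready");
  }

  @AfterClass
  public static void tearDown() {
    // Runs once after all tests in the class have finished.
    sharedResource = null;
  }

  @Test
  public void testResourceIsAvailable() {
    assertNotNull(sharedResource);
  }
}

Because @BeforeClass/@AfterClass methods must be public static, the migrated setUp/tearDown also change from instance methods to static ones.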
@@ -24,8 +24,6 @@ import java.io.DataOutputStream;
 import java.util.Arrays;
 import java.util.Random;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -33,8 +31,13 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
+import org.junit.Test;
 
-public class TestJoinTupleWritable extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestJoinTupleWritable {
 
   private TupleWritable makeTuple(Writable[] writs) {
     Writable[] sub1 = { writs[1], writs[2] };
@@ -97,6 +100,7 @@ public class TestJoinTupleWritable extends TestCase {
     return i;
   }
 
+  @Test
   public void testIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -118,6 +122,7 @@ public class TestJoinTupleWritable extends TestCase {
     verifIter(writs, t, 0);
   }
 
+  @Test
   public void testNestedIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -136,6 +141,7 @@ public class TestJoinTupleWritable extends TestCase {
     assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
   }
 
+  @Test
   public void testWritable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -159,6 +165,7 @@ public class TestJoinTupleWritable extends TestCase {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
   }
 
+  @Test
   public void testWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
 
@@ -179,6 +186,7 @@ public class TestJoinTupleWritable extends TestCase {
       -1, in.read());
   }
 
+  @Test
   public void testWideWritable2() throws Exception {
     Writable[] manyWrits = makeRandomWritables(71);
 
@@ -201,6 +209,7 @@ public class TestJoinTupleWritable extends TestCase {
    * Tests a tuple writable with more than 64 values and the values set written
    * spread far apart.
    */
+  @Test
   public void testSparseWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
 
@@ -221,6 +230,7 @@ public class TestJoinTupleWritable extends TestCase {
       -1, in.read());
   }
 
+  @Test
   public void testWideTuple() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -242,6 +252,7 @@ public class TestJoinTupleWritable extends TestCase {
     }
   }
 
+  @Test
   public void testWideTuple2() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -266,6 +277,7 @@ public class TestJoinTupleWritable extends TestCase {
   /**
    * Tests that we can write more than 64 values.
    */
+  @Test
   public void testWideTupleBoundary() throws Exception {
     Text emptyText = new Text("Should not be set written");
     Writable[] values = new Writable[65];
@@ -17,23 +17,32 @@
  */
 package org.apache.hadoop.mapreduce.lib.join;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.junit.Test;
 
-public class TestWrappedRRClassloader extends TestCase {
+import static org.junit.Assert.assertTrue;
+
+public class TestWrappedRRClassloader {
   /**
    * Tests the class loader set by
    * {@link Configuration#setClassLoader(ClassLoader)}
    * is inherited by any {@link WrappedRecordReader}s created by
    * {@link CompositeRecordReader}
    */
+  @Test
   public void testClassLoader() throws Exception {
     Configuration conf = new Configuration();
     Fake_ClassLoader classLoader = new Fake_ClassLoader();
@@ -18,12 +18,17 @@
 
 package org.apache.hadoop.mapreduce.lib.output;
 
-import java.io.IOException;
-import java.util.Random;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -38,16 +43,22 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.Test;
 
-import junit.framework.TestCase;
-import org.apache.commons.logging.*;
+import java.io.IOException;
+import java.util.Random;
 
-public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class TestMRSequenceFileAsBinaryOutputFormat {
   private static final Log LOG =
     LogFactory.getLog(TestMRSequenceFileAsBinaryOutputFormat.class.getName());
 
   private static final int RECORDS = 10000;
 
+  @Test
   public void testBinary() throws IOException, InterruptedException {
     Configuration conf = new Configuration();
     Job job = Job.getInstance(conf);
@@ -144,6 +155,7 @@ public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
     assertEquals("Some records not found", RECORDS, count);
   }
 
+  @Test
   public void testSequenceOutputClassDefaultsToMapRedOutputClass()
       throws IOException {
     Job job = Job.getInstance();
@@ -172,6 +184,7 @@ public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
       SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job));
   }
 
+  @Test
   public void testcheckOutputSpecsForbidRecordCompression()
       throws IOException {
     Job job = Job.getInstance();
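The static imports added above are needed because the assert* methods were previously inherited from junit.framework.TestCase; once the base class is dropped they have to come from org.junit.Assert. A small sketch, with a hypothetical class name:

import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

// Hypothetical example class, not part of the Hadoop sources.
public class StaticAssertExample {
  @Test
  public void testAssertions() {
    assertEquals("unexpected length", 3, "abc".length());
    if ("abc".isEmpty()) {
      fail("unreachable for a non-empty literal");
    }
  }
}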
@@ -22,11 +22,14 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BinaryComparable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
-public class TestBinaryPartitioner extends TestCase {
+public class TestBinaryPartitioner {
 
+  @Test
   public void testDefaultOffsets() {
     Configuration conf = new Configuration();
     BinaryPartitioner<?> partitioner =
@@ -51,6 +54,7 @@ public class TestBinaryPartitioner extends TestCase {
     assertTrue(partition1 != partition2);
   }
 
+  @Test
   public void testCustomOffsets() {
     Configuration conf = new Configuration();
     BinaryComparable key1 = new BytesWritable(new byte[] { 1, 2, 3, 4, 5 });
@@ -76,6 +80,7 @@ public class TestBinaryPartitioner extends TestCase {
     assertEquals(partition1, partition2);
   }
 
+  @Test
   public void testLowerBound() {
     Configuration conf = new Configuration();
     BinaryPartitioner.setLeftOffset(conf, 0);
@@ -88,6 +93,7 @@ public class TestBinaryPartitioner extends TestCase {
     assertTrue(partition1 != partition2);
   }
 
+  @Test
   public void testUpperBound() {
     Configuration conf = new Configuration();
     BinaryPartitioner.setRightOffset(conf, 4);
@@ -19,14 +19,17 @@ package org.apache.hadoop.mapreduce.lib.partition;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
-public class TestKeyFieldHelper extends TestCase {
+public class TestKeyFieldHelper {
   private static final Log LOG = LogFactory.getLog(TestKeyFieldHelper.class);
   /**
    * Test is key-field-helper's parse option.
    */
+  @Test
   public void testparseOption() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");
@@ -212,6 +215,7 @@ public class TestKeyFieldHelper extends TestCase {
   /**
    * Test is key-field-helper's getWordLengths.
    */
+  @Test
   public void testGetWordLengths() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");
@@ -270,6 +274,7 @@ public class TestKeyFieldHelper extends TestCase {
   /**
    * Test is key-field-helper's getStartOffset/getEndOffset.
    */
+  @Test
   public void testgetStartEndOffset() throws Exception {
     KeyFieldHelper helper = new KeyFieldHelper();
     helper.setKeyFieldSeparator("\t");
@@ -19,14 +19,16 @@ package org.apache.hadoop.mapreduce.lib.partition;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
+import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
 
-public class TestMRKeyFieldBasedPartitioner extends TestCase {
+public class TestMRKeyFieldBasedPartitioner {
 
   /**
    * Test is key-field-based partitioned works with empty key.
    */
+  @Test
   public void testEmptyKey() throws Exception {
     int numReducers = 10;
     KeyFieldBasedPartitioner<Text, Text> kfbp =
@@ -23,8 +23,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
@@ -41,8 +39,11 @@ import org.apache.hadoop.io.serializer.JavaSerializationComparator;
 import org.apache.hadoop.io.serializer.Serialization;
 import org.apache.hadoop.io.serializer.WritableSerialization;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.junit.Test;
 
-public class TestTotalOrderPartitioner extends TestCase {
+import static org.junit.Assert.assertEquals;
+
+public class TestTotalOrderPartitioner {
 
   private static final Text[] splitStrings = new Text[] {
     // -inf            // 0
@@ -140,6 +141,7 @@ public class TestTotalOrderPartitioner extends TestCase {
     return p;
   }
 
+  @Test
   public void testTotalOrderWithCustomSerialization() throws Exception {
     TotalOrderPartitioner<String, NullWritable> partitioner =
         new TotalOrderPartitioner<String, NullWritable>();
@@ -165,6 +167,7 @@ public class TestTotalOrderPartitioner extends TestCase {
     }
   }
 
+  @Test
   public void testTotalOrderMemCmp() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();
@@ -184,6 +187,7 @@ public class TestTotalOrderPartitioner extends TestCase {
     }
   }
 
+  @Test
   public void testTotalOrderBinarySearch() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();
@@ -216,6 +220,7 @@ public class TestTotalOrderPartitioner extends TestCase {
     }
   }
 
+  @Test
   public void testTotalOrderCustomComparator() throws Exception {
     TotalOrderPartitioner<Text,NullWritable> partitioner =
       new TotalOrderPartitioner<Text,NullWritable>();
@@ -20,8 +20,6 @@ package org.apache.hadoop.mapreduce.util;
 import java.io.File;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -30,20 +28,27 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
+import org.junit.Before;
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * A test for MRAsyncDiskService.
  */
-public class TestMRAsyncDiskService extends TestCase {
+public class TestMRAsyncDiskService {
 
   public static final Log LOG = LogFactory.getLog(TestMRAsyncDiskService.class);
 
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
 
-  @Override
-  protected void setUp() {
+  @Before
+  public void setUp() {
     FileUtil.fullyDelete(new File(TEST_ROOT_DIR));
   }
 
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.mapreduce.v2;
 
-import junit.framework.TestCase;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -29,22 +28,25 @@ import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
-import java.net.InetAddress;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.net.InetAddress;
 import java.security.PrivilegedExceptionAction;
 
-public class TestMiniMRProxyUser extends TestCase {
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class TestMiniMRProxyUser {
 
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
 
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     if (System.getProperty("hadoop.log.dir") == null) {
       System.setProperty("hadoop.log.dir", "/tmp");
     }
@@ -91,15 +93,14 @@ public class TestMiniMRProxyUser extends TestCase {
     return mrCluster.createJobConf();
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     if (mrCluster != null) {
       mrCluster.shutdown();
     }
     if (dfsCluster != null) {
       dfsCluster.shutdown();
     }
-    super.tearDown();
   }
 
   private void mrRun() throws Exception {
@@ -126,10 +127,12 @@ public class TestMiniMRProxyUser extends TestCase {
     assertTrue(runJob.isSuccessful());
   }
 
+  @Test
   public void __testCurrentUser() throws Exception {
     mrRun();
   }
 
+  @Test
   public void testValidProxyUser() throws Exception {
     UserGroupInformation ugi = UserGroupInformation.createProxyUser("u1", UserGroupInformation.getLoginUser());
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
@@ -142,6 +145,7 @@ public class TestMiniMRProxyUser extends TestCase {
     });
   }
 
+  @Test
   public void ___testInvalidProxyUser() throws Exception {
     UserGroupInformation ugi = UserGroupInformation.createProxyUser("u2", UserGroupInformation.getLoginUser());
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
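The per-test fixture change above follows the same shape in each migrated file: the protected setUp()/tearDown() overrides (with their super calls) become public @Before/@After methods. A minimal sketch under made-up names:

import java.util.ArrayList;
import java.util.List;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertTrue;

// Hypothetical example class, not part of the Hadoop sources.
public class MethodFixtureExample {
  private List<String> items;

  @Before
  public void setUp() {
    // Runs before every @Test method; no super.setUp() call is needed.
    items = new ArrayList<String>();
  }

  @After
  public void tearDown() {
    // Runs after every @Test method.
    items.clear();
  }

  @Test
  public void testStartsEmpty() {
    assertTrue(items.isEmpty());
  }
}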
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.mapreduce.v2;
 
-import junit.framework.TestCase;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -28,17 +27,22 @@ import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.net.InetAddress;
 
-public class TestNonExistentJob extends TestCase {
+import static org.junit.Assert.assertNull;
+
+public class TestNonExistentJob {
 
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
 
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     if (System.getProperty("hadoop.log.dir") == null) {
       System.setProperty("hadoop.log.dir", "/tmp");
     }
@@ -78,17 +82,17 @@ public class TestNonExistentJob extends TestCase {
     return mrCluster.createJobConf();
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     if (mrCluster != null) {
       mrCluster.shutdown();
     }
     if (dfsCluster != null) {
       dfsCluster.shutdown();
     }
-    super.tearDown();
   }
 
+  @Test
   public void testGetInvalidJob() throws Exception {
     RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0"));
     assertNull(runJob);
@@ -42,6 +42,11 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 public class TestStreamingBadRecords extends ClusterMapReduceTestCase
 {
@@ -68,7 +73,8 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
     utilTest.redirectIfAntJunit();
   }
 
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     Properties props = new Properties();
     props.setProperty(JTConfig.JT_RETIREJOBS, "false");
     props.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "false");
@@ -242,6 +248,7 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
   }
   */
 
+  @Test
   public void testNoOp() {
     // Added to avoid warnings when running this disabled test
   }