diff --git a/BUILDING.txt b/BUILDING.txt
index 408cae13055..c7a91da5d0d 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -75,6 +75,7 @@ Optional packages:
   $ sudo apt-get install snappy libsnappy-dev
 * Intel ISA-L library for erasure coding
   Please refer to https://01.org/intel%C2%AE-storage-acceleration-library-open-source-version
+  (OR https://github.com/01org/isa-l)
 * Bzip2
   $ sudo apt-get install bzip2 libbz2-dev
 * Jansson (C Library for JSON)
@@ -188,11 +189,12 @@ Maven build goals:
 
 Intel ISA-L build options:
 
-  Intel ISA-L is a erasure coding library that can be utilized by the native code.
+  Intel ISA-L is an erasure coding library that can be utilized by the native code.
   It is currently an optional component, meaning that Hadoop can be built with
   or without this dependency. Note the library is used via dynamic module.
   Please reference the official site for the library details.
   https://01.org/intel%C2%AE-storage-acceleration-library-open-source-version
+  (OR https://github.com/01org/isa-l)
 
 * Use -Drequire.isal to fail the build if libisal.so is not found.
   If this option is not specified and the isal library is missing,
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index 4bdc80826aa..5c93fd37374 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -61,9 +61,9 @@ import java.util.*;
  * <li>[#PREFIX#.]type: simple|kerberos|#CLASS#, 'simple' is short for the
  * {@link PseudoAuthenticationHandler}, 'kerberos' is short for {@link KerberosAuthenticationHandler}, otherwise
  * the full class name of the {@link AuthenticationHandler} must be specified.</li>
- * <li>[#PREFIX#.]signature.secret: when signer.secret.provider is set to
- * "string" or not specified, this is the value for the secret used to sign the
- * HTTP cookie.</li>
+ * <li>[#PREFIX#.]signature.secret.file: when signer.secret.provider is set to
+ * "file" or not specified, this is the location of file including the secret
+ * used to sign the HTTP cookie.</li>
  * <li>[#PREFIX#.]token.validity: time -in seconds- that the generated token is
  * valid before a new authentication is triggered, default value is
  * 3600 seconds. This is also used for the rollover interval for
@@ -79,17 +79,16 @@ import java.util.*;
  * <p>
  * Out of the box it provides 3 signer secret provider implementations:
- * "string", "random", and "zookeeper"
+ * "file", "random" and "zookeeper"
  * <p>
  * Additional signer secret providers are supported via the
  * {@link SignerSecretProvider} class.
  * <p>
  * For the HTTP cookies mentioned above, the SignerSecretProvider is used to
  * determine the secret to use for signing the cookies. Different
- * implementations can have different behaviors. The "string" implementation
- * simply uses the string set in the [#PREFIX#.]signature.secret property
- * mentioned above. The "random" implementation uses a randomly generated
- * secret that rolls over at the interval specified by the
+ * implementations can have different behaviors. The "file" implementation
+ * loads the secret from a specified file. The "random" implementation uses a
+ * randomly generated secret that rolls over at the interval specified by the
  * [#PREFIX#.]token.validity mentioned above. The "zookeeper" implementation
  * is like the "random" one, except that it synchronizes the random secret
  * and rollovers between multiple servers; it's meant for HA services.
@@ -97,12 +96,12 @@ import java.util.*;
  * The relevant configuration properties are:
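  For illustration only, a minimal sketch of selecting the "file" provider,
  assuming the hadoop.http.authentication.* prefix that Hadoop substitutes
  for #PREFIX# (the class name and the secret-file path here are
  hypothetical):

    import java.util.Properties;

    public class HttpAuthConfigSketch {
      public static Properties httpAuthProps() {
        Properties props = new Properties();
        // "file" is also the default when signer.secret.provider is unset.
        props.setProperty(
            "hadoop.http.authentication.signer.secret.provider", "file");
        // File whose contents become the secret that signs the HTTP cookie.
        props.setProperty(
            "hadoop.http.authentication.signature.secret.file",
            "/etc/hadoop/http-auth-signature-secret");
        // Token validity and secret rollover interval, in seconds.
        props.setProperty(
            "hadoop.http.authentication.token.validity", "3600");
        return props;
      }
    }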

    */ @Ignore -public class DFSCIOTest extends TestCase { +public class DFSCIOTest { // Constants private static final Log LOG = LogFactory.getLog(DFSCIOTest.class); private static final int TEST_TYPE_READ = 0; @@ -98,6 +97,7 @@ public class DFSCIOTest extends TestCase { * * @throws Exception */ + @Test public void testIOs() throws Exception { testIOs(10, 10); } @@ -450,7 +450,7 @@ public class DFSCIOTest extends TestCase { } //Copy the executables over to the remote filesystem - String hadoopHome = System.getenv("HADOOP_PREFIX"); + String hadoopHome = System.getenv("HADOOP_HOME"); fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/libhdfs.so." + HDFS_LIB_VERSION), HDFS_SHLIB); fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/hdfs_read"), HDFS_READ); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java index 4146b139c50..f0300b368a6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java @@ -34,8 +34,6 @@ import java.util.HashMap; import java.net.InetSocketAddress; import java.net.URI; -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; @@ -50,8 +48,15 @@ import org.apache.hadoop.mapred.*; import org.apache.hadoop.mapred.lib.LongSumReducer; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; +import org.junit.Test; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.fail; -public class TestFileSystem extends TestCase { + +public class TestFileSystem { private static final Log LOG = FileSystem.LOG; private static Configuration conf = new Configuration(); @@ -66,6 +71,7 @@ public class TestFileSystem extends TestCase { private static Path READ_DIR = new Path(ROOT, "fs_read"); private static Path DATA_DIR = new Path(ROOT, "fs_data"); + @Test public void testFs() throws Exception { testFs(10 * MEGA, 100, 0); } @@ -90,7 +96,8 @@ public class TestFileSystem extends TestCase { fs.delete(READ_DIR, true); } - public static void testCommandFormat() throws Exception { + @Test + public void testCommandFormat() throws Exception { // This should go to TestFsShell.java when it is added. 
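    // A sketch of CommandFormat's contract as exercised below: the
    // constructor takes a command name, min/max positional argument counts,
    // and the set of accepted "-crc"/"-ignoreCrc" style flags; parse() then
    // enforces the bounds and records which flags appeared.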
CommandFormat cf; cf= new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc"); @@ -488,6 +495,7 @@ public class TestFileSystem extends TestCase { } } + @Test public void testFsCache() throws Exception { { long now = System.currentTimeMillis(); @@ -561,6 +569,7 @@ public class TestFileSystem extends TestCase { + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort())); } + @Test public void testFsClose() throws Exception { { Configuration conf = new Configuration(); @@ -569,6 +578,7 @@ public class TestFileSystem extends TestCase { } } + @Test public void testFsShutdownHook() throws Exception { final Set closed = Collections.synchronizedSet(new HashSet()); Configuration conf = new Configuration(); @@ -600,7 +610,7 @@ public class TestFileSystem extends TestCase { assertTrue(closed.contains(fsWithoutAuto)); } - + @Test public void testCacheKeysAreCaseInsensitive() throws Exception { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java index f2bc4edc46d..31950fd6104 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java @@ -23,19 +23,18 @@ import java.io.FileOutputStream; import java.io.OutputStreamWriter; import java.io.File; -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.junit.After; import org.junit.Before; +import org.junit.Test; /** * Test Job History Log Analyzer. * * @see JHLogAnalyzer */ -public class TestJHLA extends TestCase { +public class TestJHLA { private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class); private String historyLog = System.getProperty("test.build.data", "build/test/data") + "/history/test.log"; @@ -133,6 +132,7 @@ public class TestJHLA extends TestCase { /** * Run log analyzer in test mode for file test.log. 
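 * The analyzer is invoked directly through JHLogAnalyzer.main() with the
 * "-test" flag and an explicit job-delimiter pattern; see the args below.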
*/ + @Test public void testJHLA() { String[] args = {"-test", historyLog, "-jobDelimiter", ".!!FILE=.*!!"}; JHLogAnalyzer.main(args); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java index 96c4710b48c..ee3cc00fd1f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java @@ -25,7 +25,6 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintStream; -import java.net.InetAddress; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Iterator; @@ -33,6 +32,7 @@ import java.util.StringTokenizer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FSDataInputStream; @@ -43,6 +43,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.SequenceFile.CompressionType; +import org.apache.hadoop.io.SequenceFile.Writer; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; @@ -54,6 +55,8 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reducer; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.SequenceFileInputFormat; +import org.apache.hadoop.util.Tool; +import org.apache.hadoop.util.ToolRunner; /** * This program executes a specified operation that applies load to @@ -74,49 +77,48 @@ import org.apache.hadoop.mapred.SequenceFileInputFormat; * must be run before running the other operations. 
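 * With this change the benchmark is a standard Tool: main() delegates to
 * ToolRunner.run(new NNBench(), args), invalid inputs raise
 * HadoopIllegalArgumentException rather than calling System.exit(), and
 * run() returns non-zero once operation exceptions reach
 * MAX_OPERATION_EXCEPTIONS.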
*/ -public class NNBench { +public class NNBench extends Configured implements Tool { private static final Log LOG = LogFactory.getLog( "org.apache.hadoop.hdfs.NNBench"); - protected static String CONTROL_DIR_NAME = "control"; - protected static String OUTPUT_DIR_NAME = "output"; - protected static String DATA_DIR_NAME = "data"; - protected static final String DEFAULT_RES_FILE_NAME = "NNBench_results.log"; - protected static final String NNBENCH_VERSION = "NameNode Benchmark 0.4"; - - public static String operation = "none"; - public static long numberOfMaps = 1l; // default is 1 - public static long numberOfReduces = 1l; // default is 1 - public static long startTime = + private static String CONTROL_DIR_NAME = "control"; + private static String OUTPUT_DIR_NAME = "output"; + private static String DATA_DIR_NAME = "data"; + static final String DEFAULT_RES_FILE_NAME = "NNBench_results.log"; + private static final String NNBENCH_VERSION = "NameNode Benchmark 0.4"; + + private String operation = "none"; + private long numberOfMaps = 1l; // default is 1 + private long numberOfReduces = 1l; // default is 1 + private long startTime = System.currentTimeMillis() + (120 * 1000); // default is 'now' + 2min - public static long blockSize = 1l; // default is 1 - public static int bytesToWrite = 0; // default is 0 - public static long bytesPerChecksum = 1l; // default is 1 - public static long numberOfFiles = 1l; // default is 1 - public static short replicationFactorPerFile = 1; // default is 1 - public static String baseDir = "/benchmarks/NNBench"; // default - public static boolean readFileAfterOpen = false; // default is to not read - + private long blockSize = 1l; // default is 1 + private int bytesToWrite = 0; // default is 0 + private long bytesPerChecksum = 1l; // default is 1 + private long numberOfFiles = 1l; // default is 1 + private short replicationFactorPerFile = 1; // default is 1 + private String baseDir = "/benchmarks/NNBench"; // default + private boolean readFileAfterOpen = false; // default is to not read + private boolean isHelpMessage = false; // Supported operations private static final String OP_CREATE_WRITE = "create_write"; private static final String OP_OPEN_READ = "open_read"; private static final String OP_RENAME = "rename"; private static final String OP_DELETE = "delete"; + private static final int MAX_OPERATION_EXCEPTIONS = 1000; // To display in the format that matches the NN and DN log format // Example: 2007-10-26 00:01:19,853 static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd' 'HH:mm:ss','S"); - - private static Configuration config = new Configuration(); /** * Clean up the files before a test run * * @throws IOException on error */ - private static void cleanupBeforeTestrun() throws IOException { - FileSystem tempFS = FileSystem.get(config); + private void cleanupBeforeTestrun() throws IOException { + FileSystem tempFS = FileSystem.get(getConf()); // Delete the data directory only if it is the create/write operation if (operation.equals(OP_CREATE_WRITE)) { @@ -133,8 +135,7 @@ public class NNBench { * * @throws IOException on error */ - private static void createControlFiles() throws IOException { - FileSystem tempFS = FileSystem.get(config); + private void createControlFiles() throws IOException { LOG.info("Creating " + numberOfMaps + " control files"); for (int i = 0; i < numberOfMaps; i++) { @@ -144,8 +145,9 @@ public class NNBench { SequenceFile.Writer writer = null; try { - writer = SequenceFile.createWriter(tempFS, config, filePath, Text.class, - 
LongWritable.class, CompressionType.NONE); + writer = SequenceFile.createWriter(getConf(), Writer.file(filePath), + Writer.keyClass(Text.class), Writer.valueClass(LongWritable.class), + Writer.compression(CompressionType.NONE)); writer.append(new Text(strFileName), new LongWritable(i)); } finally { if (writer != null) { @@ -208,23 +210,23 @@ public class NNBench { * line's arguments * @param length total number of arguments */ - public static void checkArgs(final int index, final int length) { + private static void checkArgs(final int index, final int length) { if (index == length) { displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException("Not enough arguments"); } } /** * Parse input arguments + * @param args array of command line's parameters to be parsed * - * @param args array of command line's parameters to be parsed */ - public static void parseInputs(final String[] args) { + private void parseInputs(final String[] args) { // If there are no command line arguments, exit if (args.length == 0) { displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException("Give valid inputs"); } // Parse command line args @@ -263,7 +265,7 @@ public class NNBench { readFileAfterOpen = Boolean.parseBoolean(args[++i]); } else if (args[i].equals("-help")) { displayUsage(); - System.exit(-1); + isHelpMessage = true; } } @@ -281,31 +283,30 @@ public class NNBench { LOG.info(" Read file after open: " + readFileAfterOpen); // Set user-defined parameters, so the map method can access the values - config.set("test.nnbench.operation", operation); - config.setLong("test.nnbench.maps", numberOfMaps); - config.setLong("test.nnbench.reduces", numberOfReduces); - config.setLong("test.nnbench.starttime", startTime); - config.setLong("test.nnbench.blocksize", blockSize); - config.setInt("test.nnbench.bytestowrite", bytesToWrite); - config.setLong("test.nnbench.bytesperchecksum", bytesPerChecksum); - config.setLong("test.nnbench.numberoffiles", numberOfFiles); - config.setInt("test.nnbench.replicationfactor", + getConf().set("test.nnbench.operation", operation); + getConf().setLong("test.nnbench.maps", numberOfMaps); + getConf().setLong("test.nnbench.reduces", numberOfReduces); + getConf().setLong("test.nnbench.starttime", startTime); + getConf().setLong("test.nnbench.blocksize", blockSize); + getConf().setInt("test.nnbench.bytestowrite", bytesToWrite); + getConf().setLong("test.nnbench.bytesperchecksum", bytesPerChecksum); + getConf().setLong("test.nnbench.numberoffiles", numberOfFiles); + getConf().setInt("test.nnbench.replicationfactor", (int) replicationFactorPerFile); - config.set("test.nnbench.basedir", baseDir); - config.setBoolean("test.nnbench.readFileAfterOpen", readFileAfterOpen); + getConf().set("test.nnbench.basedir", baseDir); + getConf().setBoolean("test.nnbench.readFileAfterOpen", readFileAfterOpen); - config.set("test.nnbench.datadir.name", DATA_DIR_NAME); - config.set("test.nnbench.outputdir.name", OUTPUT_DIR_NAME); - config.set("test.nnbench.controldir.name", CONTROL_DIR_NAME); + getConf().set("test.nnbench.datadir.name", DATA_DIR_NAME); + getConf().set("test.nnbench.outputdir.name", OUTPUT_DIR_NAME); + getConf().set("test.nnbench.controldir.name", CONTROL_DIR_NAME); } /** * Analyze the results - * * @throws IOException on error */ - private static void analyzeResults() throws IOException { - final FileSystem fs = FileSystem.get(config); + private int analyzeResults() throws IOException { + final FileSystem fs = FileSystem.get(getConf()); Path reduceDir = new 
Path(baseDir, OUTPUT_DIR_NAME); long totalTimeAL1 = 0l; @@ -322,32 +323,31 @@ public class NNBench { for (FileStatus status : fss) { Path reduceFile = status.getPath(); - DataInputStream in; - in = new DataInputStream(fs.open(reduceFile)); + try (DataInputStream in = new DataInputStream(fs.open(reduceFile)); + BufferedReader lines = + new BufferedReader(new InputStreamReader(in))) { - BufferedReader lines; - lines = new BufferedReader(new InputStreamReader(in)); - - String line; - while ((line = lines.readLine()) != null) { - StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%;"); - String attr = tokens.nextToken(); - if (attr.endsWith(":totalTimeAL1")) { - totalTimeAL1 = Long.parseLong(tokens.nextToken()); - } else if (attr.endsWith(":totalTimeAL2")) { - totalTimeAL2 = Long.parseLong(tokens.nextToken()); - } else if (attr.endsWith(":totalTimeTPmS")) { - totalTimeTPmS = Long.parseLong(tokens.nextToken()); - } else if (attr.endsWith(":latemaps")) { - lateMaps = Long.parseLong(tokens.nextToken()); - } else if (attr.endsWith(":numOfExceptions")) { - numOfExceptions = Long.parseLong(tokens.nextToken()); - } else if (attr.endsWith(":successfulFileOps")) { - successfulFileOps = Long.parseLong(tokens.nextToken()); - } else if (attr.endsWith(":mapStartTimeTPmS")) { - mapStartTimeTPmS = Long.parseLong(tokens.nextToken()); - } else if (attr.endsWith(":mapEndTimeTPmS")) { - mapEndTimeTPmS = Long.parseLong(tokens.nextToken()); + String line; + while ((line = lines.readLine()) != null) { + StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%;"); + String attr = tokens.nextToken(); + if (attr.endsWith(":totalTimeAL1")) { + totalTimeAL1 = Long.parseLong(tokens.nextToken()); + } else if (attr.endsWith(":totalTimeAL2")) { + totalTimeAL2 = Long.parseLong(tokens.nextToken()); + } else if (attr.endsWith(":totalTimeTPmS")) { + totalTimeTPmS = Long.parseLong(tokens.nextToken()); + } else if (attr.endsWith(":latemaps")) { + lateMaps = Long.parseLong(tokens.nextToken()); + } else if (attr.endsWith(":numOfExceptions")) { + numOfExceptions = Long.parseLong(tokens.nextToken()); + } else if (attr.endsWith(":successfulFileOps")) { + successfulFileOps = Long.parseLong(tokens.nextToken()); + } else if (attr.endsWith(":mapStartTimeTPmS")) { + mapStartTimeTPmS = Long.parseLong(tokens.nextToken()); + } else if (attr.endsWith(":mapEndTimeTPmS")) { + mapEndTimeTPmS = Long.parseLong(tokens.nextToken()); + } } } } @@ -444,25 +444,29 @@ public class NNBench { " RAW DATA: # of exceptions: " + numOfExceptions, "" }; - PrintStream res = new PrintStream(new FileOutputStream( - new File(DEFAULT_RES_FILE_NAME), true)); - - // Write to a file and also dump to log - for(int i = 0; i < resultLines.length; i++) { - LOG.info(resultLines[i]); - res.println(resultLines[i]); + try (PrintStream res = new PrintStream( + new FileOutputStream(new File(DEFAULT_RES_FILE_NAME), true))) { + // Write to a file and also dump to log + for (String resultLine : resultLines) { + LOG.info(resultLine); + res.println(resultLine); + } } + if(numOfExceptions >= MAX_OPERATION_EXCEPTIONS){ + return -1; + } + return 0; } - + /** * Run the test * * @throws IOException on error */ - public static void runTests() throws IOException { - config.setLong("io.bytes.per.checksum", bytesPerChecksum); + private void runTests() throws IOException { + getConf().setLong("io.bytes.per.checksum", bytesPerChecksum); - JobConf job = new JobConf(config, NNBench.class); + JobConf job = new JobConf(getConf(), NNBench.class); job.setJobName("NNBench-" + 
operation); FileInputFormat.setInputPaths(job, new Path(baseDir, CONTROL_DIR_NAME)); @@ -487,7 +491,7 @@ public class NNBench { /** * Validate the inputs */ - public static void validateInputs() { + private void validateInputs() { // If it is not one of the four operations, then fail if (!operation.equals(OP_CREATE_WRITE) && !operation.equals(OP_OPEN_READ) && @@ -495,7 +499,8 @@ public class NNBench { !operation.equals(OP_DELETE)) { System.err.println("Error: Unknown operation: " + operation); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Unknown operation: " + operation); } // If number of maps is a negative number, then fail @@ -503,57 +508,66 @@ public class NNBench { if (numberOfMaps < 0) { System.err.println("Error: Number of maps must be a positive number"); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Number of maps must be a positive number"); } // If number of reduces is a negative number or 0, then fail if (numberOfReduces <= 0) { System.err.println("Error: Number of reduces must be a positive number"); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Number of reduces must be a positive number"); } // If blocksize is a negative number or 0, then fail if (blockSize <= 0) { System.err.println("Error: Block size must be a positive number"); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Block size must be a positive number"); } // If bytes to write is a negative number, then fail if (bytesToWrite < 0) { System.err.println("Error: Bytes to write must be a positive number"); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Bytes to write must be a positive number"); } // If bytes per checksum is a negative number, then fail if (bytesPerChecksum < 0) { System.err.println("Error: Bytes per checksum must be a positive number"); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Bytes per checksum must be a positive number"); } // If number of files is a negative number, then fail if (numberOfFiles < 0) { System.err.println("Error: Number of files must be a positive number"); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Number of files must be a positive number"); } // If replication factor is a negative number, then fail if (replicationFactorPerFile < 0) { System.err.println("Error: Replication factor must be a positive number"); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Replication factor must be a positive number"); } // If block size is not a multiple of bytesperchecksum, fail if (blockSize % bytesPerChecksum != 0) { - System.err.println("Error: Block Size in bytes must be a multiple of " + - "bytes per checksum: "); + System.err.println("Error: Block Size in bytes must be a multiple of " + + "bytes per checksum: "); displayUsage(); - System.exit(-1); + throw new HadoopIllegalArgumentException( + "Error: Block Size in bytes must be a multiple of " + + "bytes per checksum:"); } } /** @@ -562,13 +576,22 @@ public class NNBench { * @param args array of command line arguments * @throws IOException indicates a problem with test startup */ - public static void main(String[] args) throws IOException { + public static void main(String[] args) throws Exception { + int res = ToolRunner.run(new NNBench(), args); + System.exit(res); + } + + @Override + public int 
run(String[] args) throws Exception { // Display the application version string displayVersion(); // Parse the inputs parseInputs(args); - + if (isHelpMessage) { + return 0; + } + // Validate inputs validateInputs(); @@ -582,7 +605,7 @@ public class NNBench { runTests(); // Analyze results - analyzeResults(); + return analyzeResults(); } @@ -592,7 +615,6 @@ public class NNBench { static class NNBenchMapper extends Configured implements Mapper { FileSystem filesystem = null; - private String hostName = null; long numberOfFiles = 1l; long blkSize = 1l; @@ -602,7 +624,6 @@ public class NNBench { String dataDirName = null; String op = null; boolean readFile = false; - final int MAX_OPERATION_EXCEPTIONS = 1000; // Data to collect from the operation int numOfExceptions = 0; @@ -628,12 +649,6 @@ public class NNBench { } catch(Exception e) { throw new RuntimeException("Cannot get file system.", e); } - - try { - hostName = InetAddress.getLocalHost().getHostName(); - } catch(Exception e) { - throw new RuntimeException("Error getting hostname", e); - } } /** @@ -678,7 +693,7 @@ public class NNBench { LongWritable value, OutputCollector output, Reporter reporter) throws IOException { - Configuration conf = filesystem.getConf(); + Configuration conf = getConf(); numberOfFiles = conf.getLong("test.nnbench.numberoffiles", 1l); blkSize = conf.getLong("test.nnbench.blocksize", 1l); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java new file mode 100644 index 00000000000..9f9814d7226 --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hdfs; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.mapred.HadoopTestCase; +import org.apache.hadoop.util.Time; +import org.apache.hadoop.util.ToolRunner; +import org.junit.After; +import org.junit.Test; + +public class TestNNBench extends HadoopTestCase { + private static final String BASE_DIR = + new File(System.getProperty("test.build.data", "build/test/data"), + "NNBench").getAbsolutePath(); + + public TestNNBench() throws IOException { + super(LOCAL_MR, LOCAL_FS, 1, 1); + } + + @After + public void tearDown() throws Exception { + getFileSystem().delete(new Path(BASE_DIR), true); + getFileSystem().delete(new Path(NNBench.DEFAULT_RES_FILE_NAME), true); + super.tearDown(); + } + + @Test(timeout = 30000) + public void testNNBenchCreateReadAndDelete() throws Exception { + runNNBench(createJobConf(), "create_write"); + Path path = new Path(BASE_DIR + "/data/file_0_0"); + assertTrue("create_write should create the file", + getFileSystem().exists(path)); + runNNBench(createJobConf(), "open_read"); + runNNBench(createJobConf(), "delete"); + assertFalse("Delete operation should delete the file", + getFileSystem().exists(path)); + } + + @Test(timeout = 30000) + public void testNNBenchCreateAndRename() throws Exception { + runNNBench(createJobConf(), "create_write"); + Path path = new Path(BASE_DIR + "/data/file_0_0"); + assertTrue("create_write should create the file", + getFileSystem().exists(path)); + runNNBench(createJobConf(), "rename"); + Path renamedPath = new Path(BASE_DIR + "/data/file_0_r_0"); + assertFalse("Rename should rename the file", getFileSystem().exists(path)); + assertTrue("Rename should rename the file", + getFileSystem().exists(renamedPath)); + } + + private void runNNBench(Configuration conf, String operation) + throws Exception { + String[] genArgs = { "-operation", operation, "-baseDir", BASE_DIR, + "-startTime", "" + (Time.now() / 1000 + 3) }; + + assertEquals(0, ToolRunner.run(conf, new NNBench(), genArgs)); + } + +} diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java index 1d7b98a6719..97dfa26acf4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java @@ -32,21 +32,25 @@ import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.mapred.*; -import junit.framework.TestCase; import org.apache.commons.logging.*; +import org.junit.Test; +import static org.junit.Assert.assertEquals; -public class TestSequenceFileMergeProgress extends TestCase { +public class TestSequenceFileMergeProgress { private static final Log LOG = FileInputFormat.LOG; private static final int RECORDS = 10000; - + + @Test public void testMergeProgressWithNoCompression() throws IOException { 
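    // Each compression variant delegates to runTest(), which merges RECORDS
    // entries and asserts a final progress of 1.0f, now with an explicit
    // delta as JUnit 4's assertEquals expects for floating-point values.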
runTest(SequenceFile.CompressionType.NONE); } + @Test public void testMergeProgressWithRecordCompression() throws IOException { runTest(SequenceFile.CompressionType.RECORD); } + @Test public void testMergeProgressWithBlockCompression() throws IOException { runTest(SequenceFile.CompressionType.BLOCK); } @@ -92,7 +96,7 @@ public class TestSequenceFileMergeProgress extends TestCase { count++; } assertEquals(RECORDS, count); - assertEquals(1.0f, rIter.getProgress().get()); + assertEquals(1.0f, rIter.getProgress().get(), 0.0000); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java index 5bf4ff11b89..8d33b1580a8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.mapred; -import junit.framework.TestCase; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.junit.After; +import org.junit.Before; import java.io.IOException; import java.util.Map; @@ -41,7 +42,7 @@ import java.util.Properties; *
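 * Subclasses inherit the cluster lifecycle through the JUnit 4 Before/After
 * annotated methods below rather than TestCase's setUp()/tearDown() overrides.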

    * The DFS filesystem is formated before the testcase starts and after it ends. */ -public abstract class ClusterMapReduceTestCase extends TestCase { +public abstract class ClusterMapReduceTestCase { private MiniDFSCluster dfsCluster = null; private MiniMRCluster mrCluster = null; @@ -50,9 +51,8 @@ public abstract class ClusterMapReduceTestCase extends TestCase { * * @throws Exception */ - protected void setUp() throws Exception { - super.setUp(); - + @Before + public void setUp() throws Exception { startCluster(true, null); } @@ -139,9 +139,9 @@ public abstract class ClusterMapReduceTestCase extends TestCase { * * @throws Exception */ - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { stopCluster(); - super.tearDown(); } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java index e6e12ebbe4f..ecac83af5b3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java @@ -179,7 +179,7 @@ public class ReliabilityTest extends Configured implements Tool { private String normalizeCommandPath(String command) { final String hadoopHome; - if ((hadoopHome = System.getenv("HADOOP_PREFIX")) != null) { + if ((hadoopHome = System.getenv("HADOOP_HOME")) != null) { command = hadoopHome + "/" + command; } return command; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java index 353185b59e3..bc85703bc84 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestAuditLogger.java @@ -28,13 +28,13 @@ import org.apache.hadoop.ipc.TestRPC.TestImpl; import org.apache.hadoop.ipc.TestRPC.TestProtocol; import org.apache.hadoop.mapred.AuditLogger.Keys; import org.apache.hadoop.net.NetUtils; - -import junit.framework.TestCase; +import org.junit.Test; +import static org.junit.Assert.assertEquals; /** * Tests {@link AuditLogger}. */ -public class TestAuditLogger extends TestCase { +public class TestAuditLogger { private static final String USER = "test"; private static final String OPERATION = "oper"; private static final String TARGET = "tgt"; @@ -44,6 +44,7 @@ public class TestAuditLogger extends TestCase { /** * Test the AuditLog format with key-val pair. */ + @Test public void testKeyValLogFormat() { StringBuilder actLog = new StringBuilder(); StringBuilder expLog = new StringBuilder(); @@ -114,6 +115,7 @@ public class TestAuditLogger extends TestCase { /** * Test {@link AuditLogger} without IP set. 
*/ + @Test public void testAuditLoggerWithoutIP() throws Exception { // test without ip testSuccessLogFormat(false); @@ -137,6 +139,7 @@ public class TestAuditLogger extends TestCase { /** * Test {@link AuditLogger} with IP set. */ + @Test public void testAuditLoggerWithIP() throws Exception { Configuration conf = new Configuration(); // start the IPC server diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java index ea9f3d3f989..c2d6257823e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java @@ -40,6 +40,11 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.TaskCounter; import org.apache.hadoop.util.ReflectionUtils; import org.junit.Ignore; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNotNull; @Ignore public class TestBadRecords extends ClusterMapReduceTestCase { @@ -206,7 +211,8 @@ public class TestBadRecords extends ClusterMapReduceTestCase { } return processed; } - + + @Test public void testBadMapRed() throws Exception { JobConf conf = createJobConf(); conf.setMapperClass(BadMapper.class); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java index ada2d0c634b..f04fbd7a29a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java @@ -29,6 +29,12 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import org.junit.Test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertFalse; public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase { public void _testMapReduce(boolean restart) throws Exception { OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt")); @@ -85,14 +91,17 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase { } + @Test public void testMapReduce() throws Exception { _testMapReduce(false); } + @Test public void testMapReduceRestarting() throws Exception { _testMapReduce(true); } + @Test public void testDFSRestart() throws Exception { Path file = new Path(getInputDir(), "text.txt"); OutputStream os = getFileSystem().create(file); @@ -109,6 +118,7 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase { } + @Test public void testMRConfig() throws Exception { JobConf conf = createJobConf(); assertNull(conf.get("xyz")); diff 
--git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java index 4bd20d54ad5..595d09cc2a0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java @@ -21,15 +21,15 @@ import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat; import org.apache.hadoop.mapreduce.MRConfig; +import org.junit.Test; -import junit.framework.TestCase; import java.io.*; import java.util.*; /** * TestCollect checks if the collect can handle simultaneous invocations. */ -public class TestCollect extends TestCase +public class TestCollect { final static Path OUTPUT_DIR = new Path("build/test/test.collect.output"); static final int NUM_FEEDERS = 10; @@ -127,7 +127,7 @@ public class TestCollect extends TestCase conf.setNumMapTasks(1); conf.setNumReduceTasks(1); } - + @Test public void testCollect() throws IOException { JobConf conf = new JobConf(); configure(conf); @@ -144,9 +144,5 @@ public class TestCollect extends TestCase fs.delete(OUTPUT_DIR, true); } } - - public static void main(String[] args) throws IOException { - new TestCollect().testCollect(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java index 69353871cf4..7cf5e71e1a5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java @@ -21,28 +21,29 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.*; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.junit.Ignore; +import org.junit.Test; +import static org.junit.Assert.assertTrue; /** * check for the job submission options of * -libjars -files -archives */ @Ignore -public class TestCommandLineJobSubmission extends TestCase { - // Input output paths for this.. +public class TestCommandLineJobSubmission { + // Input output paths for this.. 
// these are all dummy and does not test // much in map reduce except for the command line // params static final Path input = new Path("/test/input/"); static final Path output = new Path("/test/output"); File buildDir = new File(System.getProperty("test.build.data", "/tmp")); + @Test public void testJobShell() throws Exception { MiniDFSCluster dfs = null; MiniMRCluster mr = null; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java index 239c239230e..868896815ef 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java @@ -23,11 +23,12 @@ import org.apache.hadoop.mapred.lib.*; import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper; import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection; +import org.junit.Test; +import static org.junit.Assert.assertEquals; -import junit.framework.TestCase; import java.text.NumberFormat; -public class TestFieldSelection extends TestCase { +public class TestFieldSelection { private static NumberFormat idFormat = NumberFormat.getInstance(); static { @@ -35,6 +36,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance(); idFormat.setGroupingUsed(false); } + @Test public void testFieldSelection() throws Exception { launch(); } @@ -44,8 +46,9 @@ private static NumberFormat idFormat = NumberFormat.getInstance(); FileSystem fs = FileSystem.get(conf); int numOfInputLines = 10; - Path OUTPUT_DIR = new Path("build/test/output_for_field_selection_test"); - Path INPUT_DIR = new Path("build/test/input_for_field_selection_test"); + String baseDir = System.getProperty("test.build.data", "build/test/data"); + Path OUTPUT_DIR = new Path(baseDir + "/output_for_field_selection_test"); + Path INPUT_DIR = new Path(baseDir + "/input_for_field_selection_test"); String inputFile = "input.txt"; fs.delete(INPUT_DIR, true); fs.mkdirs(INPUT_DIR); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java index 1c8be66d084..d87f6fd91a9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java @@ -17,12 +17,14 @@ */ package org.apache.hadoop.mapred; -import junit.framework.TestCase; - import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import static org.junit.Assert.assertEquals; import java.io.IOException; import java.io.Writer; @@ -30,7 +32,7 @@ import java.io.OutputStreamWriter; 
import java.util.Set; import java.util.HashSet; -public class TestFileInputFormatPathFilter extends TestCase { +public class TestFileInputFormatPathFilter { public static class DummyFileInputFormat extends FileInputFormat { @@ -55,12 +57,12 @@ public class TestFileInputFormatPathFilter extends TestCase { new Path(new Path(System.getProperty("test.build.data", "."), "data"), "TestFileInputFormatPathFilter"); - + @Before public void setUp() throws Exception { tearDown(); localFs.mkdirs(workDir); } - + @After public void tearDown() throws Exception { if (localFs.exists(workDir)) { localFs.delete(workDir, true); @@ -129,18 +131,19 @@ public class TestFileInputFormatPathFilter extends TestCase { assertEquals(createdFiles, computedFiles); } + @Test public void testWithoutPathFilterWithoutGlob() throws Exception { _testInputFiles(false, false); } - + @Test public void testWithoutPathFilterWithGlob() throws Exception { _testInputFiles(false, true); } - + @Test public void testWithPathFilterWithoutGlob() throws Exception { _testInputFiles(true, false); } - + @Test public void testWithPathFilterWithGlob() throws Exception { _testInputFiles(true, true); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java index 7891bca7990..3d1c2e71bff 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java @@ -20,10 +20,11 @@ package org.apache.hadoop.mapred; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.net.NetworkTopology; -import junit.framework.TestCase; - -public class TestGetSplitHosts extends TestCase { +import org.junit.Test; +import static org.junit.Assert.assertTrue; +public class TestGetSplitHosts { + @Test public void testGetSplitHosts() throws Exception { int numBlocks = 3; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java index 86431e5c135..2b97d3b95ad 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java @@ -21,11 +21,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; +import org.junit.Test; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; -import junit.framework.TestCase; - -public class TestIFileStreams extends TestCase { - +public class TestIFileStreams { + @Test public void testIFileStream() throws Exception { final int DLEN = 100; DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4); @@ -42,7 +43,7 @@ public class TestIFileStreams extends TestCase { } ifis.close(); } - + @Test public void testBadIFileStream() 
throws Exception { final int DLEN = 100; DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4); @@ -73,7 +74,7 @@ public class TestIFileStreams extends TestCase { } fail("Did not detect bad data in checksum"); } - + @Test public void testBadLength() throws Exception { final int DLEN = 100; DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java index 1398f9e5aaa..0c20c335d89 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java @@ -17,14 +17,15 @@ */ package org.apache.hadoop.mapred; -import junit.framework.TestCase; - import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.StringUtils; +import org.junit.Test; +import static org.junit.Assert.assertEquals; -public class TestInputPath extends TestCase { +public class TestInputPath { + @Test public void testInputPath() throws Exception { JobConf jobConf = new JobConf(); Path workingDir = jobConf.getWorkingDirectory(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java index 265118a70f6..a787e68c124 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java @@ -26,8 +26,6 @@ import java.io.Writer; import java.util.Iterator; import java.util.StringTokenizer; -import junit.framework.TestCase; - import org.apache.commons.io.FileUtils; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; @@ -36,8 +34,11 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.serializer.JavaSerializationComparator; import org.apache.hadoop.mapreduce.MRConfig; +import org.junit.Test; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; -public class TestJavaSerialization extends TestCase { +public class TestJavaSerialization { private static String TEST_ROOT_DIR = new File(System.getProperty("test.build.data", "/tmp")).toURI() @@ -90,7 +91,7 @@ public class TestJavaSerialization extends TestCase { wr.write("b a\n"); wr.close(); } - + @Test public void testMapReduceJob() throws Exception { JobConf conf = new JobConf(TestJavaSerialization.class); @@ -149,6 +150,7 @@ public class TestJavaSerialization extends TestCase { * coupled to Writable types, if so, the job will fail. 
* */ + @Test public void testWriteToSequencefile() throws Exception { JobConf conf = new JobConf(TestJavaSerialization.class); conf.setJobName("JavaSerialization"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java index 4b62b4a1d8e..2659a14a70b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java @@ -29,8 +29,13 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.lib.IdentityMapper; +import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + public class TestJobName extends ClusterMapReduceTestCase { + @Test public void testComplexName() throws Exception { OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt")); @@ -65,6 +70,7 @@ public class TestJobName extends ClusterMapReduceTestCase { reader.close(); } + @Test public void testComplexNameWithRegex() throws Exception { OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt")); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java index 109c781c2b0..3dbc5777bd5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java @@ -21,8 +21,6 @@ package org.apache.hadoop.mapred; import java.io.DataOutputStream; import java.io.IOException; -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hdfs.MiniDFSCluster; @@ -32,11 +30,15 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; +import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; /** * A JUnit test to test Job System Directory with Mini-DFS. 
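 * It submits a small job against a MiniDFSCluster-backed MiniMRCluster and
 * verifies the job system directory setting exposed through JTConfig.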
*/ -public class TestJobSysDirWithDFS extends TestCase { +public class TestJobSysDirWithDFS { private static final Log LOG = LogFactory.getLog(TestJobSysDirWithDFS.class.getName()); @@ -115,7 +117,7 @@ public class TestJobSysDirWithDFS extends TestCase { // between Job Client & Job Tracker assertTrue(result.job.isSuccessful()); } - + @Test public void testWithDFS() throws IOException { MiniDFSCluster dfs = null; MiniMRCluster mr = null; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java index 27070783e14..bacc196008e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java @@ -20,7 +20,6 @@ package org.apache.hadoop.mapred; import java.io.*; import java.util.*; -import junit.framework.TestCase; import org.apache.commons.logging.*; import org.apache.hadoop.fs.*; @@ -28,8 +27,11 @@ import org.apache.hadoop.io.*; import org.apache.hadoop.io.compress.*; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.ReflectionUtils; +import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; -public class TestKeyValueTextInputFormat extends TestCase { +public class TestKeyValueTextInputFormat { private static final Log LOG = LogFactory.getLog(TestKeyValueTextInputFormat.class.getName()); @@ -47,7 +49,7 @@ public class TestKeyValueTextInputFormat extends TestCase { private static Path workDir = new Path(new Path(System.getProperty("test.build.data", "."), "data"), "TestKeyValueTextInputFormat"); - + @Test public void testFormat() throws Exception { JobConf job = new JobConf(); Path file = new Path(workDir, "test.txt"); @@ -134,7 +136,7 @@ public class TestKeyValueTextInputFormat extends TestCase { (str.getBytes("UTF-8")), defaultConf); } - + @Test public void testUTF8() throws Exception { LineReader in = null; @@ -153,7 +155,7 @@ public class TestKeyValueTextInputFormat extends TestCase { } } } - + @Test public void testNewLines() throws Exception { LineReader in = null; try { @@ -219,7 +221,8 @@ public class TestKeyValueTextInputFormat extends TestCase { /** * Test using the gzip codec for reading */ - public static void testGzip() throws IOException { + @Test + public void testGzip() throws IOException { JobConf job = new JobConf(); CompressionCodec gzip = new GzipCodec(); ReflectionUtils.setConf(gzip, job); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java index 7412832d5c2..dde9310607f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java @@ -35,14 +35,15 @@ import org.apache.hadoop.io.Text; import 
org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.lib.LazyOutputFormat; -import junit.framework.TestCase; +import org.junit.Test; +import static org.junit.Assert.assertTrue; /** * A JUnit test to test the Map-Reduce framework's feature to create part * files only if there is an explicit output.collect. This helps in preventing * 0 byte files */ -public class TestLazyOutput extends TestCase { +public class TestLazyOutput { private static final int NUM_HADOOP_SLAVES = 3; private static final int NUM_MAPS_PER_NODE = 2; private static final Path INPUT = new Path("/testlazy/input"); @@ -132,7 +133,7 @@ public class TestLazyOutput extends TestCase { } } - + @Test public void testLazyOutput() throws Exception { MiniDFSCluster dfs = null; MiniMRCluster mr = null; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java index fb9e8fcce3a..20d0173cc81 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java @@ -17,16 +17,6 @@ */ package org.apache.hadoop.mapred; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.concurrent.TimeoutException; - -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FSDataOutputStream; @@ -36,9 +26,21 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.io.Text; +import org.junit.After; +import org.junit.Test; + +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.concurrent.TimeoutException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; @SuppressWarnings("deprecation") -public class TestMRCJCFileInputFormat extends TestCase { +public class TestMRCJCFileInputFormat { Configuration conf = new Configuration(); MiniDFSCluster dfs = null; @@ -50,6 +52,7 @@ public class TestMRCJCFileInputFormat extends TestCase { .build(); } + @Test public void testLocality() throws Exception { JobConf job = new JobConf(conf); dfs = newDFSCluster(job); @@ -109,6 +112,7 @@ public class TestMRCJCFileInputFormat extends TestCase { DFSTestUtil.waitReplication(fs, path, replication); } + @Test public void testNumInputs() throws Exception { JobConf job = new JobConf(conf); dfs = newDFSCluster(job); @@ -157,6 +161,7 @@ public class TestMRCJCFileInputFormat extends TestCase { } } + @Test public void testMultiLevelInput() throws Exception { JobConf job = new JobConf(conf); @@ -195,6 +200,7 @@ public class TestMRCJCFileInputFormat extends TestCase { } @SuppressWarnings("rawtypes") + @Test public void testLastInputSplitAtSplitBoundary() throws Exception { FileInputFormat fif = new 
FileInputFormatForTest(1024l * 1024 * 1024, 128l * 1024 * 1024); @@ -208,6 +214,7 @@ public class TestMRCJCFileInputFormat extends TestCase { } @SuppressWarnings("rawtypes") + @Test public void testLastInputSplitExceedingSplitBoundary() throws Exception { FileInputFormat fif = new FileInputFormatForTest(1027l * 1024 * 1024, 128l * 1024 * 1024); @@ -221,6 +228,7 @@ public class TestMRCJCFileInputFormat extends TestCase { } @SuppressWarnings("rawtypes") + @Test public void testLastInputSplitSingleSplit() throws Exception { FileInputFormat fif = new FileInputFormatForTest(100l * 1024 * 1024, 128l * 1024 * 1024); @@ -305,7 +313,7 @@ public class TestMRCJCFileInputFormat extends TestCase { DFSTestUtil.waitReplication(fileSys, name, replication); } - @Override + @After public void tearDown() throws Exception { if (dfs != null) { dfs.shutdown(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java index 3b86f81cc23..74b6d77f6a0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java @@ -18,18 +18,25 @@ package org.apache.hadoop.mapred; -import java.io.*; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.RawLocalFileSystem; +import org.apache.hadoop.io.NullWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapreduce.JobStatus; +import org.junit.Test; + +import java.io.File; +import java.io.IOException; import java.net.URI; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; -import org.apache.hadoop.mapred.JobContextImpl; -import org.apache.hadoop.mapred.TaskAttemptContextImpl; -import org.apache.hadoop.mapreduce.JobStatus; - -public class TestMRCJCFileOutputCommitter extends TestCase { +public class TestMRCJCFileOutputCommitter { private static Path outDir = new Path( System.getProperty("test.build.data", "/tmp"), "output"); @@ -67,6 +74,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase { } @SuppressWarnings("unchecked") + @Test public void testCommitter() throws Exception { JobConf job = new JobConf(); setConfForFileOutputCommitter(job); @@ -108,6 +116,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase { FileUtil.fullyDelete(new File(outDir.toString())); } + @Test public void testAbort() throws IOException { JobConf job = new JobConf(); setConfForFileOutputCommitter(job); @@ -161,6 +170,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase { } } + @Test public void testFailAbort() throws IOException { JobConf job = new JobConf(); job.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java index a82007f97c9..40ed9ad1160 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java @@ -97,9 +97,10 @@ public class TestMRTimelineEventHandling { + cluster.getApplicationHistoryServer().getPort()); TimelineStore ts = cluster.getApplicationHistoryServer() .getTimelineStore(); - - Path inDir = new Path("input"); - Path outDir = new Path("output"); + String localPathRoot = System.getProperty("test.build.data", + "build/test/data"); + Path inDir = new Path(localPathRoot, "input"); + Path outDir = new Path(localPathRoot, "output"); RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); Assert.assertEquals(JobStatus.SUCCEEDED, @@ -155,8 +156,10 @@ public class TestMRTimelineEventHandling { TimelineStore ts = cluster.getApplicationHistoryServer() .getTimelineStore(); - Path inDir = new Path("input"); - Path outDir = new Path("output"); + String localPathRoot = System.getProperty("test.build.data", + "build/test/data"); + Path inDir = new Path(localPathRoot, "input"); + Path outDir = new Path(localPathRoot, "output"); RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); Assert.assertEquals(JobStatus.SUCCEEDED, @@ -195,8 +198,10 @@ public class TestMRTimelineEventHandling { TimelineStore ts = cluster.getApplicationHistoryServer() .getTimelineStore(); - Path inDir = new Path("input"); - Path outDir = new Path("output"); + String localPathRoot = System.getProperty("test.build.data", + "build/test/data"); + Path inDir = new Path(localPathRoot, "input"); + Path outDir = new Path(localPathRoot, "output"); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false); RunningJob job = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java index db6348ba440..b8ff016d6af 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java @@ -22,8 +22,6 @@ import java.io.File; import java.io.IOException; import java.util.List; -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; @@ -40,6 +38,8 @@ import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo; import org.apache.hadoop.mapreduce.split.JobSplitWriter; import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader; import org.apache.hadoop.util.ReflectionUtils; +import org.junit.Test; +import static org.junit.Assert.assertTrue; /** * Validates map phase progress. @@ -55,7 +55,7 @@ import org.apache.hadoop.util.ReflectionUtils; * once mapTask.run() is finished. Sort phase progress in map task is not * validated here. 
*/ -public class TestMapProgress extends TestCase { +public class TestMapProgress { public static final Log LOG = LogFactory.getLog(TestMapProgress.class); private static String TEST_ROOT_DIR; static { @@ -234,7 +234,8 @@ public class TestMapProgress extends TestCase { /** * Validates map phase progress after each record is processed by map task * using custom task reporter. - */ + */ + @Test public void testMapProgress() throws Exception { JobConf job = new JobConf(); fs = FileSystem.getLocal(job); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java index e19ff589fa4..a9e7f64c0b8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java @@ -44,8 +44,8 @@ import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.mapred.Task.TaskReporter; - -import junit.framework.TestCase; +import org.junit.Test; +import static org.junit.Assert.assertEquals; @SuppressWarnings(value={"unchecked", "deprecation"}) /** @@ -56,7 +56,7 @@ import junit.framework.TestCase; * framework's merge on the reduce side will merge the partitions created to * generate the final output which is sorted on the key. */ -public class TestMerge extends TestCase { +public class TestMerge { private static final int NUM_HADOOP_DATA_NODES = 2; // Number of input files is same as the number of mappers. private static final int NUM_MAPPERS = 10; @@ -69,6 +69,7 @@ public class TestMerge extends TestCase { // Where output goes. private static final Path OUTPUT = new Path("/testplugin/output"); + @Test public void testMerge() throws Exception { MiniDFSCluster dfsCluster = null; MiniMRClientCluster mrCluster = null; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java index 8b7b8f51b96..b608d756a49 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java @@ -18,14 +18,16 @@ package org.apache.hadoop.mapred; +import org.junit.Test; + import java.io.IOException; -import junit.framework.TestCase; /** * A Unit-test to test bringup and shutdown of Mini Map-Reduce Cluster. 
*/ -public class TestMiniMRBringup extends TestCase { +public class TestMiniMRBringup { + @Test public void testBringUp() throws IOException { MiniMRCluster mr = null; try { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java index 74be8a758db..cbeeccffc7a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java @@ -436,8 +436,10 @@ public class TestMiniMRChildTask { JobConf conf = new JobConf(mr.getConfig()); // initialize input, output directories - Path inDir = new Path("input"); - Path outDir = new Path("output"); + Path rootDir = new Path(System.getProperty("test.build.data", + "build/test/data")); + Path inDir = new Path(rootDir, "input"); + Path outDir = new Path(rootDir, "output"); String input = "The input"; // set config to use the ExecutionEnvCheckMapClass map class @@ -462,9 +464,10 @@ public class TestMiniMRChildTask { public void testTaskEnv(){ try { JobConf conf = new JobConf(mr.getConfig()); + String baseDir = System.getProperty("test.build.data", "build/test/data"); // initialize input, output directories - Path inDir = new Path("testing/wc/input1"); - Path outDir = new Path("testing/wc/output1"); + Path inDir = new Path(baseDir + "/testing/wc/input1"); + Path outDir = new Path(baseDir + "/testing/wc/output1"); FileSystem outFs = outDir.getFileSystem(conf); runTestTaskEnv(conf, inDir, outDir, false); outFs.delete(outDir, true); @@ -485,9 +488,10 @@ public class TestMiniMRChildTask { public void testTaskOldEnv(){ try { JobConf conf = new JobConf(mr.getConfig()); + String baseDir = System.getProperty("test.build.data", "build/test/data"); // initialize input, output directories - Path inDir = new Path("testing/wc/input1"); - Path outDir = new Path("testing/wc/output1"); + Path inDir = new Path(baseDir + "/testing/wc/input1"); + Path outDir = new Path(baseDir + "/testing/wc/output1"); FileSystem outFs = outDir.getFileSystem(conf); runTestTaskEnv(conf, inDir, outDir, true); outFs.delete(outDir, true); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java index 45879aff623..3f64f7a35b9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java @@ -18,20 +18,23 @@ package org.apache.hadoop.mapred; -import java.io.*; -import junit.framework.TestCase; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.mapred.MRCaching.TestResult; import org.junit.Ignore; +import org.junit.Test; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; /** * A JUnit test to test caching with DFS * */ @Ignore -public 
class TestMiniMRDFSCaching extends TestCase { +public class TestMiniMRDFSCaching { + @Test public void testWithDFS() throws IOException { MiniMRCluster mr = null; MiniDFSCluster dfs = null; @@ -70,9 +73,4 @@ public class TestMiniMRDFSCaching extends TestCase { } } } - - public static void main(String[] argv) throws Exception { - TestMiniMRDFSCaching td = new TestMiniMRDFSCaching(); - td.testWithDFS(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java index 49825e99f57..1bd29542fcd 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java @@ -21,17 +21,17 @@ import java.io.IOException; import java.util.BitSet; import java.util.HashMap; import java.util.Random; - -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; +import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; -public class TestMultiFileInputFormat extends TestCase{ +public class TestMultiFileInputFormat { private static JobConf job = new JobConf(); @@ -79,7 +79,8 @@ public class TestMultiFileInputFormat extends TestCase{ FileInputFormat.setInputPaths(job, multiFileDir); return multiFileDir; } - + + @Test public void testFormat() throws IOException { LOG.info("Test started"); LOG.info("Max split count = " + MAX_SPLIT_COUNT); @@ -122,7 +123,8 @@ public class TestMultiFileInputFormat extends TestCase{ } LOG.info("Test Finished"); } - + + @Test public void testFormatWithLessPathsThanSplits() throws Exception { MultiFileInputFormat format = new DummyMultiFileInputFormat(); FileSystem fs = FileSystem.getLocal(job); @@ -135,9 +137,4 @@ public class TestMultiFileInputFormat extends TestCase{ initFiles(fs, 2, 500); assertEquals(2, format.getSplits(job, 4).length); } - - public static void main(String[] args) throws Exception{ - TestMultiFileInputFormat test = new TestMultiFileInputFormat(); - test.testFormat(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java index 16ff6af9271..5bb336e4e81 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java @@ -27,16 +27,19 @@ import java.io.IOException; import java.io.OutputStream; import java.util.Arrays; -import junit.framework.TestCase; - import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; +import org.junit.Test; +import static org.junit.Assert.assertEquals; 
+import static org.junit.Assert.assertTrue; + /** * * test MultiFileSplit class */ -public class TestMultiFileSplit extends TestCase{ +public class TestMultiFileSplit { + @Test public void testReadWrite() throws Exception { MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {new Path("/test/path/1"), new Path("/test/path/2")}, new long[] {100,200}); @@ -70,6 +73,7 @@ public class TestMultiFileSplit extends TestCase{ * test method getLocations * @throws IOException */ + @Test public void testgetLocations() throws IOException{ JobConf job= new JobConf(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java index 294723a9c87..7e8dfef03f1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java @@ -17,10 +17,6 @@ */ package org.apache.hadoop.mapred; -import java.io.IOException; - -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -32,12 +28,17 @@ import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; import org.junit.Ignore; +import org.junit.Test; + +import java.io.IOException; + +import static org.junit.Assert.assertEquals; /** * This test checks whether the task caches are created and used properly. 
*/ @Ignore -public class TestMultipleLevelCaching extends TestCase { +public class TestMultipleLevelCaching { private static final int MAX_LEVEL = 5; final Path inDir = new Path("/cachetesting"); final Path outputPath = new Path("/output"); @@ -71,6 +72,7 @@ public class TestMultipleLevelCaching extends TestCase { return rack.toString(); } + @Test public void testMultiLevelCaching() throws Exception { for (int i = 1 ; i <= MAX_LEVEL; ++i) { testCachingAtLevel(i); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java index 14c097d77e1..b5047fc8331 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java @@ -18,15 +18,19 @@ package org.apache.hadoop.mapred; -import java.io.*; -import junit.framework.TestCase; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat; +import org.junit.Test; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; +import java.io.File; +import java.io.IOException; -import org.apache.hadoop.mapred.lib.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; -public class TestMultipleTextOutputFormat extends TestCase { +public class TestMultipleTextOutputFormat { private static JobConf defaultConf = new JobConf(); private static FileSystem localFs = null; @@ -83,7 +87,8 @@ public class TestMultipleTextOutputFormat extends TestCase { writeData(rw); rw.close(null); } - + + @Test public void testFormat() throws Exception { JobConf job = new JobConf(); job.set(JobContext.TASK_ATTEMPT_ID, attempt); @@ -145,8 +150,4 @@ public class TestMultipleTextOutputFormat extends TestCase { //System.out.printf("File_2 output: %s\n", output); assertEquals(output, expectedOutput.toString()); } - - public static void main(String[] args) throws Exception { - new TestMultipleTextOutputFormat().testFormat(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java index 80a670209d2..046c2d37eed 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java @@ -37,8 +37,9 @@ import java.util.Set; import java.util.UUID; public class TestOldCombinerGrouping { - private static String TEST_ROOT_DIR = - new File("build", UUID.randomUUID().toString()).getAbsolutePath(); + private static String TEST_ROOT_DIR = new File(System.getProperty( + "test.build.data", "build/test/data"), UUID.randomUUID().toString()) + .getAbsolutePath(); public static class Map implements Mapper { diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java index 586df38dcfc..767459f88b4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java @@ -19,17 +19,18 @@ package org.apache.hadoop.mapred; import org.apache.hadoop.mapreduce.TaskCounter; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; public class TestReduceFetch extends TestReduceFetchFromPartialMem { - static { - setSuite(TestReduceFetch.class); - } - /** * Verify that all segments are read from disk * @throws Exception might be thrown */ + @Test public void testReduceFromDisk() throws Exception { final int MAP_TASKS = 8; JobConf job = mrCluster.createJobConf(); @@ -53,6 +54,7 @@ public class TestReduceFetch extends TestReduceFetchFromPartialMem { * Verify that no segment hits disk. * @throws Exception might be thrown */ + @Test public void testReduceFromMem() throws Exception { final int MAP_TASKS = 3; JobConf job = mrCluster.createJobConf(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java index 3a1a275ab91..9b04f64ac60 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java @@ -18,10 +18,6 @@ package org.apache.hadoop.mapred; -import junit.extensions.TestSetup; -import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -30,7 +26,9 @@ import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.mapreduce.TaskCounter; -import org.apache.hadoop.mapreduce.MRConfig; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import java.io.DataInput; import java.io.DataOutput; @@ -39,34 +37,27 @@ import java.util.Arrays; import java.util.Formatter; import java.util.Iterator; -public class TestReduceFetchFromPartialMem extends TestCase { +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +public class TestReduceFetchFromPartialMem { protected static MiniMRCluster mrCluster = null; protected static MiniDFSCluster dfsCluster = null; - protected static TestSuite mySuite; - protected static void setSuite(Class klass) { - mySuite = new TestSuite(klass); + @Before + public void setUp() throws Exception { + Configuration conf = new Configuration(); + dfsCluster = new 
MiniDFSCluster.Builder(conf).numDataNodes(2).build(); + mrCluster = new MiniMRCluster(2, + dfsCluster.getFileSystem().getUri().toString(), 1); } - static { - setSuite(TestReduceFetchFromPartialMem.class); - } - - public static Test suite() { - TestSetup setup = new TestSetup(mySuite) { - protected void setUp() throws Exception { - Configuration conf = new Configuration(); - dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); - mrCluster = new MiniMRCluster(2, - dfsCluster.getFileSystem().getUri().toString(), 1); - } - protected void tearDown() throws Exception { - if (dfsCluster != null) { dfsCluster.shutdown(); } - if (mrCluster != null) { mrCluster.shutdown(); } - } - }; - return setup; + @After + public void tearDown() throws Exception { + if (dfsCluster != null) { dfsCluster.shutdown(); } + if (mrCluster != null) { mrCluster.shutdown(); } } private static final String tagfmt = "%04d"; @@ -78,6 +69,7 @@ public class TestReduceFetchFromPartialMem extends TestCase { } /** Verify that at least one segment does not hit disk */ + @Test public void testReduceFromPartialMem() throws Exception { final int MAP_TASKS = 7; JobConf job = mrCluster.createJobConf(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java index 43fd94871a2..69546a6cba2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java @@ -17,10 +17,6 @@ */ package org.apache.hadoop.mapred; -import java.io.IOException; - -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; @@ -30,11 +26,17 @@ import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.util.Progressable; +import org.junit.Test; + +import java.io.IOException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; /** * This test exercises the ValueIterator. 
*/ -public class TestReduceTask extends TestCase { +public class TestReduceTask { static class NullProgress implements Progressable { public void progress() { } @@ -119,9 +121,10 @@ public class TestReduceTask extends TestCase { } assertEquals(vals.length, i); // make sure we have progress equal to 1.0 - assertEquals(1.0f, rawItr.getProgress().get()); + assertEquals(1.0f, rawItr.getProgress().get(),0.0000); } + @Test public void testValueIterator() throws Exception { Path tmpDir = new Path("build/test/test.reduce.task"); Configuration conf = new Configuration(); @@ -129,7 +132,8 @@ public class TestReduceTask extends TestCase { runValueIterator(tmpDir, testCase, conf, null); } } - + + @Test public void testValueIteratorWithCompression() throws Exception { Path tmpDir = new Path("build/test/test.reduce.task.compression"); Configuration conf = new Configuration(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java index b8be7400070..64b0983a5d6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java @@ -18,19 +18,26 @@ package org.apache.hadoop.mapred; +import org.apache.commons.logging.Log; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.DataInputBuffer; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; +import org.junit.Test; + import java.io.IOException; import java.util.Random; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; -import junit.framework.TestCase; -import org.apache.commons.logging.*; - -public class TestSequenceFileAsBinaryInputFormat extends TestCase { +public class TestSequenceFileAsBinaryInputFormat { private static final Log LOG = FileInputFormat.LOG; private static final int RECORDS = 10000; + @Test public void testBinary() throws IOException { JobConf job = new JobConf(); FileSystem fs = FileSystem.getLocal(job); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java index abe21f223ef..03dc6a69003 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java @@ -18,24 +18,35 @@ package org.apache.hadoop.mapred; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import 
org.apache.hadoop.io.BooleanWritable; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.DataInputBuffer; +import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.io.DoubleWritable; +import org.apache.hadoop.io.FloatWritable; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.SequenceFile.CompressionType; +import org.junit.Test; + import java.io.IOException; import java.util.Random; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; -import org.apache.hadoop.io.SequenceFile.CompressionType; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; -import junit.framework.TestCase; -import org.apache.commons.logging.*; - -public class TestSequenceFileAsBinaryOutputFormat extends TestCase { +public class TestSequenceFileAsBinaryOutputFormat { private static final Log LOG = LogFactory.getLog(TestSequenceFileAsBinaryOutputFormat.class.getName()); - private static final int RECORDS = 10000; // A random task attempt id for testing. private static final String attempt = "attempt_200707121733_0001_m_000000_0"; + @Test public void testBinary() throws IOException { JobConf job = new JobConf(); FileSystem fs = FileSystem.getLocal(job); @@ -129,7 +140,8 @@ public class TestSequenceFileAsBinaryOutputFormat extends TestCase { assertEquals("Some records not found", RECORDS, count); } - public void testSequenceOutputClassDefaultsToMapRedOutputClass() + @Test + public void testSequenceOutputClassDefaultsToMapRedOutputClass() throws IOException { JobConf job = new JobConf(); FileSystem fs = FileSystem.getLocal(job); @@ -163,6 +175,7 @@ public class TestSequenceFileAsBinaryOutputFormat extends TestCase { job)); } + @Test public void testcheckOutputSpecsForbidRecordCompression() throws IOException { JobConf job = new JobConf(); FileSystem fs = FileSystem.getLocal(job); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java index 4cfd59af745..d4e5e17e11f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java @@ -18,22 +18,29 @@ package org.apache.hadoop.mapred; -import java.io.*; -import java.util.*; -import junit.framework.TestCase; +import org.apache.commons.logging.Log; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; +import org.junit.Test; -import org.apache.commons.logging.*; +import java.util.BitSet; +import java.util.Random; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; -import org.apache.hadoop.conf.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; -public class TestSequenceFileAsTextInputFormat extends TestCase { +public class TestSequenceFileAsTextInputFormat { private static final Log LOG = FileInputFormat.LOG; 
private static int MAX_LENGTH = 10000; private static Configuration conf = new Configuration(); + @Test public void testFormat() throws Exception { JobConf job = new JobConf(conf); FileSystem fs = FileSystem.getLocal(conf); @@ -112,8 +119,4 @@ public class TestSequenceFileAsTextInputFormat extends TestCase { } } - - public static void main(String[] args) throws Exception { - new TestSequenceFileAsTextInputFormat().testFormat(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java index e50c396a434..93f21ce9e49 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java @@ -18,17 +18,21 @@ package org.apache.hadoop.mapred; -import java.io.*; -import java.util.*; -import junit.framework.TestCase; +import org.apache.commons.logging.Log; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; +import org.junit.Test; -import org.apache.commons.logging.*; +import java.io.IOException; +import java.util.Random; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; -import org.apache.hadoop.conf.*; +import static org.junit.Assert.assertEquals; -public class TestSequenceFileInputFilter extends TestCase { +public class TestSequenceFileInputFilter { private static final Log LOG = FileInputFormat.LOG; private static final int MAX_LENGTH = 15000; @@ -97,7 +101,8 @@ public class TestSequenceFileInputFilter extends TestCase { } return count; } - + + @Test public void testRegexFilter() throws Exception { // set the filter class LOG.info("Testing Regex Filter with patter: \\A10*"); @@ -121,6 +126,7 @@ public class TestSequenceFileInputFilter extends TestCase { fs.delete(inDir, true); } + @Test public void testPercentFilter() throws Exception { LOG.info("Testing Percent Filter with frequency: 1000"); // set the filter class @@ -147,7 +153,8 @@ public class TestSequenceFileInputFilter extends TestCase { // clean up fs.delete(inDir, true); } - + + @Test public void testMD5Filter() throws Exception { // set the filter class LOG.info("Testing MD5 Filter with frequency: 1000"); @@ -168,9 +175,4 @@ public class TestSequenceFileInputFilter extends TestCase { // clean up fs.delete(inDir, true); } - - public static void main(String[] args) throws Exception { - TestSequenceFileInputFilter filter = new TestSequenceFileInputFilter(); - filter.testRegexFilter(); - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java index 575ed532545..ba4dce30974 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java @@ -18,22 +18,28 @@ package org.apache.hadoop.mapred; -import java.io.*; -import java.util.*; -import junit.framework.TestCase; +import org.apache.commons.logging.Log; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.SequenceFile; +import org.junit.Test; -import org.apache.commons.logging.*; +import java.util.BitSet; +import java.util.Random; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.io.*; -import org.apache.hadoop.conf.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; -public class TestSequenceFileInputFormat extends TestCase { +public class TestSequenceFileInputFormat { private static final Log LOG = FileInputFormat.LOG; private static int MAX_LENGTH = 10000; private static Configuration conf = new Configuration(); + @Test public void testFormat() throws Exception { JobConf job = new JobConf(conf); FileSystem fs = FileSystem.getLocal(conf); @@ -110,7 +116,6 @@ public class TestSequenceFileInputFormat extends TestCase { } } - public static void main(String[] args) throws Exception { new TestSequenceFileInputFormat().testFormat(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java index ad4d4ce17a9..82d1d2d09a1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java @@ -17,18 +17,20 @@ */ package org.apache.hadoop.mapred; -import java.util.Iterator; - -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.mapred.SortedRanges.Range; +import org.junit.Test; -public class TestSortedRanges extends TestCase { - private static final Log LOG = +import java.util.Iterator; + +import static org.junit.Assert.assertEquals; + +public class TestSortedRanges { + private static final Log LOG = LogFactory.getLog(TestSortedRanges.class); - + + @Test public void testAdd() { SortedRanges sr = new SortedRanges(); sr.add(new Range(2,9)); @@ -66,7 +68,8 @@ public class TestSortedRanges extends TestCase { assertEquals(77, it.next().longValue()); } - + + @Test public void testRemove() { SortedRanges sr = new SortedRanges(); sr.add(new Range(2,19)); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java index 426686f9bb5..b9e32759fa4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java @@ -18,12 +18,6 @@ package org.apache.hadoop.mapred; -import java.io.DataOutputStream; -import java.io.IOException; -import java.net.URI; - -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -34,14 +28,20 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityReducer; -import org.apache.hadoop.mapreduce.MRConfig; -import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; import org.apache.hadoop.util.Progressable; +import org.junit.Test; + +import java.io.DataOutputStream; +import java.io.IOException; +import java.net.URI; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; /** * A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795). */ -public class TestSpecialCharactersInOutputPath extends TestCase { +public class TestSpecialCharactersInOutputPath { private static final Log LOG = LogFactory.getLog(TestSpecialCharactersInOutputPath.class.getName()); @@ -96,7 +96,8 @@ public class TestSpecialCharactersInOutputPath extends TestCase { LOG.info("job is complete: " + runningJob.isSuccessful()); return (runningJob.isSuccessful()); } - + + @Test public void testJobWithDFS() throws IOException { String namenode = null; MiniDFSCluster dfs = null; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java index 12568d09175..8a83e8153e3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java @@ -19,14 +19,18 @@ package org.apache.hadoop.mapred; import java.util.Map; -import junit.framework.TestCase; - import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow; import org.apache.hadoop.mapred.StatisticsCollector.Stat; +import org.junit.Test; -public class TestStatisticsCollector extends TestCase{ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +public class TestStatisticsCollector { @SuppressWarnings("rawtypes") + @Test public void testMovingWindow() throws Exception { StatisticsCollector collector = new StatisticsCollector(1); TimeWindow window = new TimeWindow("test", 6, 2); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java index 3c2cf215fb3..2d67edc581a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java @@ -17,6 +17,15 @@ */ package org.apache.hadoop.mapred; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.lib.IdentityMapper; +import org.apache.hadoop.mapred.lib.IdentityReducer; +import org.junit.Test; + import java.io.BufferedReader; import java.io.File; import java.io.IOException; @@ -26,18 +35,10 @@ import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapred.lib.IdentityMapper; -import org.apache.hadoop.mapred.lib.IdentityReducer; - -public class TestUserDefinedCounters extends TestCase { - +public class TestUserDefinedCounters { private static String TEST_ROOT_DIR = new File(System.getProperty("test.build.data", "/tmp")).toURI() .toString().replace(' ', '+') @@ -75,6 +76,7 @@ public class TestUserDefinedCounters extends TestCase { wr.close(); } + @Test public void testMapReduceJob() throws Exception { JobConf conf = new JobConf(TestUserDefinedCounters.class); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java index 2c0cedcbb30..82c68db30c5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java @@ -18,12 +18,6 @@ package org.apache.hadoop.mapred; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; @@ -31,8 +25,15 @@ import org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.util.GenericsUtil; +import org.junit.Test; -public class TestWritableJobConf extends TestCase { +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import static org.junit.Assert.assertTrue; + +public class TestWritableJobConf { private static final Configuration CONF = new Configuration(); @@ -78,15 +79,17 @@ public class TestWritableJobConf extends TestCase { } } - assertEquals(map1, map2); + assertTrue(map1.equals(map2)); } + @Test public void testEmptyConfiguration() throws Exception { JobConf conf = new JobConf(); Configuration deser = serDeser(conf); assertEquals(conf, deser); } + @Test public void testNonEmptyConfiguration() throws Exception { JobConf conf = new JobConf(); conf.set("a", "A"); @@ -95,6 +98,7 @@ public class TestWritableJobConf extends 
TestCase { assertEquals(conf, deser); } + @Test public void testConfigurationWithDefaults() throws Exception { JobConf conf = new JobConf(false); conf.set("a", "A"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java index 0e340428214..abf2e72e0d1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java @@ -18,6 +18,10 @@ package org.apache.hadoop.mapred; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; @@ -38,8 +42,6 @@ import java.security.PrivilegedExceptionAction; import java.util.List; import java.util.Map; -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -113,7 +115,7 @@ import org.mockito.stubbing.Answer; * Test YarnRunner and make sure the client side plugin works * fine */ -public class TestYARNRunner extends TestCase { +public class TestYARNRunner { private static final Log LOG = LogFactory.getLog(TestYARNRunner.class); private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java index 15cea69dab2..a3066765ec0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java @@ -22,11 +22,6 @@ import java.io.DataOutput; import java.io.IOException; import java.util.Iterator; -import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; -import junit.extensions.TestSetup; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; @@ -54,23 +49,27 @@ import org.apache.hadoop.mapred.Utils; import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.util.ReflectionUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; -public class TestDatamerge extends TestCase { +public class TestDatamerge { private static MiniDFSCluster cluster = null; - public static Test suite() { - TestSetup setup = new TestSetup(new TestSuite(TestDatamerge.class)) { - protected void setUp() throws Exception { - Configuration conf = new Configuration(); 
- cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); - } - protected void tearDown() throws Exception { - if (cluster != null) { - cluster.shutdown(); - } - } - }; - return setup; + + @Before + public void setUp() throws Exception { + Configuration conf = new Configuration(); + cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); + } + @After + public void tearDown() throws Exception { + if (cluster != null) { + cluster.shutdown(); + } } private static SequenceFile.Writer[] createWriters(Path testdir, @@ -246,18 +245,22 @@ public class TestDatamerge extends TestCase { base.getFileSystem(job).delete(base, true); } + @Test public void testSimpleInnerJoin() throws Exception { joinAs("inner", InnerJoinChecker.class); } + @Test public void testSimpleOuterJoin() throws Exception { joinAs("outer", OuterJoinChecker.class); } + @Test public void testSimpleOverride() throws Exception { joinAs("override", OverrideChecker.class); } + @Test public void testNestedJoin() throws Exception { // outer(inner(S1,...,Sn),outer(S1,...Sn)) final int SOURCES = 3; @@ -350,6 +353,7 @@ public class TestDatamerge extends TestCase { } + @Test public void testEmptyJoin() throws Exception { JobConf job = new JobConf(); Path base = cluster.getFileSystem().makeQualified(new Path("/empty")); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java index e421ede9827..56871550dc9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java @@ -26,8 +26,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.Random; -import junit.framework.TestCase; - import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.FloatWritable; @@ -36,8 +34,12 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableUtils; +import org.junit.Test; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; -public class TestTupleWritable extends TestCase { +public class TestTupleWritable { private TupleWritable makeTuple(Writable[] writs) { Writable[] sub1 = { writs[1], writs[2] }; @@ -100,6 +102,7 @@ public class TestTupleWritable extends TestCase { return i; } + @Test public void testIterable() throws Exception { Random r = new Random(); Writable[] writs = { @@ -121,6 +124,7 @@ public class TestTupleWritable extends TestCase { verifIter(writs, t, 0); } + @Test public void testNestedIterable() throws Exception { Random r = new Random(); Writable[] writs = { @@ -139,6 +143,7 @@ public class TestTupleWritable extends TestCase { assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0)); } + @Test public void testWritable() throws Exception { Random r = new Random(); Writable[] writs = { @@ -162,6 +167,7 @@ public class TestTupleWritable extends TestCase { assertTrue("Failed to write/read tuple", sTuple.equals(dTuple)); } + @Test public void testWideWritable() 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
index e421ede9827..56871550dc9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java
@@ -26,8 +26,6 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Random;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -36,8 +34,12 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;

-public class TestTupleWritable extends TestCase {
+public class TestTupleWritable {

   private TupleWritable makeTuple(Writable[] writs) {
     Writable[] sub1 = { writs[1], writs[2] };
@@ -100,6 +102,7 @@ public class TestTupleWritable extends TestCase {
     return i;
   }

+  @Test
   public void testIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -121,6 +124,7 @@ public class TestTupleWritable extends TestCase {
     verifIter(writs, t, 0);
   }

+  @Test
   public void testNestedIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -139,6 +143,7 @@ public class TestTupleWritable extends TestCase {
     assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
   }

+  @Test
   public void testWritable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -162,6 +167,7 @@ public class TestTupleWritable extends TestCase {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
   }

+  @Test
   public void testWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
@@ -180,7 +186,8 @@ public class TestTupleWritable extends TestCase {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
-
+
+  @Test
   public void testWideWritable2() throws Exception {
     Writable[] manyWrits = makeRandomWritables(71);
@@ -202,6 +209,7 @@ public class TestTupleWritable extends TestCase {
    * Tests a tuple writable with more than 64 values and the values set written
    * spread far apart.
    */
+  @Test
   public void testSparseWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
@@ -220,7 +228,7 @@ public class TestTupleWritable extends TestCase {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
-
+  @Test
   public void testWideTuple() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -240,7 +248,7 @@ public class TestTupleWritable extends TestCase {
       }
     }
   }
-
+  @Test
   public void testWideTuple2() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -264,6 +272,7 @@ public class TestTupleWritable extends TestCase {
   /**
    * Tests that we can write more than 64 values.
    */
+  @Test
   public void testWideTupleBoundary() throws Exception {
     Text emptyText = new Text("Should not be set written");
     Writable[] values = new Writable[65];
@@ -287,6 +296,7 @@ public class TestTupleWritable extends TestCase {
   /**
    * Tests compatibility with pre-0.21 versions of TupleWritable
    */
+  @Test
   public void testPreVersion21Compatibility() throws Exception {
     Writable[] manyWrits = makeRandomWritables(64);
     PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
@@ -304,7 +314,7 @@ public class TestTupleWritable extends TestCase {
     assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
     assertEquals("All tuple data has not been read from the stream",-1,in.read());
   }
-
+  @Test
   public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
     Writable[] manyWrits = new Writable[0];
     PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
index 3ca175a5049..ae5572f5dcd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
@@ -21,8 +21,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
@@ -35,13 +33,16 @@ import org.apache.hadoop.mapred.JobConfigurable;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;

-public class TestWrappedRecordReaderClassloader extends TestCase {
+public class TestWrappedRecordReaderClassloader {
   /**
    * Tests the class loader set by {@link JobConf#setClassLoader(ClassLoader)}
    * is inherited by any {@link WrappedRecordReader}s created by
    * {@link CompositeRecordReader}
    */
+  @Test
   public void testClassLoader() throws Exception {
     JobConf job = new JobConf();
     Fake_ClassLoader classLoader = new Fake_ClassLoader();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
index 8bd855433ea..b916026272e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.mapred.lib;
 import java.io.DataOutputStream;
 import java.io.IOException;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -32,9 +30,12 @@ import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;

-public class TestDelegatingInputFormat extends TestCase {
-
+public class TestDelegatingInputFormat {
+  @Test
   public void testSplitting() throws Exception {
     JobConf conf = new JobConf();
     MiniDFSCluster dfs = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
index db9c219e9c1..388de0fb88d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
@@ -20,13 +20,14 @@ package org.apache.hadoop.mapred.lib;
 import java.io.*;
 import java.util.*;

-import junit.framework.TestCase;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;

-public class TestLineInputFormat extends TestCase {
+public class TestLineInputFormat {
   private static int MAX_LENGTH = 200;

   private static JobConf defaultConf = new JobConf();
@@ -43,7 +44,7 @@ public class TestLineInputFormat extends TestCase {
   private static Path workDir =
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestLineInputFormat");
-
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     Path file = new Path(workDir, "test.txt");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
index 3a9cb9ec337..115a6f70d08 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java
@@ -36,7 +36,6 @@ import static org.junit.Assert.assertEquals;
  * @see TestDelegatingInputFormat
  */
 public class TestMultipleInputs {
-
   @Test
   public void testAddInputPathWithFormat() {
     final JobConf conf = new JobConf();
@@ -49,7 +48,6 @@ public class TestMultipleInputs {
     assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
        .getClass());
   }
-
   @Test
   public void testAddInputPathWithMapper() {
     final JobConf conf = new JobConf();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
index 6da96ce22bd..b839a2c3afe 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
@@ -22,13 +22,14 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;

-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import java.text.NumberFormat;

-public class TestAggregates extends TestCase {
+public class TestAggregates {

   private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -36,7 +37,7 @@ public class TestAggregates extends TestCase {
     idFormat.setGroupingUsed(false);
   }

-
+  @Test
   public void testAggregates() throws Exception {
     launch();
   }
@@ -46,8 +47,9 @@ public class TestAggregates extends TestCase {
     FileSystem fs = FileSystem.get(conf);
     int numOfInputLines = 20;

-    Path OUTPUT_DIR = new Path("build/test/output_for_aggregates_test");
-    Path INPUT_DIR = new Path("build/test/input_for_aggregates_test");
+    String baseDir = System.getProperty("test.build.data", "build/test/data");
+    Path OUTPUT_DIR = new Path(baseDir + "/output_for_aggregates_test");
+    Path INPUT_DIR = new Path(baseDir + "/input_for_aggregates_test");
     String inputFile = "input.txt";
     fs.delete(INPUT_DIR, true);
     fs.mkdirs(INPUT_DIR);
@@ -115,7 +117,7 @@ public class TestAggregates extends TestCase {
     outdata = outdata.substring(0, expectedOutput.toString().length());
     assertEquals(expectedOutput.toString(),outdata);
-    //fs.delete(OUTPUT_DIR);
+    fs.delete(OUTPUT_DIR, true);
     fs.delete(INPUT_DIR, true);
   }
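The TestAggregates hunk above, like the TestNewCombinerGrouping and TestCombineFileInputFormat hunks further down, re-roots test scratch directories under the test.build.data system property instead of a hard-coded build/test path. A tiny helper sketch of the pattern (illustrative; the TestDirs name is hypothetical):

    import org.apache.hadoop.fs.Path;

    public class TestDirs {
      // Resolve a scratch directory under test.build.data, falling back to
      // build/test/data when the property is unset, as the patch does.
      static Path scratch(String name) {
        String base = System.getProperty("test.build.data", "build/test/data");
        return new Path(base, name);
      }
    }

For example, TestDirs.scratch("output_for_aggregates_test") yields the same path computed above, and keeps concurrent builds from trampling each other's output.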
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
index 968bb066565..203da4e0b7c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.mapred.lib.db;

 import java.io.IOException;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;

-public class TestConstructQuery extends TestCase {
-
+public class TestConstructQuery {
   private String[] fieldNames = new String[] { "id", "name", "value" };
   private String[] nullFieldNames = new String[] { null, null, null };
   private String expected = "INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);";
@@ -33,15 +33,15 @@ public class TestConstructQuery extends TestCase {

   private DBOutputFormat format = new DBOutputFormat();
-
-  public void testConstructQuery() {
+  @Test
+  public void testConstructQuery() {
     String actual = format.constructQuery("hadoop_output", fieldNames);
     assertEquals(expected, actual);
-
+
     actual = format.constructQuery("hadoop_output", nullFieldNames);
     assertEquals(nullExpected, actual);
   }
-
+  @Test
   public void testSetOutput() throws IOException {
     JobConf job = new JobConf();
     DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
index dd7817d65b5..34b1d75dfed 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java
@@ -44,10 +44,13 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Ignore;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;

-import junit.framework.TestCase;
 @Ignore
-public class TestPipes extends TestCase {
+public class TestPipes {
   private static final Log LOG =
     LogFactory.getLog(TestPipes.class.getName());
@@ -66,7 +69,7 @@ public class TestPipes extends TestCase {
     fs.delete(p, true);
     assertFalse("output not cleaned up", fs.exists(p));
   }
-
+  @Test
   public void testPipes() throws IOException {
     if (System.getProperty("compile.c++") == null) {
       LOG.info("compile.c++ is not defined, so skipping TestPipes");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
index 29640c8854b..8177ecd405b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java
@@ -17,36 +17,42 @@
  */
 package org.apache.hadoop.mapreduce;

-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.fs.*;
 import org.apache.hadoop.mapred.LocalJobRunner;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
-
 import org.junit.Test;
-import junit.framework.TestCase;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;

 /**
  * Stress tests for the LocalJobRunner
  */
-public class TestLocalRunner extends TestCase {
+public class TestLocalRunner {

   private static final Log LOG = LogFactory.getLog(TestLocalRunner.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
index 6f45b5f5dc2..8fe9078e9e0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
@@ -17,6 +17,23 @@
  */
 package org.apache.hadoop.mapreduce;

+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.mapreduce.tools.CLI;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Test;
+
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -31,23 +48,11 @@ import java.io.PipedOutputStream;
 import java.io.PrintStream;
 import java.util.Arrays;

-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.junit.Assert;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.mapreduce.tools.CLI;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;

 /**
  test CLI class. CLI class implemented the Tool interface.
@@ -103,7 +108,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
       throw new IOException();
     }
   }
-
+  @Test
   public void testJobSubmissionSpecsAndFiles() throws Exception {
     Configuration conf = createJobConf();
     Job job = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(),
@@ -127,7 +132,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
   /**
    * main test method
    */
-
+  @Test
   public void testJobClient() throws Exception {
     Configuration conf = createJobConf();
     Job job = runJob(conf);
@@ -180,8 +185,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
     runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), "UTF-8");
-    Assert
-      .assertTrue(answer.contains("Killed task " + taid + " by failing it"));
+    assertTrue(answer.contains("Killed task " + taid + " by failing it"));
   }

   /**
@@ -199,7 +203,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
     runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), "UTF-8");
-    Assert.assertTrue(answer.contains("Killed task " + taid));
+    assertTrue(answer.contains("Killed task " + taid));
   }

   /**
@@ -686,6 +690,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
    * Test -list option displays job name.
    * The name is capped to 20 characters for display.
    */
+  @Test
   public void testJobName() throws Exception {
     Configuration conf = createJobConf();
     CLI jc = createJobClient();
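TestMRJobClient drives org.apache.hadoop.mapreduce.tools.CLI through a runTool helper and asserts on the captured output, as the -fail-task and -kill-task hunks above show. A rough sketch of that capture-and-assert pattern (illustrative; the actual test plumbs the stream through its own runTool helper rather than System.out, and runCli here is a hypothetical name):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.tools.CLI;
    import org.apache.hadoop.util.ToolRunner;

    public class CliCapture {
      static String runCli(Configuration conf, String... args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        PrintStream saved = System.out;
        System.setOut(new PrintStream(out, true, "UTF-8"));
        try {
          ToolRunner.run(conf, new CLI(), args);  // exit code ignored in this sketch
        } finally {
          System.setOut(saved);
        }
        return out.toString("UTF-8");
      }
    }

With the static imports introduced by this patch, an assertion then reads much as it does above: assertTrue(runCli(conf, "-kill-task", taid.toString()).contains("Killed task " + taid)).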
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
index 1e4f4de9f93..a69e06eacd9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
@@ -25,8 +25,6 @@ import java.io.Writer;
 import java.util.Arrays;
 import java.util.List;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -42,13 +40,16 @@ import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;

 /**
  * A JUnit test to test the Map-Reduce framework's feature to create part
  * files only if there is an explicit output.collect. This helps in preventing
  * 0 byte files
  */
-public class TestMapReduceLazyOutput extends TestCase {
+public class TestMapReduceLazyOutput {
   private static final int NUM_HADOOP_SLAVES = 3;
   private static final int NUM_MAPS_PER_NODE = 2;
   private static final Path INPUT = new Path("/testlazy/input");
@@ -122,7 +123,7 @@ public class TestMapReduceLazyOutput extends TestCase {
     }
   }

-
+  @Test
   public void testLazyOutput() throws Exception {
     MiniDFSCluster dfs = null;
     MiniMRCluster mr = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNewCombinerGrouping.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNewCombinerGrouping.java
index ab9ddbb4aaa..c2054f1d4c1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNewCombinerGrouping.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNewCombinerGrouping.java
@@ -39,8 +39,9 @@ import java.util.Set;
 import java.util.UUID;

 public class TestNewCombinerGrouping {
-  private static String TEST_ROOT_DIR =
-      new File("build", UUID.randomUUID().toString()).getAbsolutePath();
+  private static String TEST_ROOT_DIR = new File(System.getProperty(
+      "test.build.data", "build/test/data"), UUID.randomUUID().toString())
+      .getAbsolutePath();

   public static class Map extends Mapper {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
index 5cf08991869..b757fb2c34f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
@@ -27,8 +27,6 @@ import java.io.Writer;
 import java.util.ArrayList;
 import java.util.StringTokenizer;

-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -43,12 +41,15 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;

 /**
  * A JUnit test to test the Map-Reduce framework's support for the
  * "mark-reset" functionality in Reduce Values Iterator
  */
-public class TestValueIterReset extends TestCase {
+public class TestValueIterReset {
   private static final int NUM_MAPS = 1;
   private static final int NUM_TESTS = 4;
   private static final int NUM_VALUES = 40;
@@ -518,6 +519,7 @@ public class TestValueIterReset extends TestCase {
     }
   }

+  @Test
   public void testValueIterReset() {
     try {
       Configuration conf = new Configuration();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
index 4d84fa9e108..308b7775a67 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.mapreduce;

+import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -26,7 +27,6 @@ import static org.mockito.Mockito.doNothing;
 import java.io.IOException;
 import java.nio.ByteBuffer;

-import junit.framework.TestCase;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -44,8 +44,7 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.junit.Test;

-public class TestYarnClientProtocolProvider extends TestCase {
-
+public class TestYarnClientProtocolProvider {
   private static final RecordFactory recordFactory = RecordFactoryProvider.
       getRecordFactory(null);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
index f24dffe2655..3aac54e7159 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
@@ -18,22 +18,24 @@
 package org.apache.hadoop.mapreduce.lib.aggregate;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.Utils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.junit.Test;

-import junit.framework.TestCase;
-import java.io.*;
 import java.text.NumberFormat;

-public class TestMapReduceAggregates extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestMapReduceAggregates {

   private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -41,7 +43,7 @@ public class TestMapReduceAggregates extends TestCase {
     idFormat.setGroupingUsed(false);
   }

-
+  @Test
   public void testAggregates() throws Exception {
     launch();
   }
@@ -51,8 +53,9 @@ public class TestMapReduceAggregates extends TestCase {
     FileSystem fs = FileSystem.get(conf);
     int numOfInputLines = 20;

-    Path OUTPUT_DIR = new Path("build/test/output_for_aggregates_test");
-    Path INPUT_DIR = new Path("build/test/input_for_aggregates_test");
+    String baseDir = System.getProperty("test.build.data", "build/test/data");
+    Path OUTPUT_DIR = new Path(baseDir + "/output_for_aggregates_test");
+    Path INPUT_DIR = new Path(baseDir + "/input_for_aggregates_test");
     String inputFile = "input.txt";
     fs.delete(INPUT_DIR, true);
     fs.mkdirs(INPUT_DIR);
@@ -122,11 +125,4 @@ public class TestMapReduceAggregates extends TestCase {
     fs.delete(OUTPUT_DIR, true);
     fs.delete(INPUT_DIR, true);
   }
-
-  /**
-   * Launches all the tasks in order.
-   */
-  public static void main(String[] argv) throws Exception {
-    launch();
-  }
 }
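This is the first of several hunks in the patch that delete a per-class main() launcher (TestMRFieldSelection, TestMRSequenceFileAsTextInputFormat, TestMRSequenceFileInputFilter and TestNLineInputFormat follow). Those harnesses date from JUnit 3; under JUnit 4 the runner discovers tests by the @Test annotation, and a command-line entry point, if one is ever wanted, reduces to JUnitCore. A sketch (illustrative only, not part of the patch):

    import org.junit.runner.JUnitCore;
    import org.junit.runner.Result;

    public class RunOneClass {
      public static void main(String[] args) {
        // Discovers and runs every @Test method in the class.
        Result result = JUnitCore.runClasses(TestMapReduceAggregates.class);
        System.out.println(result.wasSuccessful()
            ? "OK" : result.getFailures().toString());
      }
    }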
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java
index bff25d20038..014855f7d6a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestDBOutputFormat.java
@@ -19,14 +19,15 @@ package org.apache.hadoop.mapreduce.lib.db;

 import java.io.IOException;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
+import org.junit.Test;

-public class TestDBOutputFormat extends TestCase {
-
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+public class TestDBOutputFormat {
   private String[] fieldNames = new String[] { "id", "name", "value" };
   private String[] nullFieldNames = new String[] { null, null, null };
   private String expected = "INSERT INTO hadoop_output " +
@@ -35,15 +36,17 @@ public class TestDBOutputFormat extends TestCase {

   private DBOutputFormat format = new DBOutputFormat();
-
-  public void testConstructQuery() {
+
+  @Test
+  public void testConstructQuery() {
     String actual = format.constructQuery("hadoop_output", fieldNames);
     assertEquals(expected, actual);

     actual = format.constructQuery("hadoop_output", nullFieldNames);
     assertEquals(nullExpected, actual);
   }
-
+
+  @Test
   public void testSetOutput() throws IOException {
     Job job = Job.getInstance(new Configuration());
     DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java
index e50aba4f462..8b5d907dcdc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestIntegerSplitter.java
@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.mapreduce.lib.db;

-import java.io.IOException;
-import java.math.BigDecimal;
+import org.junit.Test;
+
 import java.sql.SQLException;
-import java.util.ArrayList;
 import java.util.List;

-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;

-public class TestIntegerSplitter extends TestCase {
+public class TestIntegerSplitter {
   private long [] toLongArray(List in) {
     long [] out = new long[in.size()];
     for (int i = 0; i < in.size(); i++) {
@@ -70,12 +70,14 @@ public class TestIntegerSplitter extends TestCase {
     }
   }

+  @Test
   public void testEvenSplits() throws SQLException {
     List splits = new IntegerSplitter().split(10, 0, 100);
     long [] expected = { 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
     assertLongArrayEquals(expected, toLongArray(splits));
   }

+  @Test
   public void testOddSplits() throws SQLException {
     List splits = new IntegerSplitter().split(10, 0, 95);
     long [] expected = { 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 95 };
@@ -83,12 +85,14 @@ public class TestIntegerSplitter extends TestCase {
   }

+  @Test
   public void testSingletonSplit() throws SQLException {
     List splits = new IntegerSplitter().split(1, 5, 5);
     long [] expected = { 5, 5 };
     assertLongArrayEquals(expected, toLongArray(splits));
   }

+  @Test
   public void testSingletonSplit2() throws SQLException {
     // Same test, but overly-high numSplits
     List splits = new IntegerSplitter().split(5, 5, 5);
@@ -96,6 +100,7 @@ public class TestIntegerSplitter extends TestCase {
     assertLongArrayEquals(expected, toLongArray(splits));
   }

+  @Test
   public void testTooManySplits() throws SQLException {
     List splits = new IntegerSplitter().split(5, 3, 5);
     long [] expected = { 3, 4, 5 };
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java
index 045e3a1b1f6..e16f4234877 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/db/TestTextSplitter.java
@@ -17,15 +17,16 @@
  */
 package org.apache.hadoop.mapreduce.lib.db;

-import java.io.IOException;
+import org.junit.Test;
+
 import java.math.BigDecimal;
 import java.sql.SQLException;
-import java.util.ArrayList;
 import java.util.List;

-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;

-public class TestTextSplitter extends TestCase {
+public class TestTextSplitter {

   public String formatArray(Object [] ar) {
     StringBuilder sb = new StringBuilder();
@@ -62,48 +63,56 @@ public class TestTextSplitter extends TestCase {
     }
   }

+  @Test
   public void testStringConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     BigDecimal emptyBigDec = splitter.stringToBigDecimal("");
     assertEquals(BigDecimal.ZERO, emptyBigDec);
   }

+  @Test
   public void testBigDecConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     String emptyStr = splitter.bigDecimalToString(BigDecimal.ZERO);
     assertEquals("", emptyStr);
   }

+  @Test
   public void testConvertA() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("A"));
     assertEquals("A", out);
   }

+  @Test
   public void testConvertZ() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("Z"));
     assertEquals("Z", out);
   }

+  @Test
   public void testConvertThreeChars() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("abc"));
     assertEquals("abc", out);
   }

+  @Test
   public void testConvertStr() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("big str"));
     assertEquals("big str", out);
   }

+  @Test
   public void testConvertChomped() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("AVeryLongStringIndeed"));
     assertEquals("AVeryLon", out);
   }

+  @Test
   public void testAlphabetSplit() throws SQLException {
     // This should give us 25 splits, one per letter.
     TextSplitter splitter = new TextSplitter();
@@ -113,6 +122,7 @@ public class TestTextSplitter extends TestCase {
     assertArrayEquals(expected, splits.toArray(new String [0]));
   }

+  @Test
   public void testCommonPrefix() throws SQLException {
     // Splits between 'Hand' and 'Hardy'
     TextSplitter splitter = new TextSplitter();
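The five TestIntegerSplitter cases above pin down the boundary arithmetic quite tightly: a step of max((max - min) / numSplits, 1), boundaries emitted from min upward, and the range always closed at max (which is why 0..95 in ten splits yields 0, 9, ..., 90, 95, and why the degenerate 5..5 range yields {5, 5}). A plain-Java rendering of exactly the behavior these assertions require (illustrative only; the real IntegerSplitter also handles cases these tests do not exercise):

    import java.util.ArrayList;
    import java.util.List;

    public class EvenSplits {
      static List<Long> split(int numSplits, long min, long max) {
        long step = Math.max((max - min) / numSplits, 1);
        List<Long> boundaries = new ArrayList<>();
        for (long cur = min; cur < max; cur += step) {
          boundaries.add(cur);
        }
        if (boundaries.isEmpty()) {
          boundaries.add(min);   // min == max: still emit the low boundary
        }
        boundaries.add(max);     // always close the final interval at max
        return boundaries;
      }
    }

Under this sketch, split(10, 0, 100) gives {0, 10, ..., 100}, split(10, 0, 95) gives {0, 9, ..., 90, 95}, split(1, 5, 5) gives {5, 5}, and split(5, 3, 5) gives {3, 4, 5} -- the four expected arrays asserted above.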
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
index 91070f89c42..6f9183ab21b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
@@ -18,15 +18,19 @@
 package org.apache.hadoop.mapreduce.lib.fieldsel;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.junit.Test;

-import junit.framework.TestCase;
 import java.text.NumberFormat;

-public class TestMRFieldSelection extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestMRFieldSelection {

 private static NumberFormat idFormat = NumberFormat.getInstance();
   static {
@@ -34,6 +38,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     idFormat.setGroupingUsed(false);
   }

+  @Test
   public void testFieldSelection() throws Exception {
     launch();
   }
@@ -114,11 +119,4 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     System.out.println("ExpectedData:");
     System.out.println(expectedOutput.toString());
   }
-
-  /**
-   * Launches all the tasks in order.
-   */
-  public static void main(String[] argv) throws Exception {
-    launch();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
index b49f2d831ab..1fca5c982bf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
@@ -1772,7 +1772,9 @@ public class TestCombineFileInputFormat {
     // default fs path
     assertEquals(DUMMY_FS_URI, FileSystem.getDefaultUri(conf).toString());
     // add a local file
-    Path localPath = new Path("testFile1");
+    String localPathRoot = System.getProperty("test.build.data",
+        "build/test/data");
+    Path localPath = new Path(localPathRoot, "testFile1");
     FileSystem lfs = FileSystem.getLocal(conf);
     FSDataOutputStream dos = lfs.create(localPath);
     dos.writeChars("Local file for CFIF");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java
index f0b3d57486c..cbf9d183ef2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java
@@ -18,11 +18,12 @@
 package org.apache.hadoop.mapreduce.lib.input;

-import java.io.IOException;
-import java.util.Random;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -31,12 +32,18 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.Test;

-import junit.framework.TestCase;
+import java.io.IOException;
+import java.util.Random;

-public class TestMRSequenceFileAsBinaryInputFormat extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestMRSequenceFileAsBinaryInputFormat {
   private static final int RECORDS = 10000;

+  @Test
   public void testBinary() throws IOException, InterruptedException {
     Job job = Job.getInstance();
     FileSystem fs = FileSystem.getLocal(job.getConfiguration());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java
index 2d03c2dd96a..335ce050d82 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java
@@ -18,11 +18,13 @@
 package org.apache.hadoop.mapreduce.lib.input;

-import java.util.*;
-import junit.framework.TestCase;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -31,12 +33,19 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.apache.hadoop.conf.*;
+import org.junit.Test;

-public class TestMRSequenceFileAsTextInputFormat extends TestCase {
+import java.util.BitSet;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+public class TestMRSequenceFileAsTextInputFormat {
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();

+  @Test
   public void testFormat() throws Exception {
     Job job = Job.getInstance(conf);
     FileSystem fs = FileSystem.getLocal(conf);
@@ -112,8 +121,4 @@ public class TestMRSequenceFileAsTextInputFormat extends TestCase {
       }
     }
   }
-
-  public static void main(String[] args) throws Exception {
-    new TestMRSequenceFileAsTextInputFormat().testFormat();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java
index edf7e1ad10d..89aa7b23057 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java
@@ -18,14 +18,14 @@
 package org.apache.hadoop.mapreduce.lib.input;

-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
-
-import org.apache.commons.logging.*;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -34,10 +34,15 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
-import org.apache.hadoop.conf.*;
+import org.junit.Test;

-public class TestMRSequenceFileInputFilter extends TestCase {
-  private static final Log LOG =
+import java.io.IOException;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestMRSequenceFileInputFilter {
+  private static final Log LOG =
     LogFactory.getLog(TestMRSequenceFileInputFilter.class.getName());

   private static final int MAX_LENGTH = 15000;
@@ -113,7 +118,8 @@ public class TestMRSequenceFileInputFilter extends TestCase {
     }
     return count;
   }
-
+
+  @Test
   public void testRegexFilter() throws Exception {
     // set the filter class
     LOG.info("Testing Regex Filter with patter: \\A10*");
@@ -138,6 +144,7 @@ public class TestMRSequenceFileInputFilter extends TestCase {
     fs.delete(inDir, true);
   }

+  @Test
   public void testPercentFilter() throws Exception {
     LOG.info("Testing Percent Filter with frequency: 1000");
     // set the filter class
@@ -165,7 +172,8 @@ public class TestMRSequenceFileInputFilter extends TestCase {
     // clean up
     fs.delete(inDir, true);
   }
-
+
+  @Test
   public void testMD5Filter() throws Exception {
     // set the filter class
     LOG.info("Testing MD5 Filter with frequency: 1000");
@@ -187,9 +195,4 @@ public class TestMRSequenceFileInputFilter extends TestCase {
     // clean up
     fs.delete(inDir, true);
   }
-
-  public static void main(String[] args) throws Exception {
-    TestMRSequenceFileInputFilter filter = new TestMRSequenceFileInputFilter();
-    filter.testRegexFilter();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
index 7b3878d9475..477866f4e35 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
@@ -18,17 +18,28 @@
 package org.apache.hadoop.mapreduce.lib.input;

-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MapContext;
+import org.apache.hadoop.mapreduce.MapReduceTestUtil;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.Test;

-public class TestNLineInputFormat extends TestCase {
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestNLineInputFormat {
   private static int MAX_LENGTH = 200;

   private static Configuration conf = new Configuration();
@@ -45,7 +56,8 @@ public class TestNLineInputFormat extends TestCase {
   private static Path workDir =
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestNLineInputFormat");
-
+
+  @Test
   public void testFormat() throws Exception {
     Job job = Job.getInstance(conf);
     Path file = new Path(workDir, "test.txt");
@@ -116,8 +128,4 @@ public class TestNLineInputFormat extends TestCase {
       }
     }
   }
-
-  public static void main(String[] args) throws Exception {
-    new TestNLineInputFormat().testFormat();
-  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java
index d245bfd6cde..1173ea4fa47 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java
@@ -19,11 +19,6 @@ package org.apache.hadoop.mapreduce.lib.join;

 import java.io.IOException;

-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-import junit.extensions.TestSetup;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -37,23 +32,31 @@ import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;

-public class TestJoinDatamerge extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestJoinDatamerge {

   private static MiniDFSCluster cluster = null;
-  public static Test suite() {
-    TestSetup setup = new TestSetup(new TestSuite(TestJoinDatamerge.class)) {
-      protected void setUp() throws Exception {
-        Configuration conf = new Configuration();
-        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-      }
-      protected void tearDown() throws Exception {
-        if (cluster != null) {
-          cluster.shutdown();
-        }
-      }
-    };
-    return setup;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    Configuration conf = new Configuration();
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
   }

   private static SequenceFile.Writer[] createWriters(Path testdir,
@@ -111,7 +114,7 @@ public class TestJoinDatamerge extends TestCase {
       extends Mapper{
     protected final static IntWritable one = new IntWritable(1);
     int srcs;
-
+
     public void setup(Context context) {
       srcs = context.getConfiguration().getInt("testdatamerge.sources", 0);
       assertTrue("Invalid src count: " + srcs, srcs > 0);
@@ -123,7 +126,7 @@ public class TestJoinDatamerge extends TestCase {
     protected final static IntWritable one = new IntWritable(1);
     int srcs;
-
+
     public void setup(Context context) {
       srcs = context.getConfiguration().getInt("testdatamerge.sources", 0);
       assertTrue("Invalid src count: " + srcs, srcs > 0);
@@ -272,10 +275,12 @@ public class TestJoinDatamerge extends TestCase {
     base.getFileSystem(conf).delete(base, true);
   }

+  @Test
   public void testSimpleInnerJoin() throws Exception {
     joinAs("inner", InnerJoinMapChecker.class, InnerJoinReduceChecker.class);
   }

+  @Test
   public void testSimpleOuterJoin() throws Exception {
     joinAs("outer", OuterJoinMapChecker.class, OuterJoinReduceChecker.class);
   }
@@ -322,11 +327,13 @@ public class TestJoinDatamerge extends TestCase {
     }
     return product;
   }
-
+
+  @Test
   public void testSimpleOverride() throws Exception {
     joinAs("override", OverrideMapChecker.class, OverrideReduceChecker.class);
   }

+  @Test
   public void testNestedJoin() throws Exception {
     // outer(inner(S1,...,Sn),outer(S1,...Sn))
     final int SOURCES = 3;
@@ -422,6 +429,7 @@ public class TestJoinDatamerge extends TestCase {
   }

+  @Test
   public void testEmptyJoin() throws Exception {
     Configuration conf = new Configuration();
     Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
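Unlike the per-test @Before/@After conversion applied to TestDatamerge earlier in the patch, TestJoinDatamerge (above) and TestJoinProperties (below) preserve the old TestSetup semantics: the MiniDFSCluster is built once for the whole class. In JUnit 4 that requires public static methods and static shared state, as in this sketch (illustrative only; the class name is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class ExampleSharedClusterTest {
      private static MiniDFSCluster cluster;

      @BeforeClass  // once, before the first test in the class
      public static void setUp() throws Exception {
        cluster = new MiniDFSCluster.Builder(new Configuration())
            .numDataNodes(2).build();
      }

      @AfterClass   // once, after the last test in the class
      public static void tearDown() {
        if (cluster != null) {
          cluster.shutdown();
        }
      }

      @Test
      public void testAgainstSharedCluster() throws Exception {
        // every @Test in the class reuses the same cluster instance
        cluster.getFileSystem().mkdirs(new Path("/shared"));
      }
    }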
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java
index 151bc875ad3..b6e76069d95 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java
@@ -20,11 +20,6 @@ package org.apache.hadoop.mapreduce.lib.join;
 import java.io.IOException;
 import java.util.List;

-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-import junit.extensions.TestSetup;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -36,8 +31,14 @@ import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;

-public class TestJoinProperties extends TestCase {
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestJoinProperties {

   private static MiniDFSCluster cluster = null;
   final static int SOURCES = 3;
@@ -46,21 +47,19 @@ public class TestJoinProperties extends TestCase {
   static Path[] src;
   static Path base;

-  public static Test suite() {
-    TestSetup setup = new TestSetup(new TestSuite(TestJoinProperties.class)) {
-      protected void setUp() throws Exception {
-        Configuration conf = new Configuration();
-        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-        base = cluster.getFileSystem().makeQualified(new Path("/nested"));
-        src = generateSources(conf);
-      }
-      protected void tearDown() throws Exception {
-        if (cluster != null) {
-          cluster.shutdown();
-        }
-      }
-    };
-    return setup;
+  @BeforeClass
+  public static void setUp() throws Exception {
+    Configuration conf = new Configuration();
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+    base = cluster.getFileSystem().makeQualified(new Path("/nested"));
+    src = generateSources(conf);
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
   }

   // Sources from 0 to srcs-2 have IntWritable key and IntWritable value
@@ -233,6 +232,7 @@ public class TestJoinProperties extends TestCase {
   }

   // outer(outer(A, B), C) == outer(A,outer(B, C)) == outer(A, B, C)
+  @Test
   public void testOuterAssociativity() throws Exception {
     Configuration conf = new Configuration();
     testExpr1(conf, "outer", TestType.OUTER_ASSOCIATIVITY, 33);
@@ -241,6 +241,7 @@ public class TestJoinProperties extends TestCase {
   }

   // inner(inner(A, B), C) == inner(A,inner(B, C)) == inner(A, B, C)
+  @Test
   public void testInnerAssociativity() throws Exception {
     Configuration conf = new Configuration();
     testExpr1(conf, "inner", TestType.INNER_ASSOCIATIVITY, 2);
@@ -249,6 +250,7 @@ public class TestJoinProperties extends TestCase {
   }

   // override(inner(A, B), A) == A
+  @Test
   public void testIdentity() throws Exception {
     Configuration conf = new Configuration();
     testExpr4(conf);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
index d35941fc884..093da266b95 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
@@ -24,8 +24,6 @@ import java.io.DataOutputStream;
 import java.util.Arrays;
 import java.util.Random;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -33,8 +31,13 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
+import org.junit.Test;

-public class TestJoinTupleWritable extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestJoinTupleWritable {

   private TupleWritable makeTuple(Writable[] writs) {
     Writable[] sub1 = { writs[1], writs[2] };
@@ -97,6 +100,7 @@ public class TestJoinTupleWritable extends TestCase {
     return i;
   }

+  @Test
   public void testIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -118,6 +122,7 @@ public class TestJoinTupleWritable extends TestCase {
     verifIter(writs, t, 0);
   }

+  @Test
   public void testNestedIterable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -136,6 +141,7 @@ public class TestJoinTupleWritable extends TestCase {
     assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
   }

+  @Test
   public void testWritable() throws Exception {
     Random r = new Random();
     Writable[] writs = {
@@ -159,6 +165,7 @@ public class TestJoinTupleWritable extends TestCase {
     assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
   }

+  @Test
   public void testWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
@@ -178,7 +185,8 @@ public class TestJoinTupleWritable extends TestCase {
     assertEquals("All tuple data has not been read from the stream",
                  -1, in.read());
   }
-
+
+  @Test
   public void testWideWritable2() throws Exception {
     Writable[] manyWrits = makeRandomWritables(71);
@@ -201,6 +209,7 @@ public class TestJoinTupleWritable extends TestCase {
    * Tests a tuple writable with more than 64 values and the values set written
    * spread far apart.
    */
+  @Test
   public void testSparseWideWritable() throws Exception {
     Writable[] manyWrits = makeRandomWritables(131);
@@ -220,7 +229,8 @@ public class TestJoinTupleWritable extends TestCase {
     assertEquals("All tuple data has not been read from the stream",
                  -1, in.read());
   }
-
+
+  @Test
   public void testWideTuple() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -241,7 +251,8 @@ public class TestJoinTupleWritable extends TestCase {
       }
     }
   }
-
+
+  @Test
   public void testWideTuple2() throws Exception {
     Text emptyText = new Text("Should be empty");
     Writable[] values = new Writable[64];
@@ -266,6 +277,7 @@ public class TestJoinTupleWritable extends TestCase {
   /**
    * Tests that we can write more than 64 values.
    */
+  @Test
   public void testWideTupleBoundary() throws Exception {
     Text emptyText = new Text("Should not be set written");
     Writable[] values = new Writable[65];
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
index 36cf1872ad4..680e246b4e3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
@@ -17,23 +17,32 @@
  */
 package org.apache.hadoop.mapreduce.lib.join;

-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.junit.Test;

-public class TestWrappedRRClassloader extends TestCase {
+import static org.junit.Assert.assertTrue;
+
+public class TestWrappedRRClassloader {
   /**
    * Tests the class loader set by
    * {@link Configuration#setClassLoader(ClassLoader)}
    * is inherited by any {@link WrappedRecordReader}s created by
    * {@link CompositeRecordReader}
    */
+  @Test
   public void testClassLoader() throws Exception {
     Configuration conf = new Configuration();
     Fake_ClassLoader classLoader = new Fake_ClassLoader();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
index 2e40f72fdd2..5a8aeda83be 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
@@ -18,12 +18,17 @@
 package org.apache.hadoop.mapreduce.lib.output;

-import java.io.IOException;
-import java.util.Random;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -38,16 +43,22 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.Test;

-import junit.framework.TestCase;
-import org.apache.commons.logging.*;
+import java.io.IOException;
+import java.util.Random;

-public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class TestMRSequenceFileAsBinaryOutputFormat {
   private static final Log LOG =
       LogFactory.getLog(TestMRSequenceFileAsBinaryOutputFormat.class.getName());

   private static final int RECORDS = 10000;
-
+
+  @Test
   public void testBinary() throws IOException, InterruptedException {
     Configuration conf = new Configuration();
     Job job = Job.getInstance(conf);
@@ -144,7 +155,8 @@ public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
     assertEquals("Some records not found", RECORDS, count);
   }

-  public void testSequenceOutputClassDefaultsToMapRedOutputClass()
+  @Test
+  public void testSequenceOutputClassDefaultsToMapRedOutputClass()
          throws IOException {
     Job job = Job.getInstance();
     // Setting Random class to test getSequenceFileOutput{Key,Value}Class
@@ -172,7 +184,8 @@ public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
         SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job));
   }

-  public void testcheckOutputSpecsForbidRecordCompression()
+  @Test
+  public void testcheckOutputSpecsForbidRecordCompression()
       throws IOException {
     Job job = Job.getInstance();
     FileSystem fs = FileSystem.getLocal(job.getConfiguration());
diff --git
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java index 7be538ecf41..f83bc11a216 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestBinaryPartitioner.java @@ -22,11 +22,14 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.BinaryComparable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.util.ReflectionUtils; +import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; -public class TestBinaryPartitioner extends TestCase { +public class TestBinaryPartitioner { + @Test public void testDefaultOffsets() { Configuration conf = new Configuration(); BinaryPartitioner partitioner = @@ -50,7 +53,8 @@ public class TestBinaryPartitioner extends TestCase { partition2 = partitioner.getPartition(key2, null, 10); assertTrue(partition1 != partition2); } - + + @Test public void testCustomOffsets() { Configuration conf = new Configuration(); BinaryComparable key1 = new BytesWritable(new byte[] { 1, 2, 3, 4, 5 }); @@ -75,7 +79,8 @@ public class TestBinaryPartitioner extends TestCase { partition2 = partitioner.getPartition(key2, null, 10); assertEquals(partition1, partition2); } - + + @Test public void testLowerBound() { Configuration conf = new Configuration(); BinaryPartitioner.setLeftOffset(conf, 0); @@ -87,7 +92,8 @@ public class TestBinaryPartitioner extends TestCase { int partition2 = partitioner.getPartition(key2, null, 10); assertTrue(partition1 != partition2); } - + + @Test public void testUpperBound() { Configuration conf = new Configuration(); BinaryPartitioner.setRightOffset(conf, 4); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java index 6bad846f6d3..4d05d13d445 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestKeyFieldHelper.java @@ -19,14 +19,17 @@ package org.apache.hadoop.mapreduce.lib.partition; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; -public class TestKeyFieldHelper extends TestCase { +public class TestKeyFieldHelper { private static final Log LOG = LogFactory.getLog(TestKeyFieldHelper.class); /** * Tests the key-field-helper's parse option.
*/ + @Test public void testparseOption() throws Exception { KeyFieldHelper helper = new KeyFieldHelper(); helper.setKeyFieldSeparator("\t"); @@ -212,6 +215,7 @@ public class TestKeyFieldHelper extends TestCase { /** * Tests the key-field-helper's getWordLengths. */ + @Test public void testGetWordLengths() throws Exception { KeyFieldHelper helper = new KeyFieldHelper(); helper.setKeyFieldSeparator("\t"); @@ -270,6 +274,7 @@ public class TestKeyFieldHelper extends TestCase { /** * Tests the key-field-helper's getStartOffset/getEndOffset. */ + @Test public void testgetStartEndOffset() throws Exception { KeyFieldHelper helper = new KeyFieldHelper(); helper.setKeyFieldSeparator("\t"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java index 9c2fb48d9bf..00b415f32cb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java @@ -19,14 +19,16 @@ package org.apache.hadoop.mapreduce.lib.partition; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; +import org.junit.Test; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; -public class TestMRKeyFieldBasedPartitioner extends TestCase { +public class TestMRKeyFieldBasedPartitioner { /** * Tests that the key-field-based partitioner works with an empty key.
*/ + @Test public void testEmptyKey() throws Exception { int numReducers = 10; KeyFieldBasedPartitioner kfbp = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java index a844737e09d..bdb4ff4794e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; -import junit.framework.TestCase; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.FileSystem; @@ -41,8 +39,11 @@ import org.apache.hadoop.io.serializer.JavaSerializationComparator; import org.apache.hadoop.io.serializer.Serialization; import org.apache.hadoop.io.serializer.WritableSerialization; import org.apache.hadoop.mapreduce.MRJobConfig; +import org.junit.Test; -public class TestTotalOrderPartitioner extends TestCase { +import static org.junit.Assert.assertEquals; + +public class TestTotalOrderPartitioner { private static final Text[] splitStrings = new Text[] { // -inf // 0 @@ -140,6 +141,7 @@ public class TestTotalOrderPartitioner extends TestCase { return p; } + @Test public void testTotalOrderWithCustomSerialization() throws Exception { TotalOrderPartitioner partitioner = new TotalOrderPartitioner(); @@ -165,6 +167,7 @@ public class TestTotalOrderPartitioner extends TestCase { } } + @Test public void testTotalOrderMemCmp() throws Exception { TotalOrderPartitioner partitioner = new TotalOrderPartitioner(); @@ -184,6 +187,7 @@ public class TestTotalOrderPartitioner extends TestCase { } } + @Test public void testTotalOrderBinarySearch() throws Exception { TotalOrderPartitioner partitioner = new TotalOrderPartitioner(); @@ -216,6 +220,7 @@ public class TestTotalOrderPartitioner extends TestCase { } } + @Test public void testTotalOrderCustomComparator() throws Exception { TotalOrderPartitioner partitioner = new TotalOrderPartitioner(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java index e1849a3ce9c..07b5d8b9f50 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java @@ -20,8 +20,6 @@ package org.apache.hadoop.mapreduce.util; import java.io.File; import java.io.IOException; -import junit.framework.TestCase; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -30,20 +28,27 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import 
org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.mapreduce.util.MRAsyncDiskService; +import org.junit.Before; import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + /** * A test for MRAsyncDiskService. */ -public class TestMRAsyncDiskService extends TestCase { +public class TestMRAsyncDiskService { public static final Log LOG = LogFactory.getLog(TestMRAsyncDiskService.class); private static String TEST_ROOT_DIR = new Path(System.getProperty( "test.build.data", "/tmp")).toString(); - @Override - protected void setUp() { + @Before + public void setUp() { FileUtil.fullyDelete(new File(TEST_ROOT_DIR)); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java index aa769f85974..f68cc8310a6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMiniMRProxyUser.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.mapreduce.v2; -import junit.framework.TestCase; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; @@ -29,22 +28,25 @@ import org.apache.hadoop.mapred.MiniMRCluster; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.ProxyUsers; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; -import java.net.InetAddress; -import java.io.File; -import java.io.FileOutputStream; -import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; +import java.net.InetAddress; import java.security.PrivilegedExceptionAction; -public class TestMiniMRProxyUser extends TestCase { +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +public class TestMiniMRProxyUser { private MiniDFSCluster dfsCluster = null; private MiniMRCluster mrCluster = null; - - protected void setUp() throws Exception { - super.setUp(); + + @Before + public void setUp() throws Exception { if (System.getProperty("hadoop.log.dir") == null) { System.setProperty("hadoop.log.dir", "/tmp"); } @@ -91,15 +93,14 @@ public class TestMiniMRProxyUser extends TestCase { return mrCluster.createJobConf(); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { if (mrCluster != null) { mrCluster.shutdown(); } if (dfsCluster != null) { dfsCluster.shutdown(); } - super.tearDown(); } private void mrRun() throws Exception { @@ -125,11 +126,13 @@ public class TestMiniMRProxyUser extends TestCase { assertTrue(runJob.isComplete()); assertTrue(runJob.isSuccessful()); } - + + @Test public void __testCurrentUser() throws Exception { mrRun(); } + @Test public void testValidProxyUser() throws Exception { UserGroupInformation ugi = UserGroupInformation.createProxyUser("u1", UserGroupInformation.getLoginUser()); ugi.doAs(new PrivilegedExceptionAction() { @@ -142,6 +145,7 
@@ public class TestMiniMRProxyUser extends TestCase { }); } + @Test public void ___testInvalidProxyUser() throws Exception { UserGroupInformation ugi = UserGroupInformation.createProxyUser("u2", UserGroupInformation.getLoginUser()); ugi.doAs(new PrivilegedExceptionAction() { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java index b6947f3fc48..e90c509d7a8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestNonExistentJob.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.mapreduce.v2; -import junit.framework.TestCase; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; @@ -28,17 +27,22 @@ import org.apache.hadoop.mapred.JobID; import org.apache.hadoop.mapred.MiniMRCluster; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.authorize.ProxyUsers; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import java.io.IOException; import java.net.InetAddress; -public class TestNonExistentJob extends TestCase { +import static org.junit.Assert.assertNull; + +public class TestNonExistentJob { private MiniDFSCluster dfsCluster = null; private MiniMRCluster mrCluster = null; - protected void setUp() throws Exception { - super.setUp(); + @Before + public void setUp() throws Exception { if (System.getProperty("hadoop.log.dir") == null) { System.setProperty("hadoop.log.dir", "/tmp"); } @@ -78,17 +82,17 @@ public class TestNonExistentJob extends TestCase { return mrCluster.createJobConf(); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { if (mrCluster != null) { mrCluster.shutdown(); } if (dfsCluster != null) { dfsCluster.shutdown(); } - super.tearDown(); } + @Test public void testGetInvalidJob() throws Exception { RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0")); assertNull(runJob); diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java index 6b8af97e9c4..c502ffd173d 100644 --- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java +++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java @@ -450,7 +450,7 @@ public class HadoopArchiveLogs implements Tool { fi export HADOOP_CLIENT_OPTS="-Xmx1024m" export HADOOP_CLASSPATH=/dist/share/hadoop/tools/lib/hadoop-archive-logs-2.8.0-SNAPSHOT.jar:/dist/share/hadoop/tools/lib/hadoop-archives-2.8.0-SNAPSHOT.jar - "$HADOOP_PREFIX"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner -appId "$appId" -user "$user" -workingDir /tmp/logs/archive-logs-work -remoteRootLogDir /tmp/logs -suffix logs + "$HADOOP_HOME"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner -appId "$appId" -user "$user" -workingDir /tmp/logs/archive-logs-work -remoteRootLogDir /tmp/logs -suffix logs */ @VisibleForTesting void 
generateScript(File localScript, Path workingDir, @@ -484,7 +484,7 @@ public class HadoopArchiveLogs implements Tool { fw.write("m\"\n"); fw.write("export HADOOP_CLASSPATH="); fw.write(classpath); - fw.write("\n\"$HADOOP_PREFIX\"/bin/hadoop "); + fw.write("\n\"$HADOOP_HOME\"/bin/hadoop "); fw.write(HadoopArchiveLogsRunner.class.getName()); fw.write(" -appId \"$appId\" -user \"$user\" -workingDir "); fw.write(workingDir.toString()); diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java index 7fcb0bfad80..d2d7801caf2 100644 --- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java +++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java @@ -294,14 +294,14 @@ public class TestHadoopArchiveLogs { Assert.assertTrue(lines[14].startsWith("export HADOOP_CLASSPATH=")); if (proxy) { Assert.assertEquals( - "\"$HADOOP_PREFIX\"/bin/hadoop org.apache.hadoop.tools." + + "\"$HADOOP_HOME\"/bin/hadoop org.apache.hadoop.tools." + "HadoopArchiveLogsRunner -appId \"$appId\" -user \"$user\" " + "-workingDir " + workingDir.toString() + " -remoteRootLogDir " + remoteRootLogDir.toString() + " -suffix " + suffix, lines[15]); } else { Assert.assertEquals( - "\"$HADOOP_PREFIX\"/bin/hadoop org.apache.hadoop.tools." + + "\"$HADOOP_HOME\"/bin/hadoop org.apache.hadoop.tools." + "HadoopArchiveLogsRunner -appId \"$appId\" -user \"$user\" " + "-workingDir " + workingDir.toString() + " -remoteRootLogDir " + remoteRootLogDir.toString() + " -suffix " + suffix + " -noProxy", diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java index 7ab6c796308..fe705cef83c 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java @@ -26,6 +26,7 @@ import java.net.URI; import java.util.ArrayList; import java.util.Date; import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; @@ -787,11 +788,14 @@ public class S3AFileSystem extends FileSystem { ObjectListing objects = s3.listObjects(request); statistics.incrementReadOps(1); + Path fQualified = f.makeQualified(uri, workingDir); + while (true) { for (S3ObjectSummary summary : objects.getObjectSummaries()) { Path keyPath = keyToPath(summary.getKey()).makeQualified(uri, workingDir); // Skip over keys that are ourselves and old S3N _$folder$ files - if (keyPath.equals(f) || summary.getKey().endsWith(S3N_FOLDER_SUFFIX)) { + if (keyPath.equals(fQualified) || + summary.getKey().endsWith(S3N_FOLDER_SUFFIX)) { if (LOG.isDebugEnabled()) { LOG.debug("Ignoring: " + keyPath); } @@ -806,7 +810,7 @@ public class S3AFileSystem extends FileSystem { } else { result.add(new S3AFileStatus(summary.getSize(), dateToLong(summary.getLastModified()), keyPath, - getDefaultBlockSize(f.makeQualified(uri, workingDir)))); + getDefaultBlockSize(fQualified))); if (LOG.isDebugEnabled()) { LOG.debug("Adding: fi: " + keyPath); } @@ -1128,7 +1132,7 @@ public class S3AFileSystem extends FileSystem { } ObjectMetadata srcom = s3.getObjectMetadata(bucket, srcKey); - final ObjectMetadata dstom = srcom.clone(); + ObjectMetadata dstom = cloneObjectMetadata(srcom); if 
(StringUtils.isNotBlank(serverSideEncryptionAlgorithm)) { dstom.setSSEAlgorithm(serverSideEncryptionAlgorithm); } @@ -1234,6 +1238,73 @@ statistics.incrementWriteOps(1); } + /** + * Creates a copy of the passed {@link ObjectMetadata}. + * Does so without using the {@link ObjectMetadata#clone()} method, + * to avoid copying unnecessary headers. + * @param source the {@link ObjectMetadata} to copy + * @return a copy of {@link ObjectMetadata} with only relevant attributes + */ + private ObjectMetadata cloneObjectMetadata(ObjectMetadata source) { + // This approach may be too brittle, especially if + // new attributes are added to ObjectMetadata in the future + // that we do not explicitly set here + ObjectMetadata ret = new ObjectMetadata(); + + // Non-null attributes + ret.setContentLength(source.getContentLength()); + + // Possibly null attributes + // Copying nulls across would break later use of the metadata + if (source.getCacheControl() != null) { + ret.setCacheControl(source.getCacheControl()); + } + if (source.getContentDisposition() != null) { + ret.setContentDisposition(source.getContentDisposition()); + } + if (source.getContentEncoding() != null) { + ret.setContentEncoding(source.getContentEncoding()); + } + if (source.getContentMD5() != null) { + ret.setContentMD5(source.getContentMD5()); + } + if (source.getContentType() != null) { + ret.setContentType(source.getContentType()); + } + if (source.getExpirationTime() != null) { + ret.setExpirationTime(source.getExpirationTime()); + } + if (source.getExpirationTimeRuleId() != null) { + ret.setExpirationTimeRuleId(source.getExpirationTimeRuleId()); + } + if (source.getHttpExpiresDate() != null) { + ret.setHttpExpiresDate(source.getHttpExpiresDate()); + } + if (source.getLastModified() != null) { + ret.setLastModified(source.getLastModified()); + } + if (source.getOngoingRestore() != null) { + ret.setOngoingRestore(source.getOngoingRestore()); + } + if (source.getRestoreExpirationTime() != null) { + ret.setRestoreExpirationTime(source.getRestoreExpirationTime()); + } + if (source.getSSEAlgorithm() != null) { + ret.setSSEAlgorithm(source.getSSEAlgorithm()); + } + if (source.getSSECustomerAlgorithm() != null) { + ret.setSSECustomerAlgorithm(source.getSSECustomerAlgorithm()); + } + if (source.getSSECustomerKeyMd5() != null) { + ret.setSSECustomerKeyMd5(source.getSSECustomerKeyMd5()); + } + + for (Map.Entry<String, String> e : source.getUserMetadata().entrySet()) { + ret.addUserMetadata(e.getKey(), e.getValue()); + } + return ret; + } + /** * Return the number of bytes that large input files should optimally * be split into to minimize i/o time. diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md index af3541fafb1..7382029f3af 100644 --- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md +++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md @@ -417,6 +417,13 @@ which pass in authentication details to the test runner These are both Hadoop XML configuration files, which must be placed into `hadoop-tools/hadoop-aws/src/test/resources`. +### `core-site.xml` + +This file pre-exists and sources the configuration created +in `auth-keys.xml`. + +For most purposes you will not need to edit this file, unless you +need to apply a specific, non-default property change during the tests.
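As a rough illustration of how these test resources are consumed, the sketch below assumes only the standard Hadoop `Configuration` loading rules (`core-site.xml` is a default classpath resource) and the stock `fs.s3a.access.key` property name; the `ShowTestConfig` class itself is hypothetical and not part of this patch:

```java
// A minimal sketch, assuming the usual Configuration behavior: core-site.xml
// (and anything it sources, such as auth-keys.xml) is loaded from the test
// classpath, so properties defined there are visible to every test.
import org.apache.hadoop.conf.Configuration;

public class ShowTestConfig {
  public static void main(String[] args) {
    Configuration conf = new Configuration();   // picks up core-site.xml
    // fs.s3a.access.key is normally supplied via auth-keys.xml;
    // this prints null when no auth-keys.xml has been created.
    System.out.println(conf.get("fs.s3a.access.key"));
  }
}
```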
### `auth-keys.xml` diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractGetFileStatus.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractGetFileStatus.java new file mode 100644 index 00000000000..d7b8fe33fdc --- /dev/null +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractGetFileStatus.java @@ -0,0 +1,31 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.fs.contract.s3a; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.contract.AbstractFSContract; +import org.apache.hadoop.fs.contract.AbstractContractGetFileStatusTest; + +public class TestS3AContractGetFileStatus extends AbstractContractGetFileStatusTest { + + @Override + protected AbstractFSContract createContract(Configuration conf) { + return new S3AContract(conf); + } + +} diff --git a/hadoop-tools/hadoop-aws/src/test/resources/contract/s3a.xml b/hadoop-tools/hadoop-aws/src/test/resources/contract/s3a.xml index 4f9c0818ffa..be1e7ca6535 100644 --- a/hadoop-tools/hadoop-aws/src/test/resources/contract/s3a.xml +++ b/hadoop-tools/hadoop-aws/src/test/resources/contract/s3a.xml @@ -77,6 +77,11 @@ false + + fs.contract.supports-getfilestatus + true + + fs.contract.supports-seek true diff --git a/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt b/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt index 73fd6ef15a8..47ef31c4677 100644 --- a/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt +++ b/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt @@ -20,7 +20,7 @@ B.a31 B.a32 ***************************** *** Invoke SampleDataJoin *** ***************************** -[:~]$ $HADOOP_PREFIX/bin/hadoop jar hadoop-datajoin-examples.jar org.apache.hadoop.contrib.utils.join.DataJoinJob datajoin/input datajoin/output Text 1 org.apache.hadoop.contrib.utils.join.SampleDataJoinMapper org.apache.hadoop.contrib.utils.join.SampleDataJoinReducer org.apache.hadoop.contrib.utils.join.SampleTaggedMapOutput Text +[:~]$ $HADOOP_HOME/bin/hadoop jar hadoop-datajoin-examples.jar org.apache.hadoop.contrib.utils.join.DataJoinJob datajoin/input datajoin/output Text 1 org.apache.hadoop.contrib.utils.join.SampleDataJoinMapper org.apache.hadoop.contrib.utils.join.SampleDataJoinReducer org.apache.hadoop.contrib.utils.join.SampleTaggedMapOutput Text Using TextInputFormat: Text Using TextOutputFormat: Text 07/06/01 19:58:23 INFO mapred.FileInputFormat: Total input paths to process : 2 diff --git a/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script 
b/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script index 6bacc437e43..7b74fb67a1f 100644 --- a/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script +++ b/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script @@ -1,3 +1,4 @@ +#!/usr/bin/env bash # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -9,6 +10,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -core=`find . -name 'core*'` +core=$(find . -name 'core*') #Only pipes programs have 5th argument as program name. -gdb -quiet $5 -c $core -x $HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-gdb-commands.txt +gdb -quiet "${5}" -c "${core}" -x "${HADOOP_HOME}/src/c++/pipes/debug/pipes-default-gdb-commands.txt" diff --git a/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh b/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh index f9bfaef2db0..0bd291bb8f3 100644 --- a/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh +++ b/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh @@ -77,8 +77,8 @@ function run_sls_generator() } # let's locate libexec... -if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) diff --git a/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh b/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh index 30fd60a4440..403c4bb05f6 100644 --- a/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh +++ b/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh @@ -71,8 +71,8 @@ function parse_args() function calculate_classpath { hadoop_add_to_classpath_tools hadoop-sls - hadoop_debug "Injecting ${HADOOP_PREFIX}/share/hadoop/tools/sls/html into CLASSPATH" - hadoop_add_classpath "${HADOOP_PREFIX}/share/hadoop/tools/sls/html" + hadoop_debug "Injecting ${HADOOP_TOOLS_DIR}/sls/html into CLASSPATH" + hadoop_add_classpath "${HADOOP_TOOLS_DIR}/sls/html" } function run_simulation() { @@ -105,8 +105,8 @@ function run_simulation() { } # let's locate libexec... 
-if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java index 92d586bfa37..951f5a850df 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java @@ -199,6 +199,15 @@ public class NodeInfo { public ResourceUtilization getNodeUtilization() { return null; } + + @Override + public long getUntrackedTimeStamp() { + return 0; + } + + @Override + public void setUntrackedTimeStamp(long timeStamp) { + } } public static RMNode newNodeInfo(String rackName, String hostName, diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java index 2e9cccb2778..e5013c43d75 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/RMNodeWrapper.java @@ -188,4 +188,13 @@ public class RMNodeWrapper implements RMNode { public ResourceUtilization getNodeUtilization() { return node.getNodeUtilization(); } + + @Override + public long getUntrackedTimeStamp() { + return 0; + } + + @Override + public void setUntrackedTimeStamp(long timeStamp) { + } } diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java index 42007a07560..5a07cc325ca 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java @@ -91,7 +91,7 @@ public class DumpTypedBytes implements Tool { } private void printUsage() { - System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar" + System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar" + " dumptb "); System.out.println(" Dumps all files that match the given pattern to " + "standard output as typed bytes."); diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java index 5d0112458ff..eabf46c83c0 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java @@ -56,7 +56,7 @@ public class HadoopStreaming { } private static void printUsage() { - System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar" + System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar" + " [options]"); System.out.println("Options:"); System.out.println(" dumptb Dumps all files that match the" diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/LoadTypedBytes.java 
b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/LoadTypedBytes.java index 6470393cba5..a7a001cff6c 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/LoadTypedBytes.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/LoadTypedBytes.java @@ -89,7 +89,7 @@ public class LoadTypedBytes implements Tool { } private void printUsage() { - System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar" + System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar" + " loadtb "); System.out.println(" Reads typed bytes from standard input" + " and stores them in a sequence file in"); diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java index 118e0fb8998..9f5b293b369 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java @@ -502,7 +502,7 @@ public class StreamJob implements Tool { } private void printUsage(boolean detailed) { - System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar" + System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar" + " [options]"); System.out.println("Options:"); System.out.println(" -input DFS input file(s) for the Map" @@ -551,7 +551,7 @@ public class StreamJob implements Tool { System.out.println(); System.out.println("For more details about these options:"); System.out.println("Use " + - "$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar -info"); + "$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar -info"); return; } System.out.println(); @@ -611,7 +611,7 @@ public class StreamJob implements Tool { System.out.println(" -D stream.non.zero.exit.is.failure=false"); System.out.println("Use a custom hadoop streaming build along with standard" + " hadoop install:"); - System.out.println(" $HADOOP_PREFIX/bin/hadoop jar " + + System.out.println(" $HADOOP_HOME/bin/hadoop jar " + "/path/my-hadoop-streaming.jar [...]\\"); System.out.println(" [...] 
-D stream.shipped.hadoopstreaming=" + "/path/my-hadoop-streaming.jar"); @@ -625,7 +625,7 @@ public class StreamJob implements Tool { System.out.println(" -cmdenv EXAMPLE_DIR=/home/example/dictionaries/"); System.out.println(); System.out.println("Shortcut:"); - System.out.println(" setenv HSTREAMING \"$HADOOP_PREFIX/bin/hadoop jar " + + System.out.println(" setenv HSTREAMING \"$HADOOP_HOME/bin/hadoop jar " + "hadoop-streaming.jar\""); System.out.println(); System.out.println("Example: $HSTREAMING -mapper " + @@ -648,9 +648,9 @@ public class StreamJob implements Tool { // -------------------------------------------- protected String getHadoopClientHome() { - String h = env_.getProperty("HADOOP_PREFIX"); // standard Hadoop + String h = env_.getProperty("HADOOP_HOME"); // standard Hadoop if (h == null) { - //fail("Missing required environment variable: HADOOP_PREFIX"); + //fail("Missing required environment variable: HADOOP_HOME"); h = "UNDEF"; } return h; @@ -674,8 +674,8 @@ public class StreamJob implements Tool { // usually found in: build/contrib or build/hadoop--dev-streaming.jar // First try an explicit spec: it's too hard to find our own location in this case: - // $HADOOP_PREFIX/bin/hadoop jar /not/first/on/classpath/custom-hadoop-streaming.jar - // where findInClasspath() would find the version of hadoop-streaming.jar in $HADOOP_PREFIX + // $HADOOP_HOME/bin/hadoop jar /not/first/on/classpath/custom-hadoop-streaming.jar + // where findInClasspath() would find the version of hadoop-streaming.jar in $HADOOP_HOME String runtimeClasses = config_.get("stream.shipped.hadoopstreaming"); // jar or class dir if (runtimeClasses == null) { diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java index 7b7901faad1..860fb89cfcf 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java @@ -42,6 +42,11 @@ import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.mapred.SkipBadRecords; import org.apache.hadoop.mapred.Utils; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; public class TestStreamingBadRecords extends ClusterMapReduceTestCase { @@ -68,7 +73,8 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase utilTest.redirectIfAntJunit(); } - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { Properties props = new Properties(); props.setProperty(JTConfig.JT_RETIREJOBS, "false"); props.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "false"); @@ -242,6 +248,7 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase } */ + @Test public void testNoOp() { // Added to avoid warnings when running this disabled test } diff --git a/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh b/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh index f462fad61df..3b41299630d 100755 --- a/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh +++ b/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh @@ -26,8 +26,8 @@ function hadoop_usage bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P) # let's locate libexec... 
-if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec" fi diff --git a/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh b/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh index 33059894dc7..358f0c90118 100755 --- a/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh +++ b/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh @@ -26,8 +26,8 @@ function hadoop_usage bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P) # let's locate libexec... -if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec" fi diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn b/hadoop-yarn-project/hadoop-yarn/bin/yarn index cb2364b3957..cac3bb6c0cc 100755 --- a/hadoop-yarn-project/hadoop-yarn/bin/yarn +++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn @@ -51,8 +51,8 @@ function hadoop_usage # let's locate libexec... -if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P) HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec" diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh index 7df9fa1019e..d7fa4066f60 100644 --- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh +++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh @@ -53,7 +53,7 @@ function hadoop_subproject_init hadoop_deprecate_envvar YARN_SLAVES HADOOP_SLAVES - HADOOP_YARN_HOME="${HADOOP_YARN_HOME:-$HADOOP_PREFIX}" + HADOOP_YARN_HOME="${HADOOP_YARN_HOME:-$HADOOP_HOME}" # YARN-1429 added the completely superfluous YARN_USER_CLASSPATH # env var. We're going to override HADOOP_USER_CLASSPATH to keep @@ -74,8 +74,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] && . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" -elif [ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]; then - . "${HADOOP_PREFIX}/libexec/hadoop-config.sh" +elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then + . "${HADOOP_HOME}/libexec/hadoop-config.sh" else echo "ERROR: Hadoop common not found." 2>&1 exit 1 diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh index 2f886f2a473..a195c60d006 100644 --- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh +++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh @@ -21,8 +21,8 @@ function hadoop_usage } # let's locate libexec... 
-if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh index 4ef08648245..958c8bd9754 100644 --- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh +++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh @@ -25,8 +25,8 @@ this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) # let's locate libexec... -if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec" fi diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceUtilization.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceUtilization.java index 5f52f85f0c6..2ae4872fbcd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceUtilization.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceUtilization.java @@ -44,6 +44,14 @@ public abstract class ResourceUtilization implements return utilization; } + @Public + @Unstable + public static ResourceUtilization newInstance( + ResourceUtilization resourceUtil) { + return newInstance(resourceUtil.getPhysicalMemory(), + resourceUtil.getVirtualMemory(), resourceUtil.getCPU()); + } + /** * Get used virtual memory. * @@ -147,4 +155,18 @@ public abstract class ResourceUtilization implements this.setVirtualMemory(this.getVirtualMemory() + vmem); this.setCPU(this.getCPU() + cpu); } + + /** + * Subtract utilization from the current one. + * @param pmem Physical memory to be subtracted. + * @param vmem Virtual memory to be subtracted. + * @param cpu CPU utilization to be subtracted. + */ + @Public + @Unstable + public void subtractFrom(int pmem, int vmem, float cpu) { + this.setPhysicalMemory(this.getPhysicalMemory() - pmem); + this.setVirtualMemory(this.getVirtualMemory() - vmem); + this.setCPU(this.getCPU() - cpu); + } } \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index 8acee579ff3..66b293f9594 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -647,6 +647,15 @@ public class YarnConfiguration extends Configuration { public static final String DEFAULT_RM_NODEMANAGER_MINIMUM_VERSION = "NONE"; + /** + * Timeout(msec) for an untracked node to remain in shutdown or decommissioned + * state. 
+ */ + public static final String RM_NODEMANAGER_UNTRACKED_REMOVAL_TIMEOUT_MSEC = + RM_PREFIX + "node-removal-untracked.timeout-ms"; + public static final int + DEFAULT_RM_NODEMANAGER_UNTRACKED_REMOVAL_TIMEOUT_MSEC = 60000; + /** * RM proxy users' prefix */ diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 0f82903dfe0..cbe03480550 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -259,8 +259,9 @@ public class ApplicationMaster { private String domainId = null; // Hardcoded path to shell script in launch container's local env - private static final String ExecShellStringPath = Client.SCRIPT_PATH + ".sh"; - private static final String ExecBatScripStringtPath = Client.SCRIPT_PATH + private static final String EXEC_SHELL_STRING_PATH = Client.SCRIPT_PATH + + ".sh"; + private static final String EXEC_BAT_SCRIPT_STRING_PATH = Client.SCRIPT_PATH + ".bat"; // Hardcoded path to custom log_properties @@ -1025,8 +1026,8 @@ public class ApplicationMaster { LocalResource shellRsrc = LocalResource.newInstance(yarnUrl, LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, shellScriptPathLen, shellScriptPathTimestamp); - localResources.put(Shell.WINDOWS ? ExecBatScripStringtPath : - ExecShellStringPath, shellRsrc); + localResources.put(Shell.WINDOWS ? EXEC_BAT_SCRIPT_STRING_PATH : + EXEC_SHELL_STRING_PATH, shellRsrc); shellCommand = Shell.WINDOWS ? windows_command : linux_bash_command; } @@ -1037,8 +1038,8 @@ public class ApplicationMaster { vargs.add(shellCommand); // Set shell script path if (!scriptPath.isEmpty()) { - vargs.add(Shell.WINDOWS ? ExecBatScripStringtPath - : ExecShellStringPath); + vargs.add(Shell.WINDOWS ? EXEC_BAT_SCRIPT_STRING_PATH + : EXEC_SHELL_STRING_PATH); } // Set args for the shell command if any diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java index e9674cfc147..acd29fb02d6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java @@ -122,8 +122,20 @@ public class ConverterUtils { public static ApplicationId toApplicationId(RecordFactory recordFactory, String appIdStr) { Iterator it = _split(appIdStr).iterator(); - it.next(); // prefix. TODO: Validate application prefix - return toApplicationId(recordFactory, it); + if (!it.next().equals(APPLICATION_PREFIX)) { + throw new IllegalArgumentException("Invalid ApplicationId prefix: " + + appIdStr + ". 
The valid ApplicationId should start with prefix " + + APPLICATION_PREFIX); + } + try { + return toApplicationId(recordFactory, it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid ApplicationId: " + appIdStr, + n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid ApplicationId: " + appIdStr, + e); + } } private static ApplicationId toApplicationId(RecordFactory recordFactory, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/Resources.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/Resources.java index b05d021ae27..558f96c7446 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/Resources.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/Resources.java @@ -152,7 +152,7 @@ public class Resources { } /** - * Multiply @param rhs by @param by, and add the result to @param lhs + * Multiply {@code rhs} by {@code by}, and add the result to {@code lhs} * without creating any new {@link Resource} object */ public static Resource multiplyAndAddTo( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java index f8e67ee1fb3..faf4a774447 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java @@ -33,9 +33,14 @@ import org.apache.hadoop.http.HttpConfig.Policy; import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.HAUtil; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; +import org.apache.hadoop.yarn.factories.RecordFactory; +import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.RMHAUtils; +import org.apache.hadoop.yarn.webapp.BadRequestException; +import org.apache.hadoop.yarn.webapp.NotFoundException; @Private @Evolving @@ -378,4 +383,21 @@ public class WebAppUtils { } return password; } + + public static ApplicationId parseApplicationId(RecordFactory recordFactory, + String appId) { + if (appId == null || appId.isEmpty()) { + throw new NotFoundException("appId, " + appId + ", is empty or null"); + } + ApplicationId aid = null; + try { + aid = ConverterUtils.toApplicationId(recordFactory, appId); + } catch (Exception e) { + throw new BadRequestException(e); + } + if (aid == null) { + throw new NotFoundException("app with id " + appId + " not found"); + } + return aid; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml index ea1afe48ae0..9e8b5e9b208 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml @@ -1816,8 +1816,12 @@ - Indicate to clients whether timeline service is enabled or not. 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index ea1afe48ae0..9e8b5e9b208 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -1816,8 +1816,12 @@
   <property>
-    <description>Indicate to clients whether timeline service is enabled or not.
-    If enabled, clients will put entities and events to the timeline server.
+    <description>
+    On the server side, this property indicates whether the timeline service
+    is enabled. On the client side, users can enable it to indicate whether
+    the client wants to use the timeline service. If it is enabled on the
+    client side along with security, then the YARN client tries to fetch
+    delegation tokens for the timeline server.
     </description>
     <name>yarn.timeline-service.enabled</name>
     <value>false</value>
@@ -1969,7 +1973,10 @@
   <property>
-    <description>Client policy for whether timeline operations are non-fatal
+    <description>Client policy for whether timeline operations are non-fatal.
+    Should a failure to obtain a delegation token be considered an
+    application failure (option = false), or should the client attempt to
+    continue to publish information without it (option = true)?
     </description>
     <name>yarn.timeline-service.client.best-effort</name>
     <value>false</value>
@@ -2715,4 +2722,17 @@
     <name>yarn.timeline-service.webapp.rest-csrf.methods-to-ignore</name>
     <value>GET,OPTIONS,HEAD</value>
   </property>
+
+  <property>
+    <description>
+    The minimum amount of time (in milliseconds) an inactive (decommissioned
+    or shut down) node can stay in the nodes list of the resourcemanager
+    after being declared untracked. A node is marked untracked if and only
+    if it is absent from both the include and exclude nodemanager lists on
+    the RM. All inactive nodes are checked twice per timeout interval or
+    every 10 minutes, whichever is smaller, and marked appropriately. The
+    same is done when the refreshNodes command (graceful or otherwise) is
+    invoked.
+    </description>
+    <name>yarn.resourcemanager.node-removal-untracked.timeout-ms</name>
+    <value>60000</value>
+  </property>
 </configuration>
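Server code can read the new untracked-node timeout through the YarnConfiguration
constants introduced at the top of this patch; a minimal sketch (the wrapper class is
illustrative only):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;

    public class UntrackedTimeoutSketch {
      public static int untrackedRemovalTimeoutMs(Configuration conf) {
        // Falls back to the 60000 ms default declared alongside the key.
        return conf.getInt(
            YarnConfiguration.RM_NODEMANAGER_UNTRACKED_REMOVAL_TIMEOUT_MSEC,
            YarnConfiguration.DEFAULT_RM_NODEMANAGER_UNTRACKED_REMOVAL_TIMEOUT_MSEC);
      }
    }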
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/TestResourceUtilization.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/TestResourceUtilization.java
new file mode 100644
index 00000000000..5934846e2f3
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/TestResourceUtilization.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.api.records;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestResourceUtilization {
+
+  @Test
+  public void testResourceUtilization() {
+    ResourceUtilization u1 = ResourceUtilization.newInstance(10, 20, 0.5f);
+    ResourceUtilization u2 = ResourceUtilization.newInstance(u1);
+    ResourceUtilization u3 = ResourceUtilization.newInstance(10, 20, 0.5f);
+    ResourceUtilization u4 = ResourceUtilization.newInstance(20, 20, 0.5f);
+    ResourceUtilization u5 = ResourceUtilization.newInstance(30, 40, 0.8f);
+
+    Assert.assertEquals(u1, u2);
+    Assert.assertEquals(u1, u3);
+    Assert.assertNotEquals(u1, u4);
+    Assert.assertNotEquals(u2, u5);
+    Assert.assertNotEquals(u4, u5);
+
+    Assert.assertTrue(u1.hashCode() == u2.hashCode());
+    Assert.assertTrue(u1.hashCode() == u3.hashCode());
+    Assert.assertFalse(u1.hashCode() == u4.hashCode());
+    Assert.assertFalse(u2.hashCode() == u5.hashCode());
+    Assert.assertFalse(u4.hashCode() == u5.hashCode());
+
+    Assert.assertTrue(u1.getPhysicalMemory() == 10);
+    Assert.assertFalse(u1.getVirtualMemory() == 10);
+    Assert.assertTrue(u1.getCPU() == 0.5f);
+
+    Assert.assertNotNull(u1.toString());
+
+    u1.addTo(10, 0, 0.0f);
+    Assert.assertNotEquals(u1, u2);
+    Assert.assertEquals(u1, u4);
+    u1.addTo(10, 20, 0.3f);
+    Assert.assertEquals(u1, u5);
+    u1.subtractFrom(10, 20, 0.3f);
+    Assert.assertEquals(u1, u4);
+    u1.subtractFrom(10, 0, 0.0f);
+    Assert.assertEquals(u1, u3);
+  }
+}
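Beyond the equals/hashCode contract, the test above pins down ResourceUtilization's
in-place addTo/subtractFrom arithmetic. A usage sketch built only on the methods the
test exercises (the tracker class itself is hypothetical, not part of this patch):

    import org.apache.hadoop.yarn.api.records.ResourceUtilization;

    public class NodeUtilizationTrackerSketch {
      // Running node-wide total; starts empty.
      private final ResourceUtilization total =
          ResourceUtilization.newInstance(0, 0, 0.0f);

      public void containerStarted(int pmem, int vmem, float cpu) {
        total.addTo(pmem, vmem, cpu);        // mutates in place, no new object
      }

      public void containerFinished(int pmem, int vmem, float cpu) {
        total.subtractFrom(pmem, vmem, cpu);
      }

      public ResourceUtilization snapshot() {
        return ResourceUtilization.newInstance(total);  // defensive copy
      }
    }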
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index cedbd2eb1ae..d241077c4fe 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -86,7 +86,14 @@ public class ApplicationHistoryServer extends CompositeService {

   @Override
   protected void serviceInit(Configuration conf) throws Exception {
-    // init timeline services first
+
+    // do security login first.
+    try {
+      doSecureLogin(conf);
+    } catch(IOException ie) {
+      throw new YarnRuntimeException("Failed to login", ie);
+    }
+    // init timeline services
     timelineStore = createTimelineStore(conf);
     addIfService(timelineStore);
     secretManagerService = createTimelineDelegationTokenSecretManagerService(conf);
@@ -111,12 +118,6 @@ public class ApplicationHistoryServer extends CompositeService {

   @Override
   protected void serviceStart() throws Exception {
-    try {
-      doSecureLogin(getConfig());
-    } catch(IOException ie) {
-      throw new YarnRuntimeException("Failed to login", ie);
-    }
-
     super.serviceStart();
     startWebApp();
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppBlock.java
index 44ed22345da..69beef27ae6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppBlock.java
@@ -24,12 +24,14 @@ import static org.apache.hadoop.yarn.webapp.YarnWebParams.WEB_UI_TYPE;

 import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
+import java.util.Map;

 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
@@ -143,6 +145,7 @@ public class AppBlock extends HtmlBlock {
           .append(" type: 'PUT',")
           .append(" url: '/ws/v1/cluster/apps/").append(aid).append("/state',")
           .append(" contentType: 'application/json',")
+          .append(getCSRFHeaderString(conf))
           .append(" data: '{\"state\":\"KILLED\"}',")
           .append(" dataType: 'json'")
           .append(" }).done(function(data){")
@@ -369,4 +372,21 @@ public class AppBlock extends HtmlBlock {
   protected LogAggregationStatus getLogAggregationStatus() {
     return null;
   }
+
+  public static String getCSRFHeaderString(Configuration conf) {
+    String ret = "";
+    if (conf.getBoolean(YarnConfiguration.RM_CSRF_ENABLED, false)) {
+      ret = " headers : { '";
+      Map<String, String> filterParams = RestCsrfPreventionFilter
+          .getFilterParams(conf, YarnConfiguration.RM_CSRF_PREFIX);
+      if (filterParams
+          .containsKey(RestCsrfPreventionFilter.CUSTOM_HEADER_PARAM)) {
+        ret += filterParams.get(RestCsrfPreventionFilter.CUSTOM_HEADER_PARAM);
+      } else {
+        ret += RestCsrfPreventionFilter.HEADER_DEFAULT;
+      }
+      ret += "' : 'null' },";
+    }
+    return ret;
+  }
 }
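When RM CSRF protection is enabled, getCSRFHeaderString splices a headers clause into
the kill-application $.ajax call so the PUT passes the RestCsrfPreventionFilter. A
sketch of the observable behavior (the test class is illustrative; the concrete default
header name comes from RestCsrfPreventionFilter.HEADER_DEFAULT and is only assumed in
the comment):

    // Sketch only: checks the shape of the emitted JavaScript fragment.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.server.webapp.AppBlock;
    import org.junit.Assert;
    import org.junit.Test;

    public class CsrfHeaderStringSketch {
      @Test
      public void emptyWhenDisabled() {
        Assert.assertEquals("",
            AppBlock.getCSRFHeaderString(new Configuration()));
      }

      @Test
      public void headersClauseWhenEnabled() {
        Configuration conf = new Configuration();
        conf.setBoolean(YarnConfiguration.RM_CSRF_ENABLED, true);
        String fragment = AppBlock.getCSRFHeaderString(conf);
        // Expected shape, e.g. " headers : { 'X-XSRF-HEADER' : 'null' },"
        Assert.assertTrue(fragment.startsWith(" headers : { '"));
        Assert.assertTrue(fragment.endsWith("' : 'null' },"));
      }
    }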
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebPageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebPageUtils.java
index a07baa2366f..3a26ae58890 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebPageUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebPageUtils.java
@@ -95,4 +95,10 @@ public class WebPageUtils {
       .append(", 'mRender': parseHadoopID }]").toString();
   }

+  public static String resourceRequestsTableInit() {
+    return tableInit().append(", 'aaData': resourceRequestsTableData")
+        .append(", bDeferRender: true").append(", bProcessing: true}")
+        .toString();
+  }
+
 }
\ No newline at end of file
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
index 40e40c98421..19ea30136e8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
@@ -429,7 +429,12 @@ public class WebServices {
     if (appId == null || appId.isEmpty()) {
       throw new NotFoundException("appId, " + appId + ", is empty or null");
     }
-    ApplicationId aid = ConverterUtils.toApplicationId(appId);
+    ApplicationId aid = null;
+    try {
+      aid = ConverterUtils.toApplicationId(appId);
+    } catch (Exception e) {
+      throw new BadRequestException(e);
+    }
     if (aid == null) {
       throw new NotFoundException("appId is null");
     }
@@ -442,8 +447,12 @@ public class WebServices {
       throw new NotFoundException("appAttemptId, " + appAttemptId
           + ", is empty or null");
     }
-    ApplicationAttemptId aaid =
-        ConverterUtils.toApplicationAttemptId(appAttemptId);
+    ApplicationAttemptId aaid = null;
+    try {
+      aaid = ConverterUtils.toApplicationAttemptId(appAttemptId);
+    } catch (Exception e) {
+      throw new BadRequestException(e);
+    }
     if (aaid == null) {
       throw new NotFoundException("appAttemptId is null");
     }
@@ -455,7 +464,12 @@ public class WebServices {
       throw new NotFoundException("containerId, " + containerId
           + ", is empty or null");
     }
-    ContainerId cid = ConverterUtils.toContainerId(containerId);
+    ContainerId cid = null;
+    try {
+      cid = ConverterUtils.toContainerId(containerId);
+    } catch (Exception e) {
+      throw new BadRequestException(e);
+    }
     if (cid == null) {
       throw new NotFoundException("containerId is null");
     }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index 051bb4e5fcd..59c333289db 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -117,6 +117,11 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
     </dependency>
+
+    <dependency>
+      <groupId>com.codahale.metrics</groupId>
+      <artifactId>metrics-core</artifactId>
+    </dependency>
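The metrics-core addition gives the NodeManager access to the Codahale metrics library;
how the NM code paths use it is outside this hunk. Purely as orientation, a minimal
sketch of the API the new dependency provides (registry and metric names are made up):

    // Sketch only: illustrates the com.codahale.metrics API surface.
    import com.codahale.metrics.Counter;
    import com.codahale.metrics.MetricRegistry;
    import com.codahale.metrics.Timer;

    public class MetricsCoreSketch {
      public static void main(String[] args) {
        MetricRegistry registry = new MetricRegistry();
        Counter launched = registry.counter("containers-launched");
        Timer localization = registry.timer("localization-time");

        launched.inc();
        try (Timer.Context ctx = localization.time()) {
          // timed work would run here; close() records the duration
        }
        System.out.println(launched.getCount());  // prints 1
      }
    }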