HADOOP-14729. Upgrade JUnit 3 test cases to JUnit 4. Contributed by Ajay Kumar.
This commit is contained in:
parent 3a4e861169
commit 8b7cbe3840
File diff suppressed because it is too large
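Every file in this diff follows the same JUnit 3 to JUnit 4 migration pattern: the test class stops extending junit.framework.TestCase, lifecycle overrides become @Before/@After methods, each test method gains an @Test annotation, and assertions come in through a static import of org.junit.Assert. A minimal sketch of the pattern follows (TestExample is illustrative only, not a file touched by this commit):

// JUnit 3 style: behavior inherited from junit.framework.TestCase,
// tests discovered by the "test" method-name prefix.
import junit.framework.TestCase;

public class TestExample extends TestCase {
  @Override
  protected void setUp() throws Exception {
    // runs before every test method
  }

  public void testAddition() {
    assertEquals(4, 2 + 2); // assertEquals inherited from TestCase
  }
}

// JUnit 4 style: a plain class, tests discovered by annotations.
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;

public class TestExample {
  @Before
  public void setUp() {
    // @Before replaces the setUp() override
  }

  @Test
  public void testAddition() {
    assertEquals(4, 2 + 2); // assertEquals from the static import
  }
}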
|
@ -17,7 +17,8 @@
|
|||
*/
|
||||
package org.apache.hadoop.conf;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
|
@ -25,11 +26,12 @@ import java.util.Properties;
|
|||
* Created 21-Jan-2009 13:42:36
|
||||
*/
|
||||
|
||||
public class TestConfigurationSubclass extends TestCase {
|
||||
public class TestConfigurationSubclass {
|
||||
private static final String EMPTY_CONFIGURATION_XML
|
||||
= "/org/apache/hadoop/conf/empty-configuration.xml";
|
||||
|
||||
|
||||
@Test
|
||||
public void testGetProps() {
|
||||
SubConf conf = new SubConf(true);
|
||||
Properties properties = conf.getProperties();
|
||||
|
@ -37,6 +39,7 @@ public class TestConfigurationSubclass extends TestCase {
|
|||
properties.getProperty("hadoop.tmp.dir"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReload() throws Throwable {
|
||||
SubConf conf = new SubConf(true);
|
||||
assertFalse(conf.isReloaded());
|
||||
|
@ -45,6 +48,7 @@ public class TestConfigurationSubclass extends TestCase {
|
|||
Properties properties = conf.getProperties();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReloadNotQuiet() throws Throwable {
|
||||
SubConf conf = new SubConf(true);
|
||||
conf.setQuietMode(false);
|
||||
|
|
|
@ -21,15 +21,14 @@ package org.apache.hadoop.conf;
|
|||
import java.io.ByteArrayOutputStream;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.CommonConfigurationKeys;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
public class TestDeprecatedKeys extends TestCase {
|
||||
public class TestDeprecatedKeys {
|
||||
|
||||
//Tests a deprecated key
|
||||
@Test
|
||||
public void testDeprecatedKeys() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
conf.set("topology.script.file.name", "xyz");
|
||||
|
@ -39,6 +38,7 @@ public class TestDeprecatedKeys extends TestCase {
|
|||
}
|
||||
|
||||
//Tests reading / writing a conf file with deprecation after setting
|
||||
@Test
|
||||
public void testReadWriteWithDeprecatedKeys() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
conf.setBoolean("old.config.yet.to.be.deprecated", true);
|
||||
|
|
|
@ -18,10 +18,11 @@
|
|||
package org.apache.hadoop.conf;
|
||||
|
||||
import java.util.List;
|
||||
import org.junit.Test;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class TestGetInstances extends TestCase {
|
||||
public class TestGetInstances {
|
||||
|
||||
interface SampleInterface {}
|
||||
|
||||
|
@ -30,7 +31,7 @@ public class TestGetInstances extends TestCase {
|
|||
static class SampleClass implements SampleInterface {
|
||||
SampleClass() {}
|
||||
}
|
||||
|
||||
|
||||
static class AnotherClass implements ChildInterface {
|
||||
AnotherClass() {}
|
||||
}
|
||||
|
@ -39,6 +40,7 @@ public class TestGetInstances extends TestCase {
|
|||
* Makes sure <code>Configuration.getInstances()</code> returns
|
||||
* instances of the required type.
|
||||
*/
|
||||
@Test
|
||||
public void testGetInstances() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
|
||||
|
|
|
@ -24,9 +24,10 @@ import java.io.OutputStreamWriter;
|
|||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.test.GenericTestUtils;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class TestAvroFSInput extends TestCase {
|
||||
public class TestAvroFSInput {
|
||||
|
||||
private static final String INPUT_DIR = "AvroFSInput";
|
||||
|
||||
|
@ -34,6 +35,7 @@ public class TestAvroFSInput extends TestCase {
|
|||
return new Path(GenericTestUtils.getTempPath(INPUT_DIR));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAFSInput() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
FileSystem fs = FileSystem.getLocal(conf);
|
||||
|
|
|
@ -17,7 +17,10 @@
|
|||
*/
|
||||
package org.apache.hadoop.fs;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
@ -29,16 +32,16 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
|
|||
import org.apache.hadoop.test.GenericTestUtils;
|
||||
|
||||
/** This test makes sure that "DU" does not get to run on each call to getUsed */
|
||||
public class TestDU extends TestCase {
|
||||
public class TestDU {
|
||||
final static private File DU_DIR = GenericTestUtils.getTestDir("dutmp");
|
||||
|
||||
@Override
|
||||
@Before
|
||||
public void setUp() {
|
||||
FileUtil.fullyDelete(DU_DIR);
|
||||
assertTrue(DU_DIR.mkdirs());
|
||||
FileUtil.fullyDelete(DU_DIR);
|
||||
assertTrue(DU_DIR.mkdirs());
|
||||
}
|
||||
|
||||
@Override
|
||||
@After
|
||||
public void tearDown() throws IOException {
|
||||
FileUtil.fullyDelete(DU_DIR);
|
||||
}
|
||||
|
@ -69,6 +72,7 @@ public class TestDU extends TestCase {
|
|||
* @throws IOException
|
||||
* @throws InterruptedException
|
||||
*/
|
||||
@Test
|
||||
public void testDU() throws IOException, InterruptedException {
|
||||
final int writtenSize = 32*1024; // writing 32K
|
||||
// Allow for extra 4K on-disk slack for local file systems
|
||||
|
@ -107,6 +111,8 @@ public class TestDU extends TestCase {
|
|||
duSize >= writtenSize &&
|
||||
writtenSize <= (duSize + slack));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDUGetUsedWillNotReturnNegative() throws IOException {
|
||||
File file = new File(DU_DIR, "data");
|
||||
assertTrue(file.createNewFile());
|
||||
|
@ -118,6 +124,7 @@ public class TestDU extends TestCase {
|
|||
assertTrue(String.valueOf(duSize), duSize >= 0L);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDUSetInitialValue() throws IOException {
|
||||
File file = new File(DU_DIR, "dataX");
|
||||
createFile(file, 8192);
|
||||
|
|
|
@ -23,12 +23,12 @@ import java.lang.reflect.Modifier;
|
|||
import java.net.URI;
|
||||
import java.util.Iterator;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.viewfs.ConfigUtil;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestFilterFs extends TestCase {
|
||||
public class TestFilterFs {
|
||||
|
||||
private static final Log LOG = FileSystem.LOG;
|
||||
|
||||
|
@ -41,7 +41,8 @@ public class TestFilterFs extends TestCase {
|
|||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testFilterFileSystem() throws Exception {
|
||||
for (Method m : AbstractFileSystem.class.getDeclaredMethods()) {
|
||||
if (Modifier.isStatic(m.getModifiers()))
|
||||
|
@ -69,6 +70,7 @@ public class TestFilterFs extends TestCase {
|
|||
|
||||
// Test that FilterFs will accept an AbstractFileSystem to be filtered which
|
||||
// has an optional authority, such as ViewFs
|
||||
@Test
|
||||
public void testFilteringWithNonrequiredAuthority() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
ConfigUtil.addLink(conf, "custom", "/mnt", URI.create("file:///"));
|
||||
|
|
|
@ -22,7 +22,10 @@ import java.util.Arrays;
|
|||
import java.util.Comparator;
|
||||
import java.util.Random;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.test.GenericTestUtils;
|
||||
|
@ -30,7 +33,7 @@ import org.apache.hadoop.test.GenericTestUtils;
|
|||
/**
|
||||
* Testing the correctness of FileSystem.getFileBlockLocations.
|
||||
*/
|
||||
public class TestGetFileBlockLocations extends TestCase {
|
||||
public class TestGetFileBlockLocations {
|
||||
private static String TEST_ROOT_DIR = GenericTestUtils.getTempPath(
|
||||
"testGetFileBlockLocations");
|
||||
private static final int FileLength = 4 * 1024 * 1024; // 4MB
|
||||
|
@ -39,11 +42,8 @@ public class TestGetFileBlockLocations extends TestCase {
|
|||
private FileSystem fs;
|
||||
private Random random;
|
||||
|
||||
/**
|
||||
* @see TestCase#setUp()
|
||||
*/
|
||||
@Override
|
||||
protected void setUp() throws IOException {
|
||||
@Before
|
||||
public void setUp() throws IOException {
|
||||
conf = new Configuration();
|
||||
Path rootPath = new Path(TEST_ROOT_DIR);
|
||||
path = new Path(rootPath, "TestGetFileBlockLocations");
|
||||
|
@ -91,15 +91,14 @@ public class TestGetFileBlockLocations extends TestCase {
|
|||
assertTrue(locations.length == 0);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @see TestCase#tearDown()
|
||||
*/
|
||||
@Override
|
||||
protected void tearDown() throws IOException {
|
||||
|
||||
@After
|
||||
public void tearDown() throws IOException {
|
||||
fs.delete(path, true);
|
||||
fs.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFailureNegativeParameters() throws IOException {
|
||||
FileStatus status = fs.getFileStatus(path);
|
||||
try {
|
||||
|
@ -117,6 +116,7 @@ public class TestGetFileBlockLocations extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetFileBlockLocations1() throws IOException {
|
||||
FileStatus status = fs.getFileStatus(path);
|
||||
oneTest(0, (int) status.getLen(), status);
|
||||
|
@ -130,6 +130,7 @@ public class TestGetFileBlockLocations extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetFileBlockLocations2() throws IOException {
|
||||
FileStatus status = fs.getFileStatus(path);
|
||||
for (int i = 0; i < 1000; ++i) {
|
||||
|
|
|
@ -20,10 +20,12 @@ package org.apache.hadoop.fs;
|
|||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class TestGlobExpander extends TestCase {
|
||||
public class TestGlobExpander {
|
||||
|
||||
@Test
|
||||
public void testExpansionIsIdentical() throws IOException {
|
||||
checkExpansionIsIdentical("");
|
||||
checkExpansionIsIdentical("/}");
|
||||
|
@ -35,6 +37,7 @@ public class TestGlobExpander extends TestCase {
|
|||
checkExpansionIsIdentical("p{a\\/b,c\\/d}s");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExpansion() throws IOException {
|
||||
checkExpansion("{a/b}", "a/b");
|
||||
checkExpansion("/}{a/b}", "/}a/b");
|
||||
|
|
|
@ -33,20 +33,21 @@ import java.util.Random;
|
|||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.TrashPolicyDefault.Emptier;
|
||||
import org.apache.hadoop.fs.permission.FsPermission;
|
||||
import org.apache.hadoop.test.GenericTestUtils;
|
||||
import org.apache.hadoop.util.Time;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* This class tests commands from Trash.
|
||||
*/
|
||||
public class TestTrash extends TestCase {
|
||||
public class TestTrash {
|
||||
|
||||
private final static Path TEST_DIR = new Path(GenericTestUtils.getTempPath(
|
||||
"testTrash"));
|
||||
|
@ -507,19 +508,22 @@ public class TestTrash extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTrash() throws IOException {
|
||||
Configuration conf = new Configuration();
|
||||
conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
|
||||
trashShell(FileSystem.getLocal(conf), TEST_DIR);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNonDefaultFS() throws IOException {
|
||||
Configuration conf = new Configuration();
|
||||
conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
|
||||
conf.set("fs.defaultFS", "invalid://host/bar/foo");
|
||||
trashNonDefaultFS(conf);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testPluggableTrash() throws IOException {
|
||||
Configuration conf = new Configuration();
|
||||
|
||||
|
@ -604,6 +608,7 @@ public class TestTrash extends TestCase {
|
|||
verifyTrashPermission(FileSystem.getLocal(conf), conf);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTrashEmptier() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
// Trash with 12 second deletes and 6 seconds checkpoints
|
||||
|
@ -665,12 +670,9 @@ public class TestTrash extends TestCase {
|
|||
emptierThread.interrupt();
|
||||
emptierThread.join();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see TestCase#tearDown()
|
||||
*/
|
||||
@Override
|
||||
protected void tearDown() throws IOException {
|
||||
|
||||
@After
|
||||
public void tearDown() throws IOException {
|
||||
File trashDir = new File(TEST_DIR.toUri().getPath());
|
||||
if (trashDir.exists() && !FileUtil.fullyDelete(trashDir)) {
|
||||
throw new IOException("Cannot remove data directory: " + trashDir);
|
||||
|
|
|
@ -20,16 +20,17 @@ package org.apache.hadoop.fs;
|
|||
import java.io.DataOutputStream;
|
||||
import java.io.IOException;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.test.GenericTestUtils;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* test for the input truncation bug when mark/reset is used.
|
||||
* HADOOP-1489
|
||||
*/
|
||||
public class TestTruncatedInputBug extends TestCase {
|
||||
public class TestTruncatedInputBug {
|
||||
private static String TEST_ROOT_DIR =
|
||||
GenericTestUtils.getTestDir().getAbsolutePath();
|
||||
|
||||
|
@ -49,6 +50,7 @@ public class TestTruncatedInputBug extends TestCase {
|
|||
* checksum file system currently depends on the request size
|
||||
* >= bytesPerSum to work properly.
|
||||
*/
|
||||
@Test
|
||||
public void testTruncatedInputBug() throws IOException {
|
||||
final int ioBufSize = 512;
|
||||
final int fileSize = ioBufSize*4;
|
||||
|
|
|
@ -21,11 +21,14 @@ import java.io.IOException;
|
|||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import static org.apache.hadoop.fs.permission.FsAction.*;
|
||||
|
||||
public class TestFsPermission extends TestCase {
|
||||
public class TestFsPermission {
|
||||
|
||||
@Test
|
||||
public void testFsAction() {
|
||||
//implies
|
||||
for(FsAction a : FsAction.values()) {
|
||||
|
@ -53,6 +56,7 @@ public class TestFsPermission extends TestCase {
|
|||
* Ensure that when manually specifying permission modes we get
|
||||
* the expected values back out for all combinations
|
||||
*/
|
||||
@Test
|
||||
public void testConvertingPermissions() {
|
||||
for(short s = 0; s <= 01777; s++) {
|
||||
assertEquals(s, new FsPermission(s).toShort());
|
||||
|
@ -80,6 +84,7 @@ public class TestFsPermission extends TestCase {
|
|||
assertEquals(02000, s);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSpecialBitsToString() {
|
||||
for (boolean sb : new boolean[] { false, true }) {
|
||||
for (FsAction u : FsAction.values()) {
|
||||
|
@ -106,6 +111,7 @@ public class TestFsPermission extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFsPermission() {
|
||||
String symbolic = "-rwxrwxrwx";
|
||||
|
||||
|
@ -132,6 +138,7 @@ public class TestFsPermission extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSymbolicPermission() {
|
||||
for (int i = 0; i < SYMBOLIC.length; ++i) {
|
||||
short val = 0777;
|
||||
|
@ -146,6 +153,7 @@ public class TestFsPermission extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUMaskParser() throws IOException {
|
||||
Configuration conf = new Configuration();
|
||||
|
||||
|
@ -163,6 +171,7 @@ public class TestFsPermission extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSymbolicUmasks() {
|
||||
Configuration conf = new Configuration();
|
||||
|
||||
|
@ -176,6 +185,7 @@ public class TestFsPermission extends TestCase {
|
|||
assertEquals(0111, FsPermission.getUMask(conf).toShort());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBadUmasks() {
|
||||
Configuration conf = new Configuration();
|
||||
|
||||
|
|
|
@ -25,7 +25,9 @@ import static org.mockito.Mockito.mock;
|
|||
import static org.mockito.Mockito.when;
|
||||
import static org.mockito.Mockito.times;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import javax.management.MBeanServer;
|
||||
import javax.management.ObjectName;
|
||||
|
@ -39,13 +41,12 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
import java.util.concurrent.BlockingQueue;
|
||||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.junit.Test;
|
||||
import org.mockito.Mockito;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException;
|
||||
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
|
||||
|
||||
public class TestFairCallQueue extends TestCase {
|
||||
public class TestFairCallQueue {
|
||||
private FairCallQueue<Schedulable> fcq;
|
||||
|
||||
private Schedulable mockCall(String id, int priority) {
|
||||
|
@ -65,6 +66,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Before
|
||||
public void setUp() {
|
||||
Configuration conf = new Configuration();
|
||||
conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
|
||||
|
@ -74,6 +76,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
|
||||
// Validate that the total capacity of all subqueues equals
|
||||
// the maxQueueSize for different values of maxQueueSize
|
||||
@Test
|
||||
public void testTotalCapacityOfSubQueues() {
|
||||
Configuration conf = new Configuration();
|
||||
FairCallQueue<Schedulable> fairCallQueue;
|
||||
|
@ -291,11 +294,12 @@ public class TestFairCallQueue extends TestCase {
|
|||
|
||||
//
|
||||
// Ensure that FairCallQueue properly implements BlockingQueue
|
||||
//
|
||||
@Test
|
||||
public void testPollReturnsNullWhenEmpty() {
|
||||
assertNull(fcq.poll());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPollReturnsTopCallWhenNotEmpty() {
|
||||
Schedulable call = mockCall("c");
|
||||
assertTrue(fcq.offer(call));
|
||||
|
@ -306,6 +310,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(0, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOfferSucceeds() {
|
||||
|
||||
for (int i = 0; i < 5; i++) {
|
||||
|
@ -316,6 +321,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(5, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOfferFailsWhenFull() {
|
||||
for (int i = 0; i < 5; i++) { assertTrue(fcq.offer(mockCall("c"))); }
|
||||
|
||||
|
@ -324,6 +330,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(5, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOfferSucceedsWhenScheduledLowPriority() {
|
||||
// Scheduler will schedule into queue 0 x 5, then queue 1
|
||||
int mockedPriorities[] = {0, 0, 0, 0, 0, 1, 0};
|
||||
|
@ -334,10 +341,12 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(6, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPeekNullWhenEmpty() {
|
||||
assertNull(fcq.peek());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPeekNonDestructive() {
|
||||
Schedulable call = mockCall("c", 0);
|
||||
assertTrue(fcq.offer(call));
|
||||
|
@ -347,6 +356,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(1, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPeekPointsAtHead() {
|
||||
Schedulable call = mockCall("c", 0);
|
||||
Schedulable next = mockCall("b", 0);
|
||||
|
@ -356,10 +366,12 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(call, fcq.peek()); // Peek points at the head
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPollTimeout() throws InterruptedException {
|
||||
assertNull(fcq.poll(10, TimeUnit.MILLISECONDS));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPollSuccess() throws InterruptedException {
|
||||
Schedulable call = mockCall("c", 0);
|
||||
assertTrue(fcq.offer(call));
|
||||
|
@ -369,6 +381,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(0, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOfferTimeout() throws InterruptedException {
|
||||
for (int i = 0; i < 5; i++) {
|
||||
assertTrue(fcq.offer(mockCall("c"), 10, TimeUnit.MILLISECONDS));
|
||||
|
@ -380,6 +393,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Test
|
||||
public void testDrainTo() {
|
||||
Configuration conf = new Configuration();
|
||||
conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
|
||||
|
@ -397,6 +411,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Test
|
||||
public void testDrainToWithLimit() {
|
||||
Configuration conf = new Configuration();
|
||||
conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
|
||||
|
@ -413,16 +428,19 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(2, fcq2.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInitialRemainingCapacity() {
|
||||
assertEquals(10, fcq.remainingCapacity());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFirstQueueFullRemainingCapacity() {
|
||||
while (fcq.offer(mockCall("c"))) ; // Queue 0 will fill up first, then queue 1
|
||||
|
||||
assertEquals(5, fcq.remainingCapacity());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAllQueuesFullRemainingCapacity() {
|
||||
int[] mockedPriorities = {0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0};
|
||||
int i = 0;
|
||||
|
@ -432,6 +450,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(10, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueuesPartialFilledRemainingCapacity() {
|
||||
int[] mockedPriorities = {0, 1, 0, 1, 0};
|
||||
for (int i = 0; i < 5; i++) { fcq.offer(mockCall("c", mockedPriorities[i])); }
|
||||
|
@ -555,12 +574,14 @@ public class TestFairCallQueue extends TestCase {
|
|||
}
|
||||
|
||||
// Make sure put will overflow into lower queues when the top is full
|
||||
@Test
|
||||
public void testPutOverflows() throws InterruptedException {
|
||||
// We can fit more than 5, even though the scheduler suggests the top queue
|
||||
assertCanPut(fcq, 8, 8);
|
||||
assertEquals(8, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPutBlocksWhenAllFull() throws InterruptedException {
|
||||
assertCanPut(fcq, 10, 10); // Fill up
|
||||
assertEquals(10, fcq.size());
|
||||
|
@ -569,10 +590,12 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertCanPut(fcq, 0, 1); // Will block
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTakeBlocksWhenEmpty() throws InterruptedException {
|
||||
assertCanTake(fcq, 0, 1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTakeRemovesCall() throws InterruptedException {
|
||||
Schedulable call = mockCall("c");
|
||||
fcq.offer(call);
|
||||
|
@ -581,6 +604,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(0, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTakeTriesNextQueue() throws InterruptedException {
|
||||
|
||||
// A mux which only draws from q 0
|
||||
|
@ -597,6 +621,7 @@ public class TestFairCallQueue extends TestCase {
|
|||
assertEquals(0, fcq.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFairCallQueueMXBean() throws Exception {
|
||||
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
|
||||
ObjectName mxbeanName = new ObjectName(
|
||||
|
|
|
@ -20,7 +20,8 @@ package org.apache.hadoop.log;
|
|||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ContainerNode;
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.util.Time;
|
||||
|
@ -33,7 +34,6 @@ import org.apache.log4j.spi.HierarchyEventListener;
|
|||
import org.apache.log4j.spi.LoggerFactory;
|
||||
import org.apache.log4j.spi.LoggerRepository;
|
||||
import org.apache.log4j.spi.ThrowableInformation;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
|
@ -42,7 +42,7 @@ import java.net.NoRouteToHostException;
|
|||
import java.util.Enumeration;
|
||||
import java.util.Vector;
|
||||
|
||||
public class TestLog4Json extends TestCase {
|
||||
public class TestLog4Json {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(TestLog4Json.class);
|
||||
|
||||
|
|
|
@ -19,15 +19,12 @@ package org.apache.hadoop.net;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestScriptBasedMapping extends TestCase {
|
||||
|
||||
|
||||
public class TestScriptBasedMapping {
|
||||
|
||||
public TestScriptBasedMapping() {
|
||||
|
||||
|
|
|
@ -19,13 +19,12 @@ package org.apache.hadoop.net;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestScriptBasedMappingWithDependency extends TestCase {
|
||||
public class TestScriptBasedMappingWithDependency {
|
||||
|
||||
|
||||
public TestScriptBasedMappingWithDependency() {
|
||||
|
|
|
@ -17,19 +17,21 @@
|
|||
package org.apache.hadoop.security;
|
||||
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import static org.junit.Assert.*;
|
||||
import org.apache.hadoop.http.HttpServer2;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.http.FilterContainer;
|
||||
import org.junit.Test;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.invocation.InvocationOnMock;
|
||||
import org.mockito.stubbing.Answer;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
public class TestAuthenticationFilter extends TestCase {
|
||||
public class TestAuthenticationFilter {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
public void testConfiguration() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
conf.set("hadoop.http.authentication.foo", "bar");
|
||||
|
|
|
@ -16,8 +16,8 @@
|
|||
*/
|
||||
package org.apache.hadoop.security;
|
||||
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
import org.apache.hadoop.http.HttpServer2;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.http.FilterContainer;
|
||||
|
@ -30,9 +30,10 @@ import java.util.Map;
|
|||
* This class is tested for {@link AuthenticationWithProxyUserFilter}
|
||||
* to verify configurations of this filter.
|
||||
*/
|
||||
public class TestAuthenticationWithProxyUserFilter extends TestCase {
|
||||
public class TestAuthenticationWithProxyUserFilter {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
public void testConfiguration() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
conf.set("hadoop.http.authentication.foo", "bar");
|
||||
|
|
|
@ -21,17 +21,18 @@ import java.io.IOException;
|
|||
import java.net.InetAddress;
|
||||
import java.util.Map;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.security.WhitelistBasedResolver;
|
||||
import org.apache.hadoop.util.TestFileBasedIPList;
|
||||
|
||||
public class TestWhitelistBasedResolver extends TestCase {
|
||||
public class TestWhitelistBasedResolver {
|
||||
|
||||
public static final Map<String, String> SASL_PRIVACY_PROPS =
|
||||
WhitelistBasedResolver.getSaslProperties(new Configuration());
|
||||
|
||||
@Test
|
||||
public void testFixedVariableAndLocalWhiteList() throws IOException {
|
||||
|
||||
String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};
|
||||
|
@ -79,6 +80,7 @@ public class TestWhitelistBasedResolver extends TestCase {
|
|||
* Check for inclusion in whitelist
|
||||
* Check for exclusion from whitelist
|
||||
*/
|
||||
@Test
|
||||
public void testFixedAndLocalWhiteList() throws IOException {
|
||||
|
||||
String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};
|
||||
|
@ -128,6 +130,7 @@ public class TestWhitelistBasedResolver extends TestCase {
|
|||
* Add a bunch of subnets and IPSs to the whitelist
|
||||
* Check for inclusion in whitelist with a null value
|
||||
*/
|
||||
@Test
|
||||
public void testNullIPAddress() throws IOException {
|
||||
|
||||
String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};
|
||||
|
|
|
@ -25,11 +25,12 @@ import org.apache.hadoop.io.*;
|
|||
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
|
||||
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
|
||||
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenSecretManager;
|
||||
import org.junit.Test;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/** Unit tests for Token */
|
||||
public class TestToken extends TestCase {
|
||||
public class TestToken {
|
||||
|
||||
static boolean isEqual(Object a, Object b) {
|
||||
return a == null ? b == null : a.equals(b);
|
||||
|
@ -45,6 +46,7 @@ public class TestToken extends TestCase {
|
|||
/**
|
||||
* Test token serialization
|
||||
*/
|
||||
@Test
|
||||
public void testTokenSerialization() throws IOException {
|
||||
// Get a token
|
||||
Token<TokenIdentifier> sourceToken = new Token<TokenIdentifier>();
|
||||
|
@ -76,7 +78,8 @@ public class TestToken extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public static void testEncodeWritable() throws Exception {
|
||||
@Test
|
||||
public void testEncodeWritable() throws Exception {
|
||||
String[] values = new String[]{"", "a", "bb", "ccc", "dddd", "eeeee",
|
||||
"ffffff", "ggggggg", "hhhhhhhh", "iiiiiiiii",
|
||||
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLM" +
|
||||
|
@ -96,7 +99,8 @@ public class TestToken extends TestCase {
|
|||
checkUrlSafe(encode);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDecodeIdentifier() throws IOException {
|
||||
TestDelegationTokenSecretManager secretManager =
|
||||
new TestDelegationTokenSecretManager(0, 0, 0, 0);
|
||||
|
|
|
@ -17,17 +17,15 @@
|
|||
*/
|
||||
package org.apache.hadoop.util;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.hadoop.util.AsyncDiskService;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* A test for AsyncDiskService.
|
||||
*/
|
||||
public class TestAsyncDiskService extends TestCase {
|
||||
public class TestAsyncDiskService {
|
||||
|
||||
public static final Logger LOG =
|
||||
LoggerFactory.getLogger(TestAsyncDiskService.class);
|
||||
|
|
|
@ -18,14 +18,11 @@
|
|||
package org.apache.hadoop.util;
|
||||
|
||||
import java.io.IOException;
|
||||
import org.junit.Test;
|
||||
|
||||
import org.apache.hadoop.util.CacheableIPList;
|
||||
import org.apache.hadoop.util.FileBasedIPList;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
public class TestCacheableIPList extends TestCase {
|
||||
public class TestCacheableIPList {
|
||||
|
||||
/**
|
||||
* Add a bunch of subnets and IPSs to the file
|
||||
|
@ -37,6 +34,7 @@ public class TestCacheableIPList extends TestCase {
|
|||
* test for inclusion
|
||||
* Check for exclusion
|
||||
*/
|
||||
@Test
|
||||
public void testAddWithSleepForCacheTimeout() throws IOException, InterruptedException {
|
||||
|
||||
String[] ips = {"10.119.103.112", "10.221.102.0/23", "10.113.221.221"};
|
||||
|
@ -76,6 +74,7 @@ public class TestCacheableIPList extends TestCase {
|
|||
* test for inclusion
|
||||
* Check for exclusion
|
||||
*/
|
||||
@Test
|
||||
public void testRemovalWithSleepForCacheTimeout() throws IOException, InterruptedException {
|
||||
|
||||
String[] ips = {"10.119.103.112", "10.221.102.0/23",
|
||||
|
@ -115,6 +114,7 @@ public class TestCacheableIPList extends TestCase {
|
|||
* test for inclusion
|
||||
* Check for exclusion
|
||||
*/
|
||||
@Test
|
||||
public void testAddWithRefresh() throws IOException, InterruptedException {
|
||||
|
||||
String[] ips = {"10.119.103.112", "10.221.102.0/23", "10.113.221.221"};
|
||||
|
@ -154,6 +154,7 @@ public class TestCacheableIPList extends TestCase {
|
|||
* test for inclusion
|
||||
* Check for exclusion
|
||||
*/
|
||||
@Test
|
||||
public void testRemovalWithRefresh() throws IOException, InterruptedException {
|
||||
|
||||
String[] ips = {"10.119.103.112", "10.221.102.0/23",
|
||||
|
|
|
@ -22,14 +22,11 @@ import java.io.IOException;
|
|||
import java.util.Arrays;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.hadoop.util.FileBasedIPList;
|
||||
import org.apache.hadoop.util.IPList;
|
||||
import org.junit.After;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
public class TestFileBasedIPList extends TestCase {
|
||||
public class TestFileBasedIPList {
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
|
@ -127,6 +124,7 @@ public class TestFileBasedIPList extends TestCase {
|
|||
* test for inclusion
|
||||
* should be true as if the feature is turned off
|
||||
*/
|
||||
@Test
|
||||
public void testFileNotSpecified() {
|
||||
|
||||
IPList ipl = new FileBasedIPList(null);
|
||||
|
@ -140,6 +138,7 @@ public class TestFileBasedIPList extends TestCase {
|
|||
* test for inclusion
|
||||
* should be true as if the feature is turned off
|
||||
*/
|
||||
@Test
|
||||
public void testFileMissing() {
|
||||
|
||||
IPList ipl = new FileBasedIPList("missingips.txt");
|
||||
|
@ -153,6 +152,7 @@ public class TestFileBasedIPList extends TestCase {
|
|||
* test for inclusion
|
||||
* should be true as if the feature is turned off
|
||||
*/
|
||||
@Test
|
||||
public void testWithEmptyList() throws IOException {
|
||||
String[] ips = {};
|
||||
|
||||
|
@ -168,6 +168,7 @@ public class TestFileBasedIPList extends TestCase {
|
|||
* test for inclusion
|
||||
* should be true as if the feature is turned off
|
||||
*/
|
||||
@Test
|
||||
public void testForBadFIle() throws IOException {
|
||||
String[] ips = { "10.221.102/23"};
|
||||
|
||||
|
@ -187,6 +188,7 @@ public class TestFileBasedIPList extends TestCase {
|
|||
* Check for inclusion with good entries
|
||||
* Check for exclusion
|
||||
*/
|
||||
@Test
|
||||
public void testWithAWrongEntry() throws IOException {
|
||||
|
||||
String[] ips = {"10.119.103.112", "10.221.102/23", "10.221.204.1/23"};
|
||||
|
|
|
@ -19,7 +19,7 @@ package org.apache.hadoop.util;
|
|||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import junit.framework.Assert;
|
||||
import org.junit.Assert;
|
||||
import org.apache.hadoop.util.FindClass;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
import org.junit.Test;
|
||||
|
|
|
@ -21,12 +21,14 @@ package org.apache.hadoop.util;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
||||
public class TestGenericsUtil extends TestCase {
|
||||
public class TestGenericsUtil {
|
||||
|
||||
@Test
|
||||
public void testToArray() {
|
||||
|
||||
//test a list of size 10
|
||||
|
@ -45,6 +47,7 @@ public class TestGenericsUtil extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithEmptyList() {
|
||||
try {
|
||||
List<String> list = new ArrayList<String>();
|
||||
|
@ -57,6 +60,7 @@ public class TestGenericsUtil extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithEmptyList2() {
|
||||
List<String> list = new ArrayList<String>();
|
||||
//this method should not throw IndexOutOfBoundsException
|
||||
|
@ -81,6 +85,7 @@ public class TestGenericsUtil extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithGenericClass() {
|
||||
|
||||
GenericClass<String> testSubject = new GenericClass<String>();
|
||||
|
@ -102,6 +107,7 @@ public class TestGenericsUtil extends TestCase {
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGenericOptionsParser() throws Exception {
|
||||
GenericOptionsParser parser = new GenericOptionsParser(
|
||||
new Configuration(), new String[] {"-jt"});
|
||||
|
@ -116,6 +122,7 @@ public class TestGenericsUtil extends TestCase {
|
|||
"y=z", parser.getConfiguration().get("x"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetClass() {
|
||||
|
||||
//test with Integer
|
||||
|
@ -131,6 +138,7 @@ public class TestGenericsUtil extends TestCase {
|
|||
GenericClass.class, c2);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIsLog4jLogger() throws Exception {
|
||||
assertFalse("False if clazz is null", GenericsUtil.isLog4jLogger(null));
|
||||
assertTrue("The implementation is Log4j",
|
||||
|
|
|
@ -21,14 +21,15 @@ import java.io.IOException;
|
|||
import java.util.Arrays;
|
||||
import java.util.Random;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.io.DataInputBuffer;
|
||||
import org.apache.hadoop.io.DataOutputBuffer;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.io.WritableComparator;
|
||||
|
||||
public class TestIndexedSort extends TestCase {
|
||||
public class TestIndexedSort {
|
||||
|
||||
public void sortAllEqual(IndexedSorter sorter) throws Exception {
|
||||
final int SAMPLE = 500;
|
||||
|
@ -128,6 +129,7 @@ public class TestIndexedSort extends TestCase {
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testQuickSort() throws Exception {
|
||||
QuickSort sorter = new QuickSort();
|
||||
sortRandom(sorter);
|
||||
|
@ -158,6 +160,7 @@ public class TestIndexedSort extends TestCase {
|
|||
assertTrue(Arrays.equals(values, check));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHeapSort() throws Exception {
|
||||
HeapSort sorter = new HeapSort();
|
||||
sortRandom(sorter);
|
||||
|
|
|
@ -19,13 +19,13 @@ package org.apache.hadoop.util;
|
|||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.util.ExitUtil.ExitException;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestNativeLibraryChecker extends TestCase {
|
||||
|
||||
public class TestNativeLibraryChecker {
|
||||
private void expectExit(String [] args) {
|
||||
try {
|
||||
// should throw exit exception
|
||||
|
|
|
@ -28,7 +28,8 @@ import java.util.jar.JarOutputStream;
|
|||
import java.util.zip.ZipEntry;
|
||||
|
||||
import org.junit.Assert;
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
@ -49,8 +50,6 @@ import org.apache.hadoop.mapreduce.TaskInputOutputContext;
|
|||
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* Tests the use of the
|
||||
* {@link org.apache.hadoop.mapreduce.filecache.DistributedCache} within the
|
||||
|
@ -66,7 +65,7 @@ import org.junit.Test;
|
|||
* This test is not fast: it uses MiniMRCluster.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class TestMRWithDistributedCache extends TestCase {
|
||||
public class TestMRWithDistributedCache {
|
||||
private static Path TEST_ROOT_DIR =
|
||||
new Path(System.getProperty("test.build.data","/tmp"));
|
||||
private static File symlinkFile = new File("distributed.first.symlink");
|
||||
|
@ -97,23 +96,23 @@ public class TestMRWithDistributedCache extends TestCase {
|
|||
FileSystem fs = LocalFileSystem.get(conf);
|
||||
|
||||
// Check that 2 files and 2 archives are present
|
||||
TestCase.assertEquals(2, localFiles.length);
|
||||
TestCase.assertEquals(2, localArchives.length);
|
||||
TestCase.assertEquals(2, files.length);
|
||||
TestCase.assertEquals(2, archives.length);
|
||||
Assert.assertEquals(2, localFiles.length);
|
||||
Assert.assertEquals(2, localArchives.length);
|
||||
Assert.assertEquals(2, files.length);
|
||||
Assert.assertEquals(2, archives.length);
|
||||
|
||||
// Check the file name
|
||||
TestCase.assertTrue(files[0].getPath().endsWith("distributed.first"));
|
||||
TestCase.assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
|
||||
Assert.assertTrue(files[0].getPath().endsWith("distributed.first"));
|
||||
Assert.assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
|
||||
|
||||
// Check lengths of the files
|
||||
TestCase.assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
|
||||
TestCase.assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
|
||||
Assert.assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
|
||||
Assert.assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
|
||||
|
||||
// Check extraction of the archive
|
||||
TestCase.assertTrue(fs.exists(new Path(localArchives[0],
|
||||
Assert.assertTrue(fs.exists(new Path(localArchives[0],
|
||||
"distributed.jar.inside3")));
|
||||
TestCase.assertTrue(fs.exists(new Path(localArchives[1],
|
||||
Assert.assertTrue(fs.exists(new Path(localArchives[1],
|
||||
"distributed.jar.inside4")));
|
||||
|
||||
// Check the class loaders
|
||||
|
@ -121,18 +120,18 @@ public class TestMRWithDistributedCache extends TestCase {
|
|||
ClassLoader cl = Thread.currentThread().getContextClassLoader();
|
||||
// Both the file and the archive were added to classpath, so both
|
||||
// should be reachable via the class loader.
|
||||
TestCase.assertNotNull(cl.getResource("distributed.jar.inside2"));
|
||||
TestCase.assertNotNull(cl.getResource("distributed.jar.inside3"));
|
||||
TestCase.assertNull(cl.getResource("distributed.jar.inside4"));
|
||||
Assert.assertNotNull(cl.getResource("distributed.jar.inside2"));
|
||||
Assert.assertNotNull(cl.getResource("distributed.jar.inside3"));
|
||||
Assert.assertNull(cl.getResource("distributed.jar.inside4"));
|
||||
|
||||
// Check that the symlink for the renaming was created in the cwd;
|
||||
TestCase.assertTrue("symlink distributed.first.symlink doesn't exist",
|
||||
Assert.assertTrue("symlink distributed.first.symlink doesn't exist",
|
||||
symlinkFile.exists());
|
||||
TestCase.assertEquals("symlink distributed.first.symlink length not 1", 1,
|
||||
Assert.assertEquals("symlink distributed.first.symlink length not 1", 1,
|
||||
symlinkFile.length());
|
||||
|
||||
//This last one is a difference between MRv2 and MRv1
|
||||
TestCase.assertTrue("second file should be symlinked too",
|
||||
Assert.assertTrue("second file should be symlinked too",
|
||||
expectedAbsentSymlinkFile.exists());
|
||||
}
|
||||
|
||||
|
@ -188,6 +187,7 @@ public class TestMRWithDistributedCache extends TestCase {
|
|||
}
|
||||
|
||||
/** Tests using the local job runner. */
|
||||
@Test
|
||||
public void testLocalJobRunner() throws Exception {
|
||||
symlinkFile.delete(); // ensure symlink is not present (e.g. if test is
|
||||
// killed part way through)
|
||||
|
|
|
@ -23,7 +23,8 @@ import java.io.FileInputStream;
|
|||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
import org.junit.Assert;
|
||||
|
||||
import org.apache.hadoop.fs.FileStatus;
|
||||
|
@ -38,7 +39,7 @@ import org.apache.hadoop.io.Text;
|
|||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public class TestFileOutputCommitter extends TestCase {
|
||||
public class TestFileOutputCommitter {
|
||||
private static Path outDir = new Path(System.getProperty("test.build.data",
|
||||
"/tmp"), "output");
|
||||
|
||||
|
@ -153,14 +154,18 @@ public class TestFileOutputCommitter extends TestCase {
|
|||
validateContent(outDir);
|
||||
FileUtil.fullyDelete(new File(outDir.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRecoveryV1() throws Exception {
|
||||
testRecoveryInternal(1, 1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRecoveryV2() throws Exception {
|
||||
testRecoveryInternal(2, 2);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRecoveryUpgradeV1V2() throws Exception {
|
||||
testRecoveryInternal(1, 2);
|
||||
}
|
||||
|
@ -203,11 +208,13 @@ public class TestFileOutputCommitter extends TestCase {
|
|||
assert(dataFileFound && indexFileFound);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCommitterWithFailureV1() throws Exception {
|
||||
testCommitterWithFailureInternal(1, 1);
|
||||
testCommitterWithFailureInternal(1, 2);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCommitterWithFailureV2() throws Exception {
|
||||
testCommitterWithFailureInternal(2, 1);
|
||||
testCommitterWithFailureInternal(2, 2);
|
||||
|
@ -256,10 +263,12 @@ public class TestFileOutputCommitter extends TestCase {
|
|||
FileUtil.fullyDelete(new File(outDir.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCommitterWithDuplicatedCommitV1() throws Exception {
|
||||
testCommitterWithDuplicatedCommitInternal(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCommitterWithDuplicatedCommitV2() throws Exception {
|
||||
testCommitterWithDuplicatedCommitInternal(2);
|
||||
}
|
||||
|
@ -340,10 +349,12 @@ public class TestFileOutputCommitter extends TestCase {
|
|||
FileUtil.fullyDelete(new File(outDir.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCommitterV1() throws Exception {
|
||||
testCommitterInternal(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCommitterV2() throws Exception {
|
||||
testCommitterInternal(2);
|
||||
}
|
||||
|
@ -380,18 +391,22 @@ public class TestFileOutputCommitter extends TestCase {
|
|||
FileUtil.fullyDelete(new File(outDir.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMapFileOutputCommitterV1() throws Exception {
|
||||
testMapFileOutputCommitterInternal(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMapFileOutputCommitterV2() throws Exception {
|
||||
testMapFileOutputCommitterInternal(2);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMapOnlyNoOutputV1() throws Exception {
|
||||
testMapOnlyNoOutputInternal(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMapOnlyNoOutputV2() throws Exception {
|
||||
testMapOnlyNoOutputInternal(2);
|
||||
}
|
||||
|
@ -456,10 +471,12 @@ public class TestFileOutputCommitter extends TestCase {
|
|||
FileUtil.fullyDelete(out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAbortV1() throws Exception {
|
||||
testAbortInternal(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAbortV2() throws Exception {
|
||||
testAbortInternal(2);
|
||||
}
|
||||
|
@ -537,10 +554,12 @@ public class TestFileOutputCommitter extends TestCase {
|
|||
FileUtil.fullyDelete(new File(outDir.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFailAbortV1() throws Exception {
|
||||
testFailAbortInternal(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFailAbortV2() throws Exception {
|
||||
testFailAbortInternal(2);
|
||||
}
|
||||
|
|
|
@ -32,14 +32,16 @@ import org.apache.hadoop.fs.FSDataOutputStream;
|
|||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class TestIndexCache extends TestCase {
|
||||
public class TestIndexCache {
|
||||
private JobConf conf;
|
||||
private FileSystem fs;
|
||||
private Path p;
|
||||
|
||||
@Override
|
||||
@Before
|
||||
public void setUp() throws IOException {
|
||||
conf = new JobConf();
|
||||
fs = FileSystem.getLocal(conf).getRaw();
|
||||
|
@ -47,6 +49,7 @@ public class TestIndexCache extends TestCase {
|
|||
"cache").makeQualified(fs.getUri(), fs.getWorkingDirectory());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLRCPolicy() throws Exception {
|
||||
Random r = new Random();
|
||||
long seed = r.nextLong();
|
||||
|
@ -120,6 +123,7 @@ public class TestIndexCache extends TestCase {
|
|||
checkRecord(rec, totalsize);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBadIndex() throws Exception {
|
||||
final int parts = 30;
|
||||
fs.delete(p, true);
|
||||
|
@ -152,6 +156,7 @@ public class TestIndexCache extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInvalidReduceNumberOrLength() throws Exception {
|
||||
fs.delete(p, true);
|
||||
conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
|
||||
|
@ -192,6 +197,7 @@ public class TestIndexCache extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemoveMap() throws Exception {
|
||||
// This test case use two thread to call getIndexInformation and
|
||||
// removeMap concurrently, in order to construct race condition.
|
||||
|
@ -241,7 +247,8 @@ public class TestIndexCache extends TestCase {
|
|||
assertEquals(true, cache.checkTotalMemoryUsed());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCreateRace() throws Exception {
|
||||
fs.delete(p, true);
|
||||
conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
|
||||
|
|
|
@ -31,12 +31,15 @@ import javax.servlet.http.HttpServlet;
|
|||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.http.HttpServer2;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestJobEndNotifier extends TestCase {
|
||||
public class TestJobEndNotifier {
|
||||
HttpServer2 server;
|
||||
URL baseUrl;
|
||||
|
||||
|
@ -99,6 +102,7 @@ public class TestJobEndNotifier extends TestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
new File(System.getProperty("build.webapps", "build/webapps") + "/test"
|
||||
).mkdirs();
|
||||
|
@ -118,6 +122,7 @@ public class TestJobEndNotifier extends TestCase {
|
|||
FailServlet.calledTimes = 0;
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() throws Exception {
|
||||
server.stop();
|
||||
}
|
||||
|
@ -125,6 +130,7 @@ public class TestJobEndNotifier extends TestCase {
|
|||
/**
|
||||
* Basic validation for localRunnerNotification.
|
||||
*/
|
||||
@Test
|
||||
public void testLocalJobRunnerUriSubstitution() throws InterruptedException {
|
||||
JobStatus jobStatus = createTestJobStatus(
|
||||
"job_20130313155005308_0001", JobStatus.SUCCEEDED);
|
||||
|
@ -145,6 +151,7 @@ public class TestJobEndNotifier extends TestCase {
|
|||
/**
|
||||
* Validate job.end.retry.attempts for the localJobRunner.
|
||||
*/
|
||||
@Test
|
||||
public void testLocalJobRunnerRetryCount() throws InterruptedException {
|
||||
int retryAttempts = 3;
|
||||
JobStatus jobStatus = createTestJobStatus(
|
||||
|
@ -161,6 +168,7 @@ public class TestJobEndNotifier extends TestCase {
|
|||
* Validate that the notification times out after reaching
|
||||
* mapreduce.job.end-notification.timeout.
|
||||
*/
|
||||
@Test
|
||||
public void testNotificationTimeout() throws InterruptedException {
|
||||
Configuration conf = new Configuration();
|
||||
// Reduce the timeout to 1 second
|
||||
|
|
|
@ -33,7 +33,9 @@ import java.io.IOException;
|
|||
import java.io.LineNumberReader;
|
||||
import java.io.StringReader;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.mapred.TaskReport;
|
||||
|
@ -43,8 +45,6 @@ import org.apache.log4j.Layout;
|
|||
import org.apache.log4j.Level;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.log4j.WriterAppender;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.mockito.invocation.InvocationOnMock;
|
||||
import org.mockito.stubbing.Answer;
|
||||
|
||||
|
@ -53,7 +53,7 @@ import org.mockito.stubbing.Answer;
|
|||
* job monitoring is correct and prints 100% for map and reduce before
|
||||
* successful completion.
|
||||
*/
|
||||
public class TestJobMonitorAndPrint extends TestCase {
|
||||
public class TestJobMonitorAndPrint {
|
||||
private Job job;
|
||||
private Configuration conf;
|
||||
private ClientProtocol clientProtocol;
|
||||
|
|
|
@@ -27,7 +27,10 @@ import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.junit.Assert;

@@ -55,7 +58,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

@SuppressWarnings("unchecked")
public class TestFileOutputCommitter extends TestCase {
public class TestFileOutputCommitter {
  private static final Path outDir = new Path(
      System.getProperty("test.build.data",
          System.getProperty("java.io.tmpdir")),

@@ -87,12 +90,12 @@ public class TestFileOutputCommitter extends TestCase {
    fs.delete(outDir, true);
  }

  @Override
  @Before
  public void setUp() throws IOException {
    cleanup();
  }

  @Override

  @After
  public void tearDown() throws IOException {
    cleanup();
  }

@@ -195,14 +198,17 @@ public class TestFileOutputCommitter extends TestCase {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }

  @Test
  public void testRecoveryV1() throws Exception {
    testRecoveryInternal(1, 1);
  }

  @Test
  public void testRecoveryV2() throws Exception {
    testRecoveryInternal(2, 2);
  }

  @Test
  public void testRecoveryUpgradeV1V2() throws Exception {
    testRecoveryInternal(1, 2);
  }

@@ -278,18 +284,22 @@ public class TestFileOutputCommitter extends TestCase {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }

  @Test
  public void testCommitterV1() throws Exception {
    testCommitterInternal(1);
  }

  @Test
  public void testCommitterV2() throws Exception {
    testCommitterInternal(2);
  }


  @Test
  public void testCommitterWithDuplicatedCommitV1() throws Exception {
    testCommitterWithDuplicatedCommitInternal(1);
  }

  @Test
  public void testCommitterWithDuplicatedCommitV2() throws Exception {
    testCommitterWithDuplicatedCommitInternal(2);
  }

@@ -336,11 +346,13 @@ public class TestFileOutputCommitter extends TestCase {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }

  @Test
  public void testCommitterWithFailureV1() throws Exception {
    testCommitterWithFailureInternal(1, 1);
    testCommitterWithFailureInternal(1, 2);
  }

  @Test
  public void testCommitterWithFailureV2() throws Exception {
    testCommitterWithFailureInternal(2, 1);
    testCommitterWithFailureInternal(2, 2);

@@ -390,10 +402,12 @@ public class TestFileOutputCommitter extends TestCase {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }

  @Test
  public void testCommitterRepeatableV1() throws Exception {
    testCommitterRetryInternal(1);
  }

  @Test
  public void testCommitterRepeatableV2() throws Exception {
    testCommitterRetryInternal(2);
  }

@@ -493,14 +507,17 @@ public class TestFileOutputCommitter extends TestCase {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }

  @Test
  public void testMapFileOutputCommitterV1() throws Exception {
    testMapFileOutputCommitterInternal(1);
  }


  @Test
  public void testMapFileOutputCommitterV2() throws Exception {
    testMapFileOutputCommitterInternal(2);
  }

  @Test
  public void testInvalidVersionNumber() throws IOException {
    Job job = Job.getInstance();
    FileOutputFormat.setOutputPath(job, outDir);

@@ -552,10 +569,12 @@ public class TestFileOutputCommitter extends TestCase {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }

  @Test
  public void testAbortV1() throws IOException, InterruptedException {
    testAbortInternal(1);
  }

  @Test
  public void testAbortV2() throws IOException, InterruptedException {
    testAbortInternal(2);
  }

@@ -575,7 +594,7 @@ public class TestFileOutputCommitter extends TestCase {
    }
  }

  private void testFailAbortInternal(int version)
      throws IOException, InterruptedException {
    Job job = Job.getInstance();

@@ -631,10 +650,12 @@ public class TestFileOutputCommitter extends TestCase {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }

  @Test
  public void testFailAbortV1() throws Exception {
    testFailAbortInternal(1);
  }

  @Test
  public void testFailAbortV2() throws Exception {
    testFailAbortInternal(2);
  }

@@ -732,10 +753,12 @@ public class TestFileOutputCommitter extends TestCase {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }

  @Test
  public void testConcurrentCommitTaskWithSubDirV1() throws Exception {
    testConcurrentCommitTaskWithSubDir(1);
  }

  @Test
  public void testConcurrentCommitTaskWithSubDirV2() throws Exception {
    testConcurrentCommitTaskWithSubDir(2);
  }

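The setUp/tearDown hunk above shows the second recurring pattern of this commit: JUnit 3 lifecycle overrides become annotated methods, so @Override disappears and the methods no longer need their magic names. A hedged sketch of the idiom, with cleanup() standing in for whatever per-test state the real class resets:

import java.io.IOException;
import org.junit.After;
import org.junit.Before;

public class LifecycleExample {
  // JUnit 4 runs every @Before method before each test and every
  // @After method after it, even when the test body throws.
  @Before
  public void setUp() throws IOException {
    cleanup();
  }

  @After
  public void tearDown() throws IOException {
    cleanup();
  }

  private void cleanup() throws IOException {
    // placeholder: delete output directories, reset shared fields, etc.
  }
}
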
@@ -19,7 +19,8 @@
package org.apache.hadoop.mapreduce.lib.output;

import java.io.IOException;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

@@ -28,8 +29,9 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class TestFileOutputFormat extends TestCase {
public class TestFileOutputFormat {

  @Test
  public void testSetOutputPathException() throws Exception {
    Job job = Job.getInstance();
    try {

@@ -42,6 +44,7 @@ public class TestFileOutputFormat extends TestCase {
    }
  }

  @Test
  public void testCheckOutputSpecsException() throws Exception {
    Job job = Job.getInstance();
    Path outDir = new Path(System.getProperty("test.build.data", "/tmp"),

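testSetOutputPathException above keeps its JUnit 3-era try/catch structure and only gains the @Test annotation. Worth noting, though not what this patch does: JUnit 4 can also express the same intent declaratively via the annotation's expected attribute. A hedged sketch with hypothetical names:

import java.io.IOException;
import org.junit.Test;

public class ExpectedExceptionExample {
  // Passes only if the body throws IOException; replaces the
  // try { ... fail(); } catch (IOException expected) {} pattern.
  @Test(expected = IOException.class)
  public void testRejectsBadInput() throws IOException {
    throw new IOException("always thrown, so this test passes");
  }
}
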
@@ -38,7 +38,7 @@ import org.junit.Test;
 * This class performs unit test for Job/JobControl classes.
 *
 */
public class TestJobControl extends junit.framework.TestCase {
public class TestJobControl {

/**
 * This is a main function for testing JobControl class.

@@ -263,13 +263,13 @@ public class TestJobControl extends junit.framework.TestCase {
    JobConf jc = new JobConf();
    Job j = new Job(jc);
    //Just make sure no exception is thrown
    assertNull(j.getAssignedJobID());
    Assert.assertNull(j.getAssignedJobID());
    org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class);
    org.apache.hadoop.mapreduce.JobID jid = new org.apache.hadoop.mapreduce.JobID("test",0);
    when(mockjob.getJobID()).thenReturn(jid);
    j.setJob(mockjob);
    JobID expected = new JobID("test",0);
    assertEquals(expected, j.getAssignedJobID());
    Assert.assertEquals(expected, j.getAssignedJobID());
    verify(mockjob).getJobID();
  }

@@ -17,9 +17,6 @@
 */

package org.apache.hadoop.mapreduce;

import junit.framework.TestCase;

import java.io.IOException;
import java.io.DataInput;
import java.io.DataOutput;

@@ -27,9 +24,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;

@@ -20,8 +20,8 @@ package org.apache.hadoop.mapreduce.lib.input;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;

import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

@@ -30,9 +30,10 @@ import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;

public class TestDelegatingInputFormat extends TestCase {
public class TestDelegatingInputFormat {

  @SuppressWarnings("unchecked")
  @Test
  public void testSplitting() throws Exception {
    Job job = Job.getInstance();
    MiniDFSCluster dfs = null;

@@ -95,7 +95,7 @@ public class TestMapReduceJobControl extends HadoopTestCase {
    cjob2 = new ControlledJob(job2, dependingJobs);

    Job job3 = MapReduceTestUtil.createCopyJob(conf, outdir_3,
      outdir_1, outdir_2);
        outdir_1, outdir_2);
    dependingJobs = new ArrayList<ControlledJob>();
    dependingJobs.add(cjob1);
    dependingJobs.add(cjob2);

@@ -21,7 +21,10 @@ package org.apache.hadoop.mapreduce.lib.output;
import java.io.*;
import java.net.URI;

import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

@@ -38,7 +41,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;


public class TestMRCJCFileOutputCommitter extends TestCase {
public class TestMRCJCFileOutputCommitter {
  private static Path outDir = new Path(System.getProperty("test.build.data",
      "/tmp"), "output");

@@ -76,17 +79,18 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
    fs.delete(outDir, true);
  }

  @Override
  @Before
  public void setUp() throws IOException {
    cleanup();
  }

  @Override
  @After
  public void tearDown() throws IOException {
    cleanup();
  }

  @SuppressWarnings("unchecked")
  @Test
  public void testCommitter() throws Exception {
    Job job = Job.getInstance();
    FileOutputFormat.setOutputPath(job, outDir);

@@ -122,7 +126,8 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
  }


  @Test
  public void testEmptyOutput() throws Exception {
    Job job = Job.getInstance();
    FileOutputFormat.setOutputPath(job, outDir);

@@ -146,6 +151,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
  }

  @SuppressWarnings("unchecked")
  @Test
  public void testAbort() throws IOException, InterruptedException {
    Job job = Job.getInstance();
    FileOutputFormat.setOutputPath(job, outDir);

@@ -195,6 +201,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
  }

  @SuppressWarnings("unchecked")
  @Test
  public void testFailAbort() throws IOException, InterruptedException {
    Job job = Job.getInstance();
    Configuration conf = job.getConfiguration();

@@ -22,23 +22,30 @@ import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

import junit.framework.TestCase;
import org.junit.Test;
import org.junit.Assert;

public class TestTaskContext extends TestCase {

public class TestTaskContext {

  @Test
  public void testTaskContext() {
    TaskContext context = new TaskContext(null, null, null, null, null, null, null);
    TaskContext context = new TaskContext(null, null, null, null, null, null,
        null);

    context.setInputKeyClass(IntWritable.class);
    assertEquals(IntWritable.class.getName(), context.getInputKeyClass().getName());
    Assert.assertEquals(IntWritable.class.getName(), context.getInputKeyClass
        ().getName());

    context.setInputValueClass(Text.class);
    assertEquals(Text.class.getName(), context.getInputValueClass().getName());
    Assert.assertEquals(Text.class.getName(), context.getInputValueClass()
        .getName());

    context.setOutputKeyClass(LongWritable.class);
    assertEquals(LongWritable.class.getName(), context.getOutputKeyClass().getName());
    Assert.assertEquals(LongWritable.class.getName(), context
        .getOutputKeyClass().getName());

    context.setOutputValueClass(FloatWritable.class);
    assertEquals(FloatWritable.class.getName(), context.getOutputValueClass().getName());
    Assert.assertEquals(FloatWritable.class.getName(), context
        .getOutputValueClass().getName());
  }
}

@@ -19,11 +19,12 @@ package org.apache.hadoop.mapred.nativetask.buffer;

import java.io.IOException;

import junit.framework.TestCase;

import org.junit.Test;
import org.junit.Assert;

public class TestInputBuffer extends TestCase {
public class TestInputBuffer {

  @Test
  public void testInputBuffer() throws IOException {
    final int size = 100;
    final InputBuffer input1 = new InputBuffer(BufferType.DIRECT_BUFFER, size);

@@ -17,11 +17,12 @@
 */
package org.apache.hadoop.mapred.nativetask.buffer;

import junit.framework.TestCase;

import org.junit.Test;
import org.junit.Assert;

public class TestOutputBuffer extends TestCase {
public class TestOutputBuffer {

  @Test
  public void testOutputBuffer() {
    final int size = 100;
    final OutputBuffer output1 = new OutputBuffer(BufferType.DIRECT_BUFFER, size);

@@ -20,7 +20,8 @@ package org.apache.hadoop.mapred.nativetask.serde;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.nativetask.Constants;

@@ -30,12 +31,11 @@ import org.apache.hadoop.mapred.nativetask.testutil.TestInput;
import org.apache.hadoop.mapred.nativetask.testutil.TestInput.KV;
import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
import org.junit.Assert;
import org.junit.Before;
import org.mockito.Matchers;
import org.mockito.Mockito;

@SuppressWarnings({ "rawtypes", "unchecked" })
public class TestKVSerializer extends TestCase {
public class TestKVSerializer {

  int inputArraySize = 1000; // 1000 bytesWriable elements
  int bufferSize = 100; // bytes

@@ -46,7 +46,6 @@ public class TestKVSerializer extends TestCase {
  private SizedWritable value;
  private KVSerializer serializer;

  @Override
  @Before
  public void setUp() throws IOException {
    this.inputArray = TestInput.getMapInputs(inputArraySize);

@@ -60,6 +59,7 @@ public class TestKVSerializer extends TestCase {
    serializer.updateLength(key, value);
  }

  @Test
  public void testUpdateLength() throws IOException {
    Mockito.mock(DataOutputStream.class);

@@ -75,6 +75,7 @@ public class TestKVSerializer extends TestCase {
    }
  }

  @Test
  public void testSerializeKV() throws IOException {
    final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);

@@ -92,6 +93,7 @@ public class TestKVSerializer extends TestCase {
    Assert.assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH);
  }

  @Test
  public void testSerializeNoFlush() throws IOException {
    final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);

@@ -109,6 +111,7 @@ public class TestKVSerializer extends TestCase {
    Assert.assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH);
  }

  @Test
  public void testSerializePartitionKV() throws IOException {
    final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);

@@ -130,12 +133,14 @@ public class TestKVSerializer extends TestCase {
        + Constants.SIZEOF_PARTITION_LENGTH);
  }

  @Test
  public void testDeserializerNoData() throws IOException {
    final DataInputStream in = Mockito.mock(DataInputStream.class);
    Mockito.when(in.hasUnReadData()).thenReturn(false);
    Assert.assertEquals(0, serializer.deserializeKV(in, key, value));
  }

  @Test
  public void testDeserializer() throws IOException {
    final DataInputStream in = Mockito.mock(DataInputStream.class);
    Mockito.when(in.hasUnReadData()).thenReturn(true);

@@ -17,15 +17,16 @@
 */
package org.apache.hadoop.mapred.nativetask.utils;

import junit.framework.TestCase;

import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
import org.junit.Test;
import org.junit.Assert;

public class TestReadWriteBuffer extends TestCase {

import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;

public class TestReadWriteBuffer {

  private static byte[] bytes = new byte[] { '0', 'a', 'b', 'c', 'd', '9' };

  @Test
  public void testReadWriteBuffer() {

    final ReadWriteBuffer buffer = new ReadWriteBuffer();

@@ -17,15 +17,16 @@
 */
package org.apache.hadoop.mapred.nativetask.utils;

import junit.framework.TestCase;
import org.junit.Test;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
import org.junit.Assert;

@SuppressWarnings({ "rawtypes", "unchecked" })
public class TestSizedWritable extends TestCase {
public class TestSizedWritable {

  @Test
  public void testSizedWritable() {
    final SizedWritable w = new SizedWritable(BytesWritable.class);
    Assert.assertTrue(w.length == SizedWritable.INVALID_LENGTH);

@@ -18,29 +18,35 @@
package org.apache.hadoop.examples;

import java.math.BigInteger;
import org.junit.Test;
import org.junit.Assert;

/** Tests for BaileyBorweinPlouffe */
public class TestBaileyBorweinPlouffe extends junit.framework.TestCase {
public class TestBaileyBorweinPlouffe {

  @Test
  public void testMod() {
    final BigInteger TWO = BigInteger.ONE.add(BigInteger.ONE);
    for(long n = 3; n < 100; n++) {
      for (long e = 1; e < 100; e++) {
        final long r = TWO.modPow(
            BigInteger.valueOf(e), BigInteger.valueOf(n)).longValue();
        assertEquals("e=" + e + ", n=" + n, r, BaileyBorweinPlouffe.mod(e, n));
        Assert.assertEquals("e=" + e + ", n=" + n, r, BaileyBorweinPlouffe
            .mod(e, n));
      }
    }
  }

  @Test
  public void testHexDigit() {
    final long[] answers = {0x43F6, 0xA308, 0x29B7, 0x49F1, 0x8AC8, 0x35EA};
    long d = 1;
    for(int i = 0; i < answers.length; i++) {
      assertEquals("d=" + d, answers[i], BaileyBorweinPlouffe.hexDigits(d));
      Assert.assertEquals("d=" + d, answers[i], BaileyBorweinPlouffe
          .hexDigits(d));
      d *= 10;
    }

    assertEquals(0x243FL, BaileyBorweinPlouffe.hexDigits(0));
    Assert.assertEquals(0x243FL, BaileyBorweinPlouffe.hexDigits(0));
  }
}

@@ -19,24 +19,30 @@ package org.apache.hadoop.examples.pi.math;

import java.math.BigInteger;
import java.util.Random;
import org.junit.Test;
import org.junit.Assert;

public class TestLongLong extends junit.framework.TestCase {
  static final Random RAN = new Random();
public class TestLongLong {

  static final Random RAN = new Random();
  static final long MASK = (1L << (LongLong.SIZE >> 1)) - 1;

  static long nextPositiveLong() {
    return RAN.nextLong() & MASK;
  }


  static void verifyMultiplication(long a, long b) {
    final LongLong ll = LongLong.multiplication(new LongLong(), a, b);
    final BigInteger bi = BigInteger.valueOf(a).multiply(BigInteger.valueOf(b));

    final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
    final String s = String.format(
        "\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
        b);
    //System.out.println(s);
    assertEquals(s, bi, ll.toBigInteger());
    Assert.assertEquals(s, bi, ll.toBigInteger());
  }

  @Test
  public void testMultiplication() {
    for(int i = 0; i < 100; i++) {
      final long a = nextPositiveLong();

@@ -50,19 +56,24 @@ public class TestLongLong extends junit.framework.TestCase {
  static void verifyRightShift(long a, long b) {
    final LongLong ll = new LongLong().set(a, b);
    final BigInteger bi = ll.toBigInteger();

    for(int i = 0; i < LongLong.SIZE >> 1; i++) {

    for (int i = 0; i < LongLong.SIZE >> 1; i++) {
      final long result = ll.shiftRight(i) & MASK;
      final long expected = bi.shiftRight(i).longValue() & MASK;
      final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
      assertEquals(s, expected, result);
      final String s = String.format(
          "\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
          b);
      Assert.assertEquals(s, expected, result);
    }

    final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
    final String s = String.format(
        "\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
        b);
    //System.out.println(s);
    assertEquals(s, bi, ll.toBigInteger());
    Assert.assertEquals(s, bi, ll.toBigInteger());
  }

  @Test
  public void testRightShift() {
    for(int i = 0; i < 1000; i++) {
      final long a = nextPositiveLong();

@@ -21,14 +21,16 @@ import java.math.BigInteger;
import java.util.Random;

import org.apache.hadoop.examples.pi.Util.Timer;
import org.junit.Assert;
import org.junit.Test;

public class TestModular extends junit.framework.TestCase {
  private static final Random RANDOM = new Random();
public class TestModular{
  private static final Random RANDOM = new Random();
  private static final BigInteger TWO = BigInteger.valueOf(2);


  static final int DIV_VALID_BIT = 32;
  static final long DIV_LIMIT = 1L << DIV_VALID_BIT;
  static final long DIV_LIMIT = 1L << DIV_VALID_BIT;

  // return r/n for n > r > 0
  static long div(long sum, long r, long n) {

@@ -36,7 +38,7 @@ public class TestModular extends junit.framework.TestCase {
    int i = DIV_VALID_BIT - 1;
    for(r <<= 1; r < n; r <<= 1) i--;
    //System.out.printf(" r=%d, n=%d, q=%d\n", r, n, q);


    for(; i >= 0 ;) {
      r -= n;
      q |= (1L << i);

@@ -48,14 +50,15 @@ public class TestModular extends junit.framework.TestCase {
    sum += q;
    return sum < DIV_LIMIT? sum: sum - DIV_LIMIT;
  }


  @Test
  public void testDiv() {
    for(long n = 2; n < 100; n++)
      for(long r = 1; r < n; r++) {
        final long a = div(0, r, n);
        final long b = (long)((r*1.0/n) * (1L << DIV_VALID_BIT));
        final String s = String.format("r=%d, n=%d, a=%X, b=%X", r, n, a, b);
        assertEquals(s, b, a);
        Assert.assertEquals(s, b, a);
      }
  }

@@ -64,16 +67,16 @@ public class TestModular extends junit.framework.TestCase {

    for(int i = 0; i < rn.length; i++) {
      rn[i] = new long[rsize + 1][];
      long n = RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL;
      long n = RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL;
      if (n <= 1) n = 0xFFFFFFFFFFFFFFFL - n;
      rn[i][0] = new long[]{n};
      final BigInteger N = BigInteger.valueOf(n);
      final BigInteger N = BigInteger.valueOf(n);

      for(int j = 1; j < rn[i].length; j++) {
        long r = RANDOM.nextLong();
        if (r < 0) r = -r;
        if (r >= n) r %= n;
        final BigInteger R = BigInteger.valueOf(r);
        final BigInteger R = BigInteger.valueOf(r);
        rn[i][j] = new long[]{r, R.multiply(R).mod(N).longValue()};
      }
    }

@@ -102,20 +105,20 @@ public class TestModular extends junit.framework.TestCase {
    } else {
      final int HALF = (63 - Long.numberOfLeadingZeros(n)) >> 1;
      final int FULL = HALF << 1;
      final long ONES = (1 << HALF) - 1;

      final long ONES = (1 << HALF) - 1;

      final long high = r >>> HALF;
      final long low = r &= ONES;

      r *= r;
      if (r >= n) r %= n;


      if (high != 0) {
        long s = high * high;
        if (s >= n) s %= n;
        for(int i = 0; i < FULL; i++)
          if ((s <<= 1) >= n) s -= n;


        if (low == 0)
          r = s;
        else {

@@ -123,7 +126,7 @@ public class TestModular extends junit.framework.TestCase {
          if (t >= n) t %= n;
          for(int i = -1; i < HALF; i++)
            if ((t <<= 1) >= n) t -= n;


          r += s;
          if (r >= n) r -= n;
          r += t;

@@ -133,7 +136,7 @@ public class TestModular extends junit.framework.TestCase {
    }
    return r;
  }


  static void squareBenchmarks() {
    final Timer t = new Timer(false);
    t.tick("squareBenchmarks(), MAX_SQRT=" + Modular.MAX_SQRT_LONG);

@@ -147,8 +150,11 @@ public class TestModular extends junit.framework.TestCase {
        final long r = rn[i][j][0];
        final long answer = rn[i][j][1];
        final long s = square_slow(r, n);
        if (s != answer)
          assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
        if (s != answer) {
          Assert.assertEquals(
              "r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
              answer, s);
        }
      }
    }
    t.tick("square_slow");

@@ -161,8 +167,11 @@ public class TestModular extends junit.framework.TestCase {
        final long r = rn[i][j][0];
        final long answer = rn[i][j][1];
        final long s = square(r, n, r2p64);
        if (s != answer)
          assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
        if (s != answer) {
          Assert.assertEquals(
              "r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
              answer, s);
        }
      }
    }
    t.tick("square");

@@ -175,8 +184,11 @@ public class TestModular extends junit.framework.TestCase {
        final long answer = rn[i][j][1];
        final BigInteger R = BigInteger.valueOf(r);
        final long s = R.multiply(R).mod(N).longValue();
        if (s != answer)
          assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
        if (s != answer) {
          Assert.assertEquals(
              "r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
              answer, s);
        }
      }
    }
    t.tick("R.multiply(R).mod(N)");

@@ -189,8 +201,11 @@ public class TestModular extends junit.framework.TestCase {
        final long answer = rn[i][j][1];
        final BigInteger R = BigInteger.valueOf(r);
        final long s = R.modPow(TWO, N).longValue();
        if (s != answer)
          assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
        if (s != answer) {
          Assert.assertEquals(
              "r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
              answer, s);
        }
      }
    }
    t.tick("R.modPow(TWO, N)");

@@ -201,15 +216,15 @@ public class TestModular extends junit.framework.TestCase {

    for(int i = 0; i < en.length; i++) {
      en[i] = new long[esize + 1][];
      long n = (RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL) | 1L;
      long n = (RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL) | 1L;
      if (n == 1) n = 3;
      en[i][0] = new long[]{n};
      final BigInteger N = BigInteger.valueOf(n);
      final BigInteger N = BigInteger.valueOf(n);

      for(int j = 1; j < en[i].length; j++) {
        long e = RANDOM.nextLong();
        if (e < 0) e = -e;
        final BigInteger E = BigInteger.valueOf(e);
        final BigInteger E = BigInteger.valueOf(e);
        en[i][j] = new long[]{e, TWO.modPow(E, N).longValue()};
      }
    }

@@ -253,10 +268,10 @@ public class TestModular extends junit.framework.TestCase {
  static class Montgomery2 extends Montgomery {
    /** Compute 2^y mod N for N odd. */
    long mod2(final long y) {
      long r0 = R - N;
      long r0 = R - N;
      long r1 = r0 << 1;
      if (r1 >= N) r1 -= N;


      for(long mask = Long.highestOneBit(y); mask > 0; mask >>>= 1) {
        if ((mask & y) == 0) {
          r1 = product.m(r0, r1);

@@ -269,7 +284,7 @@ public class TestModular extends junit.framework.TestCase {
      return product.m(r0, 1);
    }
  }


  static void modBenchmarks() {
    final Timer t = new Timer(false);
    t.tick("modBenchmarks()");

@@ -283,12 +298,15 @@ public class TestModular extends junit.framework.TestCase {
        final long e = en[i][j][0];
        final long answer = en[i][j][1];
        final long s = Modular.mod(e, n);
        if (s != answer)
          assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
        if (s != answer) {
          Assert.assertEquals(
              "e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
              answer, s);
        }
      }
    }
    t.tick("Modular.mod");


    final Montgomery2 m2 = new Montgomery2();
    for(int i = 0; i < en.length; i++) {
      final long n = en[i][0][0];

@@ -297,8 +315,11 @@ public class TestModular extends junit.framework.TestCase {
        final long e = en[i][j][0];
        final long answer = en[i][j][1];
        final long s = m2.mod(e);
        if (s != answer)
          assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
        if (s != answer) {
          Assert.assertEquals(
              "e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
              answer, s);
        }
      }
    }
    t.tick("montgomery.mod");

@@ -310,21 +331,27 @@ public class TestModular extends junit.framework.TestCase {
        final long e = en[i][j][0];
        final long answer = en[i][j][1];
        final long s = m2.mod2(e);
        if (s != answer)
          assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
        if (s != answer) {
          Assert.assertEquals(
              "e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
              answer, s);
        }
      }
    }
    t.tick("montgomery.mod2");

    for(int i = 0; i < en.length; i++) {
      final long n = en[i][0][0];
      final BigInteger N = BigInteger.valueOf(n);
      final BigInteger N = BigInteger.valueOf(n);
      for(int j = 1; j < en[i].length; j++) {
        final long e = en[i][j][0];
        final long answer = en[i][j][1];
        final long s = TWO.modPow(BigInteger.valueOf(e), N).longValue();
        if (s != answer)
          assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
        if (s != answer) {
          Assert.assertEquals(
              "e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
              answer, s);
        }
      }
    }
    t.tick("BigInteger.modPow(e, n)");

@@ -28,14 +28,19 @@ import org.apache.hadoop.examples.pi.Container;
import org.apache.hadoop.examples.pi.Util;
import org.apache.hadoop.examples.pi.Util.Timer;
import org.apache.hadoop.examples.pi.math.TestModular.Montgomery2;
import org.junit.Test;
import org.junit.Assert;

public class TestSummation extends junit.framework.TestCase {
public class TestSummation {
  static final Random RANDOM = new Random();
  static final BigInteger TWO = BigInteger.valueOf(2);
  private static final double DOUBLE_DELTA = 0.000000001f;

  private static Summation2 newSummation(final long base, final long range, final long delta) {
    final ArithmeticProgression N = new ArithmeticProgression('n', base+3, delta, base+3+range);
    final ArithmeticProgression E = new ArithmeticProgression('e', base+range, -delta, base);
    final ArithmeticProgression N = new ArithmeticProgression('n', base + 3,
        delta, base + 3 + range);
    final ArithmeticProgression E = new ArithmeticProgression('e', base + range,
        -delta, base);
    return new Summation2(N, E);
  }

@@ -53,10 +58,11 @@ public class TestSummation extends junit.framework.TestCase {

    final List<Summation> combined = Util.combine(a);
    // Util.out.println("combined=" + combined);
    assertEquals(1, combined.size());
    assertEquals(sigma, combined.get(0));
    Assert.assertEquals(1, combined.size());
    Assert.assertEquals(sigma, combined.get(0));
  }

  @Test
  public void testSubtract() {
    final Summation sigma = newSummation(3, 10000, 20);
    final int size = 10;

@@ -112,7 +118,9 @@ public class TestSummation extends junit.framework.TestCase {
      long n = N.value;
      double s = 0;
      for(; e > E.limit; e += E.delta) {
        s = Modular.addMod(s, TWO.modPow(BigInteger.valueOf(e), BigInteger.valueOf(n)).doubleValue()/n);
        s = Modular.addMod(s,
            TWO.modPow(BigInteger.valueOf(e), BigInteger.valueOf(n))
                .doubleValue() / n);
        n += N.delta;
      }
      return s;

@@ -124,16 +132,16 @@ public class TestSummation extends junit.framework.TestCase {
    t.tick("sigma=" + sigma);
    final double value = sigma.compute();
    t.tick("compute=" + value);
    assertEquals(value, sigma.compute_modular());
    Assert.assertEquals(value, sigma.compute_modular(), DOUBLE_DELTA);
    t.tick("compute_modular");
    assertEquals(value, sigma.compute_montgomery());
    Assert.assertEquals(value, sigma.compute_montgomery(), DOUBLE_DELTA);
    t.tick("compute_montgomery");
    assertEquals(value, sigma.compute_montgomery2());
    Assert.assertEquals(value, sigma.compute_montgomery2(), DOUBLE_DELTA);
    t.tick("compute_montgomery2");

    assertEquals(value, sigma.compute_modBigInteger());
    Assert.assertEquals(value, sigma.compute_modBigInteger(), DOUBLE_DELTA);
    t.tick("compute_modBigInteger");
    assertEquals(value, sigma.compute_modPow());
    Assert.assertEquals(value, sigma.compute_modPow(), DOUBLE_DELTA);
    t.tick("compute_modPow");
  }

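The DOUBLE_DELTA additions above are forced by the JUnit 4 API: assertEquals on two doubles without a tolerance is deprecated, because exact floating-point equality is rarely what a test means, so each double comparison now states how close is close enough. A small sketch of the idiom (the 1e-9 tolerance mirrors the constant introduced above):

import static org.junit.Assert.assertEquals;
import org.junit.Test;

public class DoubleDeltaExample {
  private static final double DELTA = 1e-9;

  @Test
  public void testFloatingPointSum() {
    double sum = 0.1 + 0.2;        // 0.30000000000000004 in IEEE 754
    assertEquals(0.3, sum, DELTA); // passes: difference is below DELTA
    // assertEquals(0.3, sum);     // deprecated two-arg form; fails and
    //                             // tells you to supply a delta instead
  }
}
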
@@ -20,10 +20,10 @@ package org.apache.hadoop.contrib.utils.join;

import java.io.IOException;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.extensions.TestSetup;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

@@ -36,24 +36,27 @@ import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;

public class TestDataJoin extends TestCase {

/**
 * Class to test JOIN between two data sources.
 */
public class TestDataJoin {

  private static MiniDFSCluster cluster = null;

  public static Test suite() {
    TestSetup setup = new TestSetup(new TestSuite(TestDataJoin.class)) {
      protected void setUp() throws Exception {
        Configuration conf = new Configuration();
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
      }
      protected void tearDown() throws Exception {
        if (cluster != null) {
          cluster.shutdown();
        }
      }
    };
    return setup;

  @Before
  public void setUp() throws Exception {
    Configuration conf = new Configuration();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
  }

  @After
  public void tearDown() throws Exception {
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  @Test
  public void testDataJoin() throws Exception {
    final int srcs = 4;
    JobConf job = new JobConf();

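One behavioral subtlety in the TestDataJoin rewrite above: the JUnit 3 TestSetup wrapper ran its setUp/tearDown once around the whole suite, whereas @Before/@After run around every test method, so the mini-cluster is now built per test. When once-per-class really is wanted, JUnit 4 offers @BeforeClass/@AfterClass on static methods; a hedged sketch with a plain placeholder object standing in for the cluster:

import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

public class OncePerClassExample {
  private static StringBuilder sharedResource; // stands in for a mini-cluster

  @BeforeClass
  public static void startResource() { // runs once, before all tests
    sharedResource = new StringBuilder("ready");
  }

  @AfterClass
  public static void stopResource() {  // runs once, after all tests
    sharedResource = null;
  }

  @Test
  public void testResourceIsUp() {
    Assert.assertEquals("ready", sharedResource.toString());
  }
}
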
@@ -38,11 +38,13 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.MiniMRClientClusterFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.event.Level;

import static org.slf4j.LoggerFactory.getLogger;

public class TestDistCh extends junit.framework.TestCase {
public class TestDistCh {
  {
    GenericTestUtils.setLogLevel(
        getLogger("org.apache.hadoop.hdfs.StateChange"), Level.ERROR);

@@ -75,20 +77,20 @@ public class TestDistCh extends junit.framework.TestCase {

  Path createSmallFile(Path dir) throws IOException {
    final Path f = new Path(dir, "f" + ++fcount);
    assertTrue(!fs.exists(f));
    Assert.assertTrue(!fs.exists(f));
    final DataOutputStream out = fs.create(f);
    try {
      out.writeBytes("createSmallFile: f=" + f);
    } finally {
      out.close();
    }
    assertTrue(fs.exists(f));
    Assert.assertTrue(fs.exists(f));
    return f;
  }

  Path mkdir(Path dir) throws IOException {
    assertTrue(fs.mkdirs(dir));
    assertTrue(fs.getFileStatus(dir).isDirectory());
    Assert.assertTrue(fs.mkdirs(dir));
    Assert.assertTrue(fs.getFileStatus(dir).isDirectory());
    return dir;
  }

@@ -127,7 +129,8 @@ public class TestDistCh extends junit.framework.TestCase {
      defaultPerm = permission == null || "".equals(permission);
    }
  }


  @Test
  public void testDistCh() throws Exception {
    final Configuration conf = new Configuration();

@@ -190,13 +193,13 @@ public class TestDistCh extends junit.framework.TestCase {
  }

  static void checkFileStatus(ChPermissionStatus expected, FileStatus actual) {
    assertEquals(expected.getUserName(), actual.getOwner());
    assertEquals(expected.getGroupName(), actual.getGroup());
    Assert.assertEquals(expected.getUserName(), actual.getOwner());
    Assert.assertEquals(expected.getGroupName(), actual.getGroup());
    FsPermission perm = expected.getPermission();
    if (actual.isFile() && expected.defaultPerm) {
      perm = perm.applyUMask(UMASK);
    }
    assertEquals(perm, actual.getPermission());
    Assert.assertEquals(perm, actual.getPermission());
  }

  private static String runLsr(final FsShell shell, String root, int returnvalue

@@ -210,7 +213,7 @@ public class TestDistCh extends junit.framework.TestCase {
    System.setErr(out);
    final String results;
    try {
      assertEquals(returnvalue, shell.run(new String[]{"-lsr", root}));
      Assert.assertEquals(returnvalue, shell.run(new String[]{"-lsr", root}));
      results = bytes.toString();
    } finally {
      IOUtils.closeStream(out);

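The TestDistCh hunks explain why so many call sites change in this commit: assertTrue and assertEquals used to be inherited instance methods of TestCase, so once the superclass is gone every call must resolve against org.junit.Assert, either fully qualified as this patch does or via a static import. Both equivalent idioms, sketched:

import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class AssertStyleExample {
  @Test
  public void testBothStyles() {
    Assert.assertEquals("qualified form, as used above", 3, 1 + 2);
    assertTrue("static-import form, equivalent", 1 + 2 == 3);
  }
}
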
@@ -26,10 +26,12 @@ import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;

public class TestTypedBytesWritable extends TestCase {
public class TestTypedBytesWritable {

  @Test
  public void testToString() {
    TypedBytesWritable tbw = new TypedBytesWritable();
    tbw.setValue(true);

@@ -46,6 +48,7 @@ public class TestTypedBytesWritable extends TestCase {
    assertEquals("random text", tbw.toString());
  }

  @Test
  public void testIO() throws IOException {
    TypedBytesWritable tbw = new TypedBytesWritable();
    tbw.setValue(12345);

@@ -22,20 +22,23 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.junit.Test;

public class TestAmFilterInitializer extends TestCase {
/**
 * Test class for {@Link AmFilterInitializer}.
 */
public class TestAmFilterInitializer {

  @Override
  protected void setUp() throws Exception {
    super.setUp();
  @Before
  public void setUp() throws Exception {
    NetUtils.addStaticResolution("host1", "172.0.0.1");
    NetUtils.addStaticResolution("host2", "172.0.0.1");
    NetUtils.addStaticResolution("host3", "172.0.0.1");

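The final hunk shows one last detail of the migration: the JUnit 3 override was protected and had to call super.setUp() to keep TestCase's bookkeeping intact, while a JUnit 4 @Before method is an ordinary public method on a class with no test superclass, so the super call simply disappears. A minimal sketch:

import org.junit.Before;

public class SetUpExample {
  private String fixture;

  // JUnit 3: protected void setUp() throws Exception { super.setUp(); ... }
  // JUnit 4: any public void no-arg method annotated @Before; no super call.
  @Before
  public void setUp() {
    fixture = "initialized";
  }
}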