HADOOP-14729. Upgrade JUnit 3 test cases to JUnit 4. Contributed by Ajay Kumar.

This commit is contained in:
Arpit Agarwal 2017-08-25 09:55:46 -07:00
parent 3a4e861169
commit 8b7cbe3840
53 changed files with 718 additions and 416 deletions

View File

@ -44,7 +44,11 @@ import java.util.regex.Pattern;
import static java.util.concurrent.TimeUnit.*;
import com.fasterxml.jackson.databind.ObjectMapper;
import junit.framework.TestCase;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.junit.Assert.assertArrayEquals;
import org.apache.commons.lang.StringUtils;
@ -65,11 +69,11 @@ import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.mockito.Mockito;
public class TestConfiguration extends TestCase {
public class TestConfiguration {
private static final double DOUBLE_DELTA = 0.000000001f;
private Configuration conf;
final static String CONFIG = new File("./test-config-TestConfiguration.xml").getAbsolutePath();
final static String CONFIG2 = new File("./test-config2-TestConfiguration.xml").getAbsolutePath();
@ -94,18 +98,16 @@ public class TestConfiguration extends TestCase {
private BufferedWriter out;
@Override
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
conf = new Configuration();
}
@Override
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
if(out != null) {
out.close();
}
super.tearDown();
new File(CONFIG).delete();
new File(CONFIG2).delete();
new File(CONFIG_FOR_ENUM).delete();
@ -164,6 +166,7 @@ public class TestConfiguration extends TestCase {
+ " [\n<!ENTITY " + entity + " SYSTEM \"" + value + "\">\n]>");
}
@Test
public void testInputStreamResource() throws Exception {
StringWriter writer = new StringWriter();
out = new BufferedWriter(writer);
@ -182,6 +185,7 @@ public class TestConfiguration extends TestCase {
assertEquals("A", conf.get("prop"));
}
@Test
public void testFinalWarnings() throws Exception {
// Make a configuration file with a final property
StringWriter writer = new StringWriter();
@ -226,6 +230,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testNoFinalWarnings() throws Exception {
// Make a configuration file with a final property
StringWriter writer = new StringWriter();
@ -263,6 +268,7 @@ public class TestConfiguration extends TestCase {
@Test
public void testFinalWarningsMultiple() throws Exception {
// Make a configuration file with a repeated final property
StringWriter writer = new StringWriter();
@ -296,6 +302,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testFinalWarningsMultipleOverride() throws Exception {
// Make a configuration file with 2 final properties with different values
StringWriter writer = new StringWriter();
@ -358,6 +365,7 @@ public class TestConfiguration extends TestCase {
* round-trips multi-byte string literals through saving and loading of config
* and asserts that the same values were read.
*/
@Test
public void testMultiByteCharacters() throws IOException {
String priorDefaultEncoding = System.getProperty("file.encoding");
try {
@ -388,6 +396,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testVariableSubstitution() throws IOException {
// stubbing only environment dependent functions
Configuration mock = Mockito.spy(conf);
@ -425,6 +434,7 @@ public class TestConfiguration extends TestCase {
assertTrue(mock.getInt("my.int", -1) == 42);
}
@Test
public void testEnvDefault() throws IOException {
Configuration mock = Mockito.spy(conf);
Mockito.when(mock.getenv("NULL_VALUE")).thenReturn(null);
@ -465,6 +475,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testFinalParam() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -486,6 +497,7 @@ public class TestConfiguration extends TestCase {
assertNull("my var is not final", conf2.get("my.var"));
}
@Test
public void testCompactFormat() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -530,8 +542,8 @@ public class TestConfiguration extends TestCase {
}
void declareProperty(String name, String val, String expectEval,
boolean isFinal)
throws IOException {
boolean isFinal)
throws IOException {
appendProperty(name, val, isFinal);
Prop p = new Prop();
p.name = name;
@ -546,7 +558,7 @@ public class TestConfiguration extends TestCase {
void appendProperty(String name, String val, boolean isFinal,
String ... sources)
throws IOException {
throws IOException {
out.write("<property>");
out.write("<name>");
out.write(name);
@ -570,13 +582,13 @@ public class TestConfiguration extends TestCase {
}
void appendCompactFormatProperty(String name, String val, boolean isFinal)
throws IOException {
throws IOException {
appendCompactFormatProperty(name, val, isFinal, null);
}
void appendCompactFormatProperty(String name, String val, boolean isFinal,
String source)
throws IOException {
throws IOException {
out.write("<property ");
out.write("name=\"");
out.write(name);
@ -595,6 +607,7 @@ public class TestConfiguration extends TestCase {
out.write("/>\n");
}
@Test
public void testOverlay() throws IOException{
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -629,6 +642,7 @@ public class TestConfiguration extends TestCase {
}
@Test
public void testCommentsInValue() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -640,6 +654,7 @@ public class TestConfiguration extends TestCase {
assertEquals("this contains a comment", conf.get("my.comment"));
}
@Test
public void testEscapedCharactersInValue() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -651,6 +666,7 @@ public class TestConfiguration extends TestCase {
assertEquals("''''", conf.get("my.comment"));
}
@Test
public void testTrim() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -675,6 +691,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testGetLocalPath() throws IOException {
Configuration conf = new Configuration();
String[] dirs = new String[]{"a", "b", "c"};
@ -691,6 +708,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testGetFile() throws IOException {
Configuration conf = new Configuration();
String[] dirs = new String[]{"a", "b", "c"};
@ -707,6 +725,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testToString() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -720,6 +739,7 @@ public class TestConfiguration extends TestCase {
assertEquals(expectedOutput, conf.toString());
}
@Test
public void testWriteXml() throws IOException {
Configuration conf = new Configuration();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@ -730,6 +750,7 @@ public class TestConfiguration extends TestCase {
assertTrue("Result has proper footer", result.endsWith("</configuration>"));
}
@Test
public void testIncludes() throws Exception {
tearDown();
System.out.println("XXX testIncludes");
@ -767,6 +788,7 @@ public class TestConfiguration extends TestCase {
tearDown();
}
@Test
public void testCharsetInDocumentEncoding() throws Exception {
tearDown();
out=new BufferedWriter(new OutputStreamWriter(new FileOutputStream(CONFIG),
@ -785,6 +807,7 @@ public class TestConfiguration extends TestCase {
tearDown();
}
@Test
public void testEntityReference() throws Exception {
tearDown();
out=new BufferedWriter(new FileWriter(CONFIG));
@ -803,6 +826,7 @@ public class TestConfiguration extends TestCase {
tearDown();
}
@Test
public void testSystemEntityReference() throws Exception {
tearDown();
out=new BufferedWriter(new FileWriter(CONFIG2));
@ -824,6 +848,7 @@ public class TestConfiguration extends TestCase {
tearDown();
}
@Test
public void testIncludesWithFallback() throws Exception {
tearDown();
out=new BufferedWriter(new FileWriter(CONFIG2));
@ -862,6 +887,7 @@ public class TestConfiguration extends TestCase {
tearDown();
}
@Test
public void testRelativeIncludes() throws Exception {
tearDown();
String relConfig = new File("./tmp/test-config.xml").getAbsolutePath();
@ -893,6 +919,7 @@ public class TestConfiguration extends TestCase {
new File(new File(relConfig).getParent()).delete();
}
@Test
public void testIntegerRanges() {
Configuration conf = new Configuration();
conf.set("first", "-100");
@ -924,6 +951,7 @@ public class TestConfiguration extends TestCase {
assertEquals(true, range.isIncluded(100000000));
}
@Test
public void testGetRangeIterator() throws Exception {
Configuration config = new Configuration(false);
IntegerRanges ranges = config.getRange("Test", "");
@ -953,6 +981,7 @@ public class TestConfiguration extends TestCase {
assertEquals(expected, found);
}
@Test
public void testHexValues() throws IOException{
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -984,6 +1013,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testIntegerValues() throws IOException{
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1015,6 +1045,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testHumanReadableValues() throws IOException {
out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1035,6 +1066,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testBooleanValues() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1059,6 +1091,7 @@ public class TestConfiguration extends TestCase {
assertEquals(false, conf.getBoolean("test.bool8", false));
}
@Test
public void testFloatValues() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1070,10 +1103,10 @@ public class TestConfiguration extends TestCase {
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
assertEquals(3.1415f, conf.getFloat("test.float1", 0.0f));
assertEquals(3.1415f, conf.getFloat("test.float2", 0.0f));
assertEquals(-3.1415f, conf.getFloat("test.float3", 0.0f));
assertEquals(-3.1415f, conf.getFloat("test.float4", 0.0f));
assertEquals(3.1415f, conf.getFloat("test.float1", 0.0f), DOUBLE_DELTA);
assertEquals(3.1415f, conf.getFloat("test.float2", 0.0f), DOUBLE_DELTA);
assertEquals(-3.1415f, conf.getFloat("test.float3", 0.0f), DOUBLE_DELTA);
assertEquals(-3.1415f, conf.getFloat("test.float4", 0.0f), DOUBLE_DELTA);
try {
conf.getFloat("test.float5", 0.0f);
fail("Property had invalid float value, but was read successfully.");
@ -1082,6 +1115,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testDoubleValues() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1093,10 +1127,10 @@ public class TestConfiguration extends TestCase {
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
assertEquals(3.1415, conf.getDouble("test.double1", 0.0));
assertEquals(3.1415, conf.getDouble("test.double2", 0.0));
assertEquals(-3.1415, conf.getDouble("test.double3", 0.0));
assertEquals(-3.1415, conf.getDouble("test.double4", 0.0));
assertEquals(3.1415, conf.getDouble("test.double1", 0.0), DOUBLE_DELTA);
assertEquals(3.1415, conf.getDouble("test.double2", 0.0), DOUBLE_DELTA);
assertEquals(-3.1415, conf.getDouble("test.double3", 0.0), DOUBLE_DELTA);
assertEquals(-3.1415, conf.getDouble("test.double4", 0.0), DOUBLE_DELTA);
try {
conf.getDouble("test.double5", 0.0);
fail("Property had invalid double value, but was read successfully.");
@ -1105,6 +1139,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testGetClass() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1113,10 +1148,13 @@ public class TestConfiguration extends TestCase {
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
assertEquals("java.lang.Integer", conf.getClass("test.class1", null).getCanonicalName());
assertEquals("java.lang.Integer", conf.getClass("test.class2", null).getCanonicalName());
assertEquals("java.lang.Integer",
conf.getClass("test.class1", null).getCanonicalName());
assertEquals("java.lang.Integer",
conf.getClass("test.class2", null).getCanonicalName());
}
@Test
public void testGetClasses() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1133,13 +1171,14 @@ public class TestConfiguration extends TestCase {
assertArrayEquals(expectedNames, extractClassNames(classes2));
}
@Test
public void testGetStringCollection() {
Configuration c = new Configuration();
c.set("x", " a, b\n,\nc ");
Collection<String> strs = c.getTrimmedStringCollection("x");
assertEquals(3, strs.size());
assertArrayEquals(new String[]{ "a", "b", "c" },
strs.toArray(new String[0]));
strs.toArray(new String[0]));
// Check that the result is mutable
strs.add("z");
@ -1150,13 +1189,14 @@ public class TestConfiguration extends TestCase {
strs.add("z");
}
@Test
public void testGetTrimmedStringCollection() {
Configuration c = new Configuration();
c.set("x", "a, b, c");
Collection<String> strs = c.getStringCollection("x");
assertEquals(3, strs.size());
assertArrayEquals(new String[]{ "a", " b", " c" },
strs.toArray(new String[0]));
strs.toArray(new String[0]));
// Check that the result is mutable
strs.add("z");
@ -1177,6 +1217,7 @@ public class TestConfiguration extends TestCase {
enum Dingo { FOO, BAR };
enum Yak { RAB, FOO };
@Test
public void testEnum() {
Configuration conf = new Configuration();
conf.setEnum("test.enum", Dingo.FOO);
@ -1193,6 +1234,7 @@ public class TestConfiguration extends TestCase {
assertTrue(fail);
}
@Test
public void testEnumFromXml() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG_FOR_ENUM));
startConfig();
@ -1213,6 +1255,7 @@ public class TestConfiguration extends TestCase {
assertTrue(fail);
}
@Test
public void testTimeDuration() {
Configuration conf = new Configuration(false);
conf.setTimeDuration("test.time.a", 7L, SECONDS);
@ -1246,17 +1289,18 @@ public class TestConfiguration extends TestCase {
assertEquals(30L, conf.getTimeDuration("test.time.d", 40, SECONDS));
for (Configuration.ParsedTimeDuration ptd :
Configuration.ParsedTimeDuration.values()) {
Configuration.ParsedTimeDuration.values()) {
conf.setTimeDuration("test.time.unit", 1, ptd.unit());
assertEquals(1 + ptd.suffix(), conf.get("test.time.unit"));
assertEquals(1, conf.getTimeDuration("test.time.unit", 2, ptd.unit()));
}
}
@Test
public void testTimeDurationWarning() {
// check warn for possible loss of precision
final String warnFormat = "Possible loss of precision converting %s" +
" to %s for test.time.warn";
" to %s for test.time.warn";
final ArrayList<String> warnchk = new ArrayList<>();
Configuration wconf = new Configuration(false) {
@Override
@ -1290,6 +1334,7 @@ public class TestConfiguration extends TestCase {
assertEquals(2, warnchk.size());
}
@Test
public void testPattern() throws IOException {
out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1303,20 +1348,21 @@ public class TestConfiguration extends TestCase {
Pattern defaultPattern = Pattern.compile("x+");
// Return default if missing
assertEquals(defaultPattern.pattern(),
conf.getPattern("xxxxx", defaultPattern).pattern());
conf.getPattern("xxxxx", defaultPattern).pattern());
// Return null if empty and default is null
assertNull(conf.getPattern("test.pattern1", null));
// Return default for empty
assertEquals(defaultPattern.pattern(),
conf.getPattern("test.pattern1", defaultPattern).pattern());
conf.getPattern("test.pattern1", defaultPattern).pattern());
// Return default for malformed
assertEquals(defaultPattern.pattern(),
conf.getPattern("test.pattern2", defaultPattern).pattern());
conf.getPattern("test.pattern2", defaultPattern).pattern());
// Works for correct patterns
assertEquals("a+b",
conf.getPattern("test.pattern3", defaultPattern).pattern());
conf.getPattern("test.pattern3", defaultPattern).pattern());
}
@Test
public void testPropertySource() throws IOException {
out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1329,17 +1375,19 @@ public class TestConfiguration extends TestCase {
assertEquals(1, sources.length);
assertEquals(
"Resource string returned for a file-loaded property" +
" must be a proper absolute path",
" must be a proper absolute path",
fileResource,
new Path(sources[0]));
assertArrayEquals("Resource string returned for a set() property must be " +
"\"programmatically\"",
"\"programmatically\"",
new String[]{"programmatically"},
conf.getPropertySources("fs.defaultFS"));
assertEquals("Resource string returned for an unset property must be null",
assertArrayEquals("Resource string returned for an unset property must "
+ "be null",
null, conf.getPropertySources("fs.defaultFoo"));
}
@Test
public void testMultiplePropertySource() throws IOException {
out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1354,11 +1402,12 @@ public class TestConfiguration extends TestCase {
assertEquals("c", sources[2]);
assertEquals(
"Resource string returned for a file-loaded property" +
" must be a proper absolute path",
" must be a proper absolute path",
fileResource,
new Path(sources[3]));
}
@Test
public void testSocketAddress() {
Configuration conf = new Configuration();
final String defaultAddr = "host:1";
@ -1387,14 +1436,15 @@ public class TestConfiguration extends TestCase {
} catch (IllegalArgumentException iae) {
threwException = true;
assertEquals("Does not contain a valid host:port authority: " +
"bad:-port (configuration property 'myAddress')",
iae.getMessage());
"bad:-port (configuration property 'myAddress')",
iae.getMessage());
} finally {
assertTrue(threwException);
}
}
@Test
public void testSetSocketAddress() {
Configuration conf = new Configuration();
NetUtils.addStaticResolution("host", "127.0.0.1");
@ -1405,6 +1455,7 @@ public class TestConfiguration extends TestCase {
assertEquals(defaultAddr, NetUtils.getHostPortString(addr));
}
@Test
public void testUpdateSocketAddress() throws IOException {
InetSocketAddress addr = NetUtils.createSocketAddrForHost("host", 1);
InetSocketAddress connectAddr = conf.updateConnectAddr("myAddress", addr);
@ -1413,9 +1464,10 @@ public class TestConfiguration extends TestCase {
addr = new InetSocketAddress(1);
connectAddr = conf.updateConnectAddr("myAddress", addr);
assertEquals(connectAddr.getHostName(),
InetAddress.getLocalHost().getHostName());
InetAddress.getLocalHost().getHostName());
}
@Test
public void testReload() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -1457,6 +1509,7 @@ public class TestConfiguration extends TestCase {
assertEquals("value5", conf.get("test.key4"));
}
@Test
public void testSize() {
Configuration conf = new Configuration(false);
conf.set("a", "A");
@ -1464,6 +1517,7 @@ public class TestConfiguration extends TestCase {
assertEquals(2, conf.size());
}
@Test
public void testClear() {
Configuration conf = new Configuration(false);
conf.set("a", "A");
@ -1476,6 +1530,7 @@ public class TestConfiguration extends TestCase {
public static class Fake_ClassLoader extends ClassLoader {
}
@Test
public void testClassLoader() {
Configuration conf = new Configuration(false);
conf.setQuietMode(false);
@ -1546,6 +1601,7 @@ public class TestConfiguration extends TestCase {
return ac;
}
@Test
public void testGetSetTrimmedNames() throws IOException {
Configuration conf = new Configuration(false);
conf.set(" name", "value");
@ -1554,6 +1610,7 @@ public class TestConfiguration extends TestCase {
assertEquals("value", conf.getRaw(" name "));
}
@Test
public void testDumpProperty() throws IOException {
StringWriter outWriter = new StringWriter();
ObjectMapper mapper = new ObjectMapper();
@ -1668,13 +1725,14 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testDumpConfiguration() throws IOException {
StringWriter outWriter = new StringWriter();
Configuration.dumpConfiguration(conf, outWriter);
String jsonStr = outWriter.toString();
ObjectMapper mapper = new ObjectMapper();
JsonConfiguration jconf =
mapper.readValue(jsonStr, JsonConfiguration.class);
mapper.readValue(jsonStr, JsonConfiguration.class);
int defaultLength = jconf.getProperties().length;
// add 3 keys to the existing configuration properties
@ -1754,6 +1812,7 @@ public class TestConfiguration extends TestCase {
outWriter.close();
}
@Test
public void testDumpConfiguratioWithoutDefaults() throws IOException {
// check for case when default resources are not loaded
Configuration config = new Configuration(false);
@ -1762,7 +1821,7 @@ public class TestConfiguration extends TestCase {
String jsonStr = outWriter.toString();
ObjectMapper mapper = new ObjectMapper();
JsonConfiguration jconf =
mapper.readValue(jsonStr, JsonConfiguration.class);
mapper.readValue(jsonStr, JsonConfiguration.class);
//ensure that no properties are loaded.
assertEquals(0, jconf.getProperties().length);
@ -1801,6 +1860,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testDumpSensitiveProperty() throws IOException {
final String myPassword = "ThisIsMyPassword";
Configuration testConf = new Configuration(false);
@ -1818,6 +1878,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testDumpSensitiveConfiguration() throws IOException {
final String myPassword = "ThisIsMyPassword";
Configuration testConf = new Configuration(false);
@ -1835,6 +1896,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testGetValByRegex() {
Configuration conf = new Configuration();
String key1 = "t.abc.key1";
@ -1853,10 +1915,11 @@ public class TestConfiguration extends TestCase {
assertTrue("Picked out wrong key " + key4, !res.containsKey(key4));
}
@Test
public void testGetClassesShouldReturnDefaultValue() throws Exception {
Configuration config = new Configuration();
Class<?>[] classes =
config.getClasses("testClassName", Configuration.class);
config.getClasses("testClassName", Configuration.class);
assertEquals(
"Not returning expected number of classes. Number of returned classes ="
+ classes.length, 1, classes.length);
@ -1864,6 +1927,7 @@ public class TestConfiguration extends TestCase {
classes[0]);
}
@Test
public void testGetClassesShouldReturnEmptyArray()
throws Exception {
Configuration config = new Configuration();
@ -1874,6 +1938,7 @@ public class TestConfiguration extends TestCase {
+ classes.length, 0, classes.length);
}
@Test
public void testSettingValueNull() throws Exception {
Configuration config = new Configuration();
try {
@ -1886,6 +1951,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testSettingKeyNull() throws Exception {
Configuration config = new Configuration();
try {
@ -1897,6 +1963,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testInvalidSubstitution() {
final Configuration configuration = new Configuration(false);
@ -1913,6 +1980,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testIncompleteSubbing() {
Configuration configuration = new Configuration(false);
String key = "test.random.key";
@ -1931,6 +1999,7 @@ public class TestConfiguration extends TestCase {
}
}
@Test
public void testBoolean() {
boolean value = true;
Configuration configuration = new Configuration();
@ -1938,6 +2007,7 @@ public class TestConfiguration extends TestCase {
assertEquals(value, configuration.getBoolean("value", false));
}
@Test
public void testBooleanIfUnset() {
boolean value = true;
Configuration configuration = new Configuration();
@ -1947,20 +2017,23 @@ public class TestConfiguration extends TestCase {
assertEquals(value, configuration.getBoolean("value", false));
}
@Test
public void testFloat() {
float value = 1.0F;
Configuration configuration = new Configuration();
configuration.setFloat("value", value);
assertEquals(value, configuration.getFloat("value", 0.0F));
assertEquals(value, configuration.getFloat("value", 0.0F), DOUBLE_DELTA);
}
@Test
public void testDouble() {
double value = 1.0D;
Configuration configuration = new Configuration();
configuration.setDouble("value", value);
assertEquals(value, configuration.getDouble("value", 0.0D));
assertEquals(value, configuration.getDouble("value", 0.0D), DOUBLE_DELTA);
}
@Test
public void testInt() {
int value = 1;
Configuration configuration = new Configuration();
@ -1968,6 +2041,7 @@ public class TestConfiguration extends TestCase {
assertEquals(value, configuration.getInt("value", 0));
}
@Test
public void testLong() {
long value = 1L;
Configuration configuration = new Configuration();
@ -1975,16 +2049,18 @@ public class TestConfiguration extends TestCase {
assertEquals(value, configuration.getLong("value", 0L));
}
@Test
public void testStrings() {
String [] strings = {"FOO","BAR"};
Configuration configuration = new Configuration();
configuration.setStrings("strings", strings);
String [] returnStrings = configuration.getStrings("strings");
for(int i=0;i<returnStrings.length;i++) {
assertEquals(strings[i], returnStrings[i]);
assertEquals(strings[i], returnStrings[i]);
}
}
@Test
public void testSetPattern() {
Pattern testPattern = Pattern.compile("a+b");
Configuration configuration = new Configuration();
@ -1993,12 +2069,14 @@ public class TestConfiguration extends TestCase {
configuration.getPattern("testPattern", Pattern.compile("")).pattern());
}
@Test
public void testGetClassByNameOrNull() throws Exception {
Configuration config = new Configuration();
Class<?> clazz = config.getClassByNameOrNull("java.lang.Object");
assertNotNull(clazz);
Configuration config = new Configuration();
Class<?> clazz = config.getClassByNameOrNull("java.lang.Object");
assertNotNull(clazz);
}
@Test
public void testGetFinalParameters() throws Exception {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -2021,6 +2099,7 @@ public class TestConfiguration extends TestCase {
* by SPARK-2546.
* @throws Exception
*/
@Test
public void testConcurrentAccesses() throws Exception {
out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@ -2061,6 +2140,7 @@ public class TestConfiguration extends TestCase {
// it's expected behaviour.
}
@Test
public void testNullValueProperties() throws Exception {
Configuration conf = new Configuration();
conf.setAllowNullValueProperties(true);
@ -2074,6 +2154,7 @@ public class TestConfiguration extends TestCase {
assertEquals("value", conf.get("attr"));
}
@Test
public void testGetPasswordDeprecatedKeyStored() throws Exception {
final String oldKey = "test.password.old.key";
final String newKey = "test.password.new.key";
@ -2093,14 +2174,15 @@ public class TestConfiguration extends TestCase {
Configuration.addDeprecation(oldKey, newKey);
Assert.assertThat(conf.getPassword(newKey),
assertThat(conf.getPassword(newKey),
CoreMatchers.is(password.toCharArray()));
Assert.assertThat(conf.getPassword(oldKey),
assertThat(conf.getPassword(oldKey),
CoreMatchers.is(password.toCharArray()));
FileUtil.fullyDelete(tmpDir);
}
@Test
public void testGetPasswordByDeprecatedKey() throws Exception {
final String oldKey = "test.password.old.key";
final String newKey = "test.password.new.key";
@ -2120,9 +2202,9 @@ public class TestConfiguration extends TestCase {
Configuration.addDeprecation(oldKey, newKey);
Assert.assertThat(conf.getPassword(newKey),
assertThat(conf.getPassword(newKey),
CoreMatchers.is(password.toCharArray()));
Assert.assertThat(conf.getPassword(oldKey),
assertThat(conf.getPassword(oldKey),
CoreMatchers.is(password.toCharArray()));
FileUtil.fullyDelete(tmpDir);

View File

@ -17,7 +17,8 @@
*/
package org.apache.hadoop.conf;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import java.util.Properties;
@ -25,11 +26,12 @@ import java.util.Properties;
* Created 21-Jan-2009 13:42:36
*/
public class TestConfigurationSubclass extends TestCase {
public class TestConfigurationSubclass {
private static final String EMPTY_CONFIGURATION_XML
= "/org/apache/hadoop/conf/empty-configuration.xml";
@Test
public void testGetProps() {
SubConf conf = new SubConf(true);
Properties properties = conf.getProperties();
@ -37,6 +39,7 @@ public class TestConfigurationSubclass extends TestCase {
properties.getProperty("hadoop.tmp.dir"));
}
@Test
public void testReload() throws Throwable {
SubConf conf = new SubConf(true);
assertFalse(conf.isReloaded());
@ -45,6 +48,7 @@ public class TestConfigurationSubclass extends TestCase {
Properties properties = conf.getProperties();
}
@Test
public void testReloadNotQuiet() throws Throwable {
SubConf conf = new SubConf(true);
conf.setQuietMode(false);

View File

@ -21,15 +21,14 @@ package org.apache.hadoop.conf;
import java.io.ByteArrayOutputStream;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.junit.Test;
import static org.junit.Assert.*;
import junit.framework.TestCase;
public class TestDeprecatedKeys extends TestCase {
public class TestDeprecatedKeys {
//Tests a deprecated key
@Test
public void testDeprecatedKeys() throws Exception {
Configuration conf = new Configuration();
conf.set("topology.script.file.name", "xyz");
@ -39,6 +38,7 @@ public class TestDeprecatedKeys extends TestCase {
}
//Tests reading / writing a conf file with deprecation after setting
@Test
public void testReadWriteWithDeprecatedKeys() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean("old.config.yet.to.be.deprecated", true);

View File

@ -18,10 +18,11 @@
package org.apache.hadoop.conf;
import java.util.List;
import org.junit.Test;
import junit.framework.TestCase;
import static org.junit.Assert.*;
public class TestGetInstances extends TestCase {
public class TestGetInstances {
interface SampleInterface {}
@ -39,6 +40,7 @@ public class TestGetInstances extends TestCase {
* Makes sure <code>Configuration.getInstances()</code> returns
* instances of the required type.
*/
@Test
public void testGetInstances() throws Exception {
Configuration conf = new Configuration();

View File

@ -24,9 +24,10 @@ import java.io.OutputStreamWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestAvroFSInput extends TestCase {
public class TestAvroFSInput {
private static final String INPUT_DIR = "AvroFSInput";
@ -34,6 +35,7 @@ public class TestAvroFSInput extends TestCase {
return new Path(GenericTestUtils.getTempPath(INPUT_DIR));
}
@Test
public void testAFSInput() throws Exception {
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getLocal(conf);

View File

@ -17,7 +17,10 @@
*/
package org.apache.hadoop.fs;
import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
@ -29,16 +32,16 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.test.GenericTestUtils;
/** This test makes sure that "DU" does not get to run on each call to getUsed */
public class TestDU extends TestCase {
public class TestDU {
final static private File DU_DIR = GenericTestUtils.getTestDir("dutmp");
@Override
@Before
public void setUp() {
FileUtil.fullyDelete(DU_DIR);
assertTrue(DU_DIR.mkdirs());
FileUtil.fullyDelete(DU_DIR);
assertTrue(DU_DIR.mkdirs());
}
@Override
@After
public void tearDown() throws IOException {
FileUtil.fullyDelete(DU_DIR);
}
@ -69,6 +72,7 @@ public class TestDU extends TestCase {
* @throws IOException
* @throws InterruptedException
*/
@Test
public void testDU() throws IOException, InterruptedException {
final int writtenSize = 32*1024; // writing 32K
// Allow for extra 4K on-disk slack for local file systems
@ -107,6 +111,8 @@ public class TestDU extends TestCase {
duSize >= writtenSize &&
writtenSize <= (duSize + slack));
}
@Test
public void testDUGetUsedWillNotReturnNegative() throws IOException {
File file = new File(DU_DIR, "data");
assertTrue(file.createNewFile());
@ -118,6 +124,7 @@ public class TestDU extends TestCase {
assertTrue(String.valueOf(duSize), duSize >= 0L);
}
@Test
public void testDUSetInitialValue() throws IOException {
File file = new File(DU_DIR, "dataX");
createFile(file, 8192);

View File

@ -23,12 +23,12 @@ import java.lang.reflect.Modifier;
import java.net.URI;
import java.util.Iterator;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.viewfs.ConfigUtil;
import org.junit.Test;
public class TestFilterFs extends TestCase {
public class TestFilterFs {
private static final Log LOG = FileSystem.LOG;
@ -42,6 +42,7 @@ public class TestFilterFs extends TestCase {
}
}
@Test
public void testFilterFileSystem() throws Exception {
for (Method m : AbstractFileSystem.class.getDeclaredMethods()) {
if (Modifier.isStatic(m.getModifiers()))
@ -69,6 +70,7 @@ public class TestFilterFs extends TestCase {
// Test that FilterFs will accept an AbstractFileSystem to be filtered which
// has an optional authority, such as ViewFs
@Test
public void testFilteringWithNonrequiredAuthority() throws Exception {
Configuration conf = new Configuration();
ConfigUtil.addLink(conf, "custom", "/mnt", URI.create("file:///"));

View File

@ -22,7 +22,10 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.Random;
import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
@ -30,7 +33,7 @@ import org.apache.hadoop.test.GenericTestUtils;
/**
* Testing the correctness of FileSystem.getFileBlockLocations.
*/
public class TestGetFileBlockLocations extends TestCase {
public class TestGetFileBlockLocations {
private static String TEST_ROOT_DIR = GenericTestUtils.getTempPath(
"testGetFileBlockLocations");
private static final int FileLength = 4 * 1024 * 1024; // 4MB
@ -39,11 +42,8 @@ public class TestGetFileBlockLocations extends TestCase {
private FileSystem fs;
private Random random;
/**
* @see TestCase#setUp()
*/
@Override
protected void setUp() throws IOException {
@Before
public void setUp() throws IOException {
conf = new Configuration();
Path rootPath = new Path(TEST_ROOT_DIR);
path = new Path(rootPath, "TestGetFileBlockLocations");
@ -91,15 +91,14 @@ public class TestGetFileBlockLocations extends TestCase {
assertTrue(locations.length == 0);
}
}
/**
* @see TestCase#tearDown()
*/
@Override
protected void tearDown() throws IOException {
@After
public void tearDown() throws IOException {
fs.delete(path, true);
fs.close();
}
@Test
public void testFailureNegativeParameters() throws IOException {
FileStatus status = fs.getFileStatus(path);
try {
@ -117,6 +116,7 @@ public class TestGetFileBlockLocations extends TestCase {
}
}
@Test
public void testGetFileBlockLocations1() throws IOException {
FileStatus status = fs.getFileStatus(path);
oneTest(0, (int) status.getLen(), status);
@ -130,6 +130,7 @@ public class TestGetFileBlockLocations extends TestCase {
}
}
@Test
public void testGetFileBlockLocations2() throws IOException {
FileStatus status = fs.getFileStatus(path);
for (int i = 0; i < 1000; ++i) {

View File

@ -20,10 +20,12 @@ package org.apache.hadoop.fs;
import java.io.IOException;
import java.util.List;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestGlobExpander extends TestCase {
public class TestGlobExpander {
@Test
public void testExpansionIsIdentical() throws IOException {
checkExpansionIsIdentical("");
checkExpansionIsIdentical("/}");
@ -35,6 +37,7 @@ public class TestGlobExpander extends TestCase {
checkExpansionIsIdentical("p{a\\/b,c\\/d}s");
}
@Test
public void testExpansion() throws IOException {
checkExpansion("{a/b}", "a/b");
checkExpansion("/}{a/b}", "/}a/b");

View File

@ -33,20 +33,21 @@ import java.util.Random;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.TrashPolicyDefault.Emptier;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.junit.Before;
import org.junit.Test;
/**
* This class tests commands from Trash.
*/
public class TestTrash extends TestCase {
public class TestTrash {
private final static Path TEST_DIR = new Path(GenericTestUtils.getTempPath(
"testTrash"));
@ -507,12 +508,14 @@ public class TestTrash extends TestCase {
}
}
@Test
public void testTrash() throws IOException {
Configuration conf = new Configuration();
conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
trashShell(FileSystem.getLocal(conf), TEST_DIR);
}
@Test
public void testNonDefaultFS() throws IOException {
Configuration conf = new Configuration();
conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
@ -520,6 +523,7 @@ public class TestTrash extends TestCase {
trashNonDefaultFS(conf);
}
@Test
public void testPluggableTrash() throws IOException {
Configuration conf = new Configuration();
@ -604,6 +608,7 @@ public class TestTrash extends TestCase {
verifyTrashPermission(FileSystem.getLocal(conf), conf);
}
@Test
public void testTrashEmptier() throws Exception {
Configuration conf = new Configuration();
// Trash with 12 second deletes and 6 seconds checkpoints
@ -666,11 +671,8 @@ public class TestTrash extends TestCase {
emptierThread.join();
}
/**
* @see TestCase#tearDown()
*/
@Override
protected void tearDown() throws IOException {
@After
public void tearDown() throws IOException {
File trashDir = new File(TEST_DIR.toUri().getPath());
if (trashDir.exists() && !FileUtil.fullyDelete(trashDir)) {
throw new IOException("Cannot remove data directory: " + trashDir);

View File

@ -20,16 +20,17 @@ package org.apache.hadoop.fs;
import java.io.DataOutputStream;
import java.io.IOException;
import junit.framework.TestCase;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;
/**
* test for the input truncation bug when mark/reset is used.
* HADOOP-1489
*/
public class TestTruncatedInputBug extends TestCase {
public class TestTruncatedInputBug {
private static String TEST_ROOT_DIR =
GenericTestUtils.getTestDir().getAbsolutePath();
@ -49,6 +50,7 @@ public class TestTruncatedInputBug extends TestCase {
* checksum file system currently depends on the request size
* >= bytesPerSum to work properly.
*/
@Test
public void testTruncatedInputBug() throws IOException {
final int ioBufSize = 512;
final int fileSize = ioBufSize*4;

View File

@ -21,11 +21,14 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.apache.hadoop.fs.permission.FsAction.*;
public class TestFsPermission extends TestCase {
public class TestFsPermission {
@Test
public void testFsAction() {
//implies
for(FsAction a : FsAction.values()) {
@ -53,6 +56,7 @@ public class TestFsPermission extends TestCase {
* Ensure that when manually specifying permission modes we get
* the expected values back out for all combinations
*/
@Test
public void testConvertingPermissions() {
for(short s = 0; s <= 01777; s++) {
assertEquals(s, new FsPermission(s).toShort());
@ -80,6 +84,7 @@ public class TestFsPermission extends TestCase {
assertEquals(02000, s);
}
@Test
public void testSpecialBitsToString() {
for (boolean sb : new boolean[] { false, true }) {
for (FsAction u : FsAction.values()) {
@ -106,6 +111,7 @@ public class TestFsPermission extends TestCase {
}
}
@Test
public void testFsPermission() {
String symbolic = "-rwxrwxrwx";
@ -132,6 +138,7 @@ public class TestFsPermission extends TestCase {
}
}
@Test
public void testSymbolicPermission() {
for (int i = 0; i < SYMBOLIC.length; ++i) {
short val = 0777;
@ -146,6 +153,7 @@ public class TestFsPermission extends TestCase {
}
}
@Test
public void testUMaskParser() throws IOException {
Configuration conf = new Configuration();
@ -163,6 +171,7 @@ public class TestFsPermission extends TestCase {
}
}
@Test
public void testSymbolicUmasks() {
Configuration conf = new Configuration();
@ -176,6 +185,7 @@ public class TestFsPermission extends TestCase {
assertEquals(0111, FsPermission.getUMask(conf).toShort());
}
@Test
public void testBadUmasks() {
Configuration conf = new Configuration();

View File

@ -25,7 +25,9 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.times;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import javax.management.MBeanServer;
import javax.management.ObjectName;
@ -39,13 +41,12 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Test;
import org.mockito.Mockito;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
public class TestFairCallQueue extends TestCase {
public class TestFairCallQueue {
private FairCallQueue<Schedulable> fcq;
private Schedulable mockCall(String id, int priority) {
@ -65,6 +66,7 @@ public class TestFairCallQueue extends TestCase {
}
@SuppressWarnings("deprecation")
@Before
public void setUp() {
Configuration conf = new Configuration();
conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
@ -74,6 +76,7 @@ public class TestFairCallQueue extends TestCase {
// Validate that the total capacity of all subqueues equals
// the maxQueueSize for different values of maxQueueSize
@Test
public void testTotalCapacityOfSubQueues() {
Configuration conf = new Configuration();
FairCallQueue<Schedulable> fairCallQueue;
@ -291,11 +294,12 @@ public class TestFairCallQueue extends TestCase {
//
// Ensure that FairCallQueue properly implements BlockingQueue
//
@Test
public void testPollReturnsNullWhenEmpty() {
assertNull(fcq.poll());
}
@Test
public void testPollReturnsTopCallWhenNotEmpty() {
Schedulable call = mockCall("c");
assertTrue(fcq.offer(call));
@ -306,6 +310,7 @@ public class TestFairCallQueue extends TestCase {
assertEquals(0, fcq.size());
}
@Test
public void testOfferSucceeds() {
for (int i = 0; i < 5; i++) {
@ -316,6 +321,7 @@ public class TestFairCallQueue extends TestCase {
assertEquals(5, fcq.size());
}
@Test
public void testOfferFailsWhenFull() {
for (int i = 0; i < 5; i++) { assertTrue(fcq.offer(mockCall("c"))); }
@ -324,6 +330,7 @@ public class TestFairCallQueue extends TestCase {
assertEquals(5, fcq.size());
}
@Test
public void testOfferSucceedsWhenScheduledLowPriority() {
// Scheduler will schedule into queue 0 x 5, then queue 1
int mockedPriorities[] = {0, 0, 0, 0, 0, 1, 0};
@ -334,10 +341,12 @@ public class TestFairCallQueue extends TestCase {
assertEquals(6, fcq.size());
}
@Test
public void testPeekNullWhenEmpty() {
assertNull(fcq.peek());
}
@Test
public void testPeekNonDestructive() {
Schedulable call = mockCall("c", 0);
assertTrue(fcq.offer(call));
@ -347,6 +356,7 @@ public class TestFairCallQueue extends TestCase {
assertEquals(1, fcq.size());
}
@Test
public void testPeekPointsAtHead() {
Schedulable call = mockCall("c", 0);
Schedulable next = mockCall("b", 0);
@ -356,10 +366,12 @@ public class TestFairCallQueue extends TestCase {
assertEquals(call, fcq.peek()); // Peek points at the head
}
@Test
public void testPollTimeout() throws InterruptedException {
assertNull(fcq.poll(10, TimeUnit.MILLISECONDS));
}
@Test
public void testPollSuccess() throws InterruptedException {
Schedulable call = mockCall("c", 0);
assertTrue(fcq.offer(call));
@ -369,6 +381,7 @@ public class TestFairCallQueue extends TestCase {
assertEquals(0, fcq.size());
}
@Test
public void testOfferTimeout() throws InterruptedException {
for (int i = 0; i < 5; i++) {
assertTrue(fcq.offer(mockCall("c"), 10, TimeUnit.MILLISECONDS));
@ -380,6 +393,7 @@ public class TestFairCallQueue extends TestCase {
}
@SuppressWarnings("deprecation")
@Test
public void testDrainTo() {
Configuration conf = new Configuration();
conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
@ -397,6 +411,7 @@ public class TestFairCallQueue extends TestCase {
}
@SuppressWarnings("deprecation")
@Test
public void testDrainToWithLimit() {
Configuration conf = new Configuration();
conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
@ -413,16 +428,19 @@ public class TestFairCallQueue extends TestCase {
assertEquals(2, fcq2.size());
}
@Test
public void testInitialRemainingCapacity() {
assertEquals(10, fcq.remainingCapacity());
}
@Test
public void testFirstQueueFullRemainingCapacity() {
while (fcq.offer(mockCall("c"))) ; // Queue 0 will fill up first, then queue 1
assertEquals(5, fcq.remainingCapacity());
}
@Test
public void testAllQueuesFullRemainingCapacity() {
int[] mockedPriorities = {0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0};
int i = 0;
@ -432,6 +450,7 @@ public class TestFairCallQueue extends TestCase {
assertEquals(10, fcq.size());
}
@Test
public void testQueuesPartialFilledRemainingCapacity() {
int[] mockedPriorities = {0, 1, 0, 1, 0};
for (int i = 0; i < 5; i++) { fcq.offer(mockCall("c", mockedPriorities[i])); }
@ -555,12 +574,14 @@ public class TestFairCallQueue extends TestCase {
}
// Make sure put will overflow into lower queues when the top is full
@Test
public void testPutOverflows() throws InterruptedException {
// We can fit more than 5, even though the scheduler suggests the top queue
assertCanPut(fcq, 8, 8);
assertEquals(8, fcq.size());
}
@Test
public void testPutBlocksWhenAllFull() throws InterruptedException {
assertCanPut(fcq, 10, 10); // Fill up
assertEquals(10, fcq.size());
@ -569,10 +590,12 @@ public class TestFairCallQueue extends TestCase {
assertCanPut(fcq, 0, 1); // Will block
}
@Test
public void testTakeBlocksWhenEmpty() throws InterruptedException {
assertCanTake(fcq, 0, 1);
}
@Test
public void testTakeRemovesCall() throws InterruptedException {
Schedulable call = mockCall("c");
fcq.offer(call);
@ -581,6 +604,7 @@ public class TestFairCallQueue extends TestCase {
assertEquals(0, fcq.size());
}
@Test
public void testTakeTriesNextQueue() throws InterruptedException {
// A mux which only draws from q 0
@ -597,6 +621,7 @@ public class TestFairCallQueue extends TestCase {
assertEquals(0, fcq.size());
}
@Test
public void testFairCallQueueMXBean() throws Exception {
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
ObjectName mxbeanName = new ObjectName(

View File

@ -20,7 +20,8 @@ package org.apache.hadoop.log;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ContainerNode;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.Time;
@ -33,7 +34,6 @@ import org.apache.log4j.spi.HierarchyEventListener;
import org.apache.log4j.spi.LoggerFactory;
import org.apache.log4j.spi.LoggerRepository;
import org.apache.log4j.spi.ThrowableInformation;
import org.junit.Test;
import java.io.IOException;
import java.io.StringWriter;
@ -42,7 +42,7 @@ import java.net.NoRouteToHostException;
import java.util.Enumeration;
import java.util.Vector;
public class TestLog4Json extends TestCase {
public class TestLog4Json {
private static final Log LOG = LogFactory.getLog(TestLog4Json.class);

View File

@ -19,15 +19,12 @@ package org.apache.hadoop.net;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
import org.junit.Test;
public class TestScriptBasedMapping extends TestCase {
public class TestScriptBasedMapping {
public TestScriptBasedMapping() {

View File

@ -19,13 +19,12 @@ package org.apache.hadoop.net;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
import org.junit.Test;
public class TestScriptBasedMappingWithDependency extends TestCase {
public class TestScriptBasedMappingWithDependency {
public TestScriptBasedMappingWithDependency() {

View File

@ -17,19 +17,21 @@
package org.apache.hadoop.security;
import junit.framework.TestCase;
import static org.junit.Assert.*;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.Map;
public class TestAuthenticationFilter extends TestCase {
public class TestAuthenticationFilter {
@SuppressWarnings("unchecked")
@Test
public void testConfiguration() throws Exception {
Configuration conf = new Configuration();
conf.set("hadoop.http.authentication.foo", "bar");

View File

@ -16,8 +16,8 @@
*/
package org.apache.hadoop.security;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
@ -30,9 +30,10 @@ import java.util.Map;
* This class is tested for {@link AuthenticationWithProxyUserFilter}
* to verify configurations of this filter.
*/
public class TestAuthenticationWithProxyUserFilter extends TestCase {
public class TestAuthenticationWithProxyUserFilter {
@SuppressWarnings("unchecked")
@Test
public void testConfiguration() throws Exception {
Configuration conf = new Configuration();
conf.set("hadoop.http.authentication.foo", "bar");

View File

@ -21,17 +21,18 @@ import java.io.IOException;
import java.net.InetAddress;
import java.util.Map;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.WhitelistBasedResolver;
import org.apache.hadoop.util.TestFileBasedIPList;
public class TestWhitelistBasedResolver extends TestCase {
public class TestWhitelistBasedResolver {
public static final Map<String, String> SASL_PRIVACY_PROPS =
WhitelistBasedResolver.getSaslProperties(new Configuration());
@Test
public void testFixedVariableAndLocalWhiteList() throws IOException {
String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};
@ -79,6 +80,7 @@ public class TestWhitelistBasedResolver extends TestCase {
* Check for inclusion in whitelist
* Check for exclusion from whitelist
*/
@Test
public void testFixedAndLocalWhiteList() throws IOException {
String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};
@ -128,6 +130,7 @@ public class TestWhitelistBasedResolver extends TestCase {
* Add a bunch of subnets and IPs to the whitelist
* Check for inclusion in whitelist with a null value
*/
@Test
public void testNullIPAddress() throws IOException {
String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};

View File

@ -25,11 +25,12 @@ import org.apache.hadoop.io.*;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenSecretManager;
import org.junit.Test;
import junit.framework.TestCase;
import static org.junit.Assert.*;
/** Unit tests for Token */
public class TestToken extends TestCase {
public class TestToken {
static boolean isEqual(Object a, Object b) {
return a == null ? b == null : a.equals(b);
@ -45,6 +46,7 @@ public class TestToken extends TestCase {
/**
* Test token serialization
*/
@Test
public void testTokenSerialization() throws IOException {
// Get a token
Token<TokenIdentifier> sourceToken = new Token<TokenIdentifier>();
@ -76,7 +78,8 @@ public class TestToken extends TestCase {
}
}
public static void testEncodeWritable() throws Exception {
@Test
public void testEncodeWritable() throws Exception {
String[] values = new String[]{"", "a", "bb", "ccc", "dddd", "eeeee",
"ffffff", "ggggggg", "hhhhhhhh", "iiiiiiiii",
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLM" +
@ -97,6 +100,7 @@ public class TestToken extends TestCase {
}
}
@Test
public void testDecodeIdentifier() throws IOException {
TestDelegationTokenSecretManager secretManager =
new TestDelegationTokenSecretManager(0, 0, 0, 0);

View File

@ -17,17 +17,15 @@
*/
package org.apache.hadoop.util;
import junit.framework.TestCase;
import org.apache.hadoop.util.AsyncDiskService;
import org.junit.Test;
import static org.junit.Assert.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A test for AsyncDiskService.
*/
public class TestAsyncDiskService extends TestCase {
public class TestAsyncDiskService {
public static final Logger LOG =
LoggerFactory.getLogger(TestAsyncDiskService.class);

View File

@ -18,14 +18,11 @@
package org.apache.hadoop.util;
import java.io.IOException;
import org.junit.Test;
import org.apache.hadoop.util.CacheableIPList;
import org.apache.hadoop.util.FileBasedIPList;
import static org.junit.Assert.*;
import junit.framework.TestCase;
public class TestCacheableIPList extends TestCase {
public class TestCacheableIPList {
/**
* Add a bunch of subnets and IPs to the file
@ -37,6 +34,7 @@ public class TestCacheableIPList extends TestCase {
* test for inclusion
* Check for exclusion
*/
@Test
public void testAddWithSleepForCacheTimeout() throws IOException, InterruptedException {
String[] ips = {"10.119.103.112", "10.221.102.0/23", "10.113.221.221"};
@ -76,6 +74,7 @@ public class TestCacheableIPList extends TestCase {
* test for inclusion
* Check for exclusion
*/
@Test
public void testRemovalWithSleepForCacheTimeout() throws IOException, InterruptedException {
String[] ips = {"10.119.103.112", "10.221.102.0/23",
@ -115,6 +114,7 @@ public class TestCacheableIPList extends TestCase {
* test for inclusion
* Check for exclusion
*/
@Test
public void testAddWithRefresh() throws IOException, InterruptedException {
String[] ips = {"10.119.103.112", "10.221.102.0/23", "10.113.221.221"};
@ -154,6 +154,7 @@ public class TestCacheableIPList extends TestCase {
* test for inclusion
* Check for exclusion
*/
@Test
public void testRemovalWithRefresh() throws IOException, InterruptedException {
String[] ips = {"10.119.103.112", "10.221.102.0/23",

View File

@ -22,14 +22,11 @@ import java.io.IOException;
import java.util.Arrays;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.util.FileBasedIPList;
import org.apache.hadoop.util.IPList;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.*;
import junit.framework.TestCase;
public class TestFileBasedIPList extends TestCase {
public class TestFileBasedIPList {
@After
public void tearDown() {
@ -127,6 +124,7 @@ public class TestFileBasedIPList extends TestCase {
* test for inclusion
* should be true as if the feature is turned off
*/
@Test
public void testFileNotSpecified() {
IPList ipl = new FileBasedIPList(null);
@ -140,6 +138,7 @@ public class TestFileBasedIPList extends TestCase {
* test for inclusion
* should be true as if the feature is turned off
*/
@Test
public void testFileMissing() {
IPList ipl = new FileBasedIPList("missingips.txt");
@ -153,6 +152,7 @@ public class TestFileBasedIPList extends TestCase {
* test for inclusion
* should be true as if the feature is turned off
*/
@Test
public void testWithEmptyList() throws IOException {
String[] ips = {};
@ -168,6 +168,7 @@ public class TestFileBasedIPList extends TestCase {
* test for inclusion
* should be true as if the feature is turned off
*/
@Test
public void testForBadFIle() throws IOException {
String[] ips = { "10.221.102/23"};
@ -187,6 +188,7 @@ public class TestFileBasedIPList extends TestCase {
* Check for inclusion with good entries
* Check for exclusion
*/
@Test
public void testWithAWrongEntry() throws IOException {
String[] ips = {"10.119.103.112", "10.221.102/23", "10.221.204.1/23"};

View File

@ -19,7 +19,7 @@ package org.apache.hadoop.util;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.util.FindClass;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Test;

View File

@ -21,12 +21,14 @@ package org.apache.hadoop.util;
import java.util.ArrayList;
import java.util.List;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
public class TestGenericsUtil extends TestCase {
public class TestGenericsUtil {
@Test
public void testToArray() {
//test a list of size 10
@ -45,6 +47,7 @@ public class TestGenericsUtil extends TestCase {
}
}
@Test
public void testWithEmptyList() {
try {
List<String> list = new ArrayList<String>();
@ -57,6 +60,7 @@ public class TestGenericsUtil extends TestCase {
}
}
@Test
public void testWithEmptyList2() {
List<String> list = new ArrayList<String>();
//this method should not throw IndexOutOfBoundsException
@ -81,6 +85,7 @@ public class TestGenericsUtil extends TestCase {
}
}
@Test
public void testWithGenericClass() {
GenericClass<String> testSubject = new GenericClass<String>();
@ -102,6 +107,7 @@ public class TestGenericsUtil extends TestCase {
}
@Test
public void testGenericOptionsParser() throws Exception {
GenericOptionsParser parser = new GenericOptionsParser(
new Configuration(), new String[] {"-jt"});
@ -116,6 +122,7 @@ public class TestGenericsUtil extends TestCase {
"y=z", parser.getConfiguration().get("x"));
}
@Test
public void testGetClass() {
//test with Integer
@ -131,6 +138,7 @@ public class TestGenericsUtil extends TestCase {
GenericClass.class, c2);
}
@Test
public void testIsLog4jLogger() throws Exception {
assertFalse("False if clazz is null", GenericsUtil.isLog4jLogger(null));
assertTrue("The implementation is Log4j",

View File

@ -21,14 +21,15 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;
public class TestIndexedSort extends TestCase {
public class TestIndexedSort {
public void sortAllEqual(IndexedSorter sorter) throws Exception {
final int SAMPLE = 500;
@ -128,6 +129,7 @@ public class TestIndexedSort extends TestCase {
}
@Test
public void testQuickSort() throws Exception {
QuickSort sorter = new QuickSort();
sortRandom(sorter);
@ -158,6 +160,7 @@ public class TestIndexedSort extends TestCase {
assertTrue(Arrays.equals(values, check));
}
@Test
public void testHeapSort() throws Exception {
HeapSort sorter = new HeapSort();
sortRandom(sorter);

View File

@ -19,13 +19,13 @@ package org.apache.hadoop.util;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.util.ExitUtil.ExitException;
import org.junit.Test;
public class TestNativeLibraryChecker extends TestCase {
public class TestNativeLibraryChecker {
private void expectExit(String [] args) {
try {
// should throw exit exception

View File

@ -28,7 +28,8 @@ import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import org.junit.Assert;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -49,8 +50,6 @@ import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.junit.Test;
/**
* Tests the use of the
* {@link org.apache.hadoop.mapreduce.filecache.DistributedCache} within the
@ -66,7 +65,7 @@ import org.junit.Test;
* This test is not fast: it uses MiniMRCluster.
*/
@SuppressWarnings("deprecation")
public class TestMRWithDistributedCache extends TestCase {
public class TestMRWithDistributedCache {
private static Path TEST_ROOT_DIR =
new Path(System.getProperty("test.build.data","/tmp"));
private static File symlinkFile = new File("distributed.first.symlink");
@ -97,23 +96,23 @@ public class TestMRWithDistributedCache extends TestCase {
FileSystem fs = LocalFileSystem.get(conf);
// Check that 2 files and 2 archives are present
TestCase.assertEquals(2, localFiles.length);
TestCase.assertEquals(2, localArchives.length);
TestCase.assertEquals(2, files.length);
TestCase.assertEquals(2, archives.length);
Assert.assertEquals(2, localFiles.length);
Assert.assertEquals(2, localArchives.length);
Assert.assertEquals(2, files.length);
Assert.assertEquals(2, archives.length);
// Check the file name
TestCase.assertTrue(files[0].getPath().endsWith("distributed.first"));
TestCase.assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
Assert.assertTrue(files[0].getPath().endsWith("distributed.first"));
Assert.assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
// Check lengths of the files
TestCase.assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
TestCase.assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
Assert.assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
Assert.assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
// Check extraction of the archive
TestCase.assertTrue(fs.exists(new Path(localArchives[0],
Assert.assertTrue(fs.exists(new Path(localArchives[0],
"distributed.jar.inside3")));
TestCase.assertTrue(fs.exists(new Path(localArchives[1],
Assert.assertTrue(fs.exists(new Path(localArchives[1],
"distributed.jar.inside4")));
// Check the class loaders
@ -121,18 +120,18 @@ public class TestMRWithDistributedCache extends TestCase {
ClassLoader cl = Thread.currentThread().getContextClassLoader();
// Both the file and the archive were added to classpath, so both
// should be reachable via the class loader.
TestCase.assertNotNull(cl.getResource("distributed.jar.inside2"));
TestCase.assertNotNull(cl.getResource("distributed.jar.inside3"));
TestCase.assertNull(cl.getResource("distributed.jar.inside4"));
Assert.assertNotNull(cl.getResource("distributed.jar.inside2"));
Assert.assertNotNull(cl.getResource("distributed.jar.inside3"));
Assert.assertNull(cl.getResource("distributed.jar.inside4"));
// Check that the symlink for the renaming was created in the cwd;
TestCase.assertTrue("symlink distributed.first.symlink doesn't exist",
Assert.assertTrue("symlink distributed.first.symlink doesn't exist",
symlinkFile.exists());
TestCase.assertEquals("symlink distributed.first.symlink length not 1", 1,
Assert.assertEquals("symlink distributed.first.symlink length not 1", 1,
symlinkFile.length());
//This last one is a difference between MRv2 and MRv1
TestCase.assertTrue("second file should be symlinked too",
Assert.assertTrue("second file should be symlinked too",
expectedAbsentSymlinkFile.exists());
}
@ -188,6 +187,7 @@ public class TestMRWithDistributedCache extends TestCase {
}
/** Tests using the local job runner. */
@Test
public void testLocalJobRunner() throws Exception {
symlinkFile.delete(); // ensure symlink is not present (e.g. if test is
// killed part way through)

View File

@ -23,7 +23,8 @@ import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Assert;
import org.apache.hadoop.fs.FileStatus;
@ -38,7 +39,7 @@ import org.apache.hadoop.io.Text;
@SuppressWarnings("unchecked")
public class TestFileOutputCommitter extends TestCase {
public class TestFileOutputCommitter {
private static Path outDir = new Path(System.getProperty("test.build.data",
"/tmp"), "output");
@ -153,14 +154,18 @@ public class TestFileOutputCommitter extends TestCase {
validateContent(outDir);
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testRecoveryV1() throws Exception {
testRecoveryInternal(1, 1);
}
@Test
public void testRecoveryV2() throws Exception {
testRecoveryInternal(2, 2);
}
@Test
public void testRecoveryUpgradeV1V2() throws Exception {
testRecoveryInternal(1, 2);
}
@ -203,11 +208,13 @@ public class TestFileOutputCommitter extends TestCase {
assert(dataFileFound && indexFileFound);
}
@Test
public void testCommitterWithFailureV1() throws Exception {
testCommitterWithFailureInternal(1, 1);
testCommitterWithFailureInternal(1, 2);
}
@Test
public void testCommitterWithFailureV2() throws Exception {
testCommitterWithFailureInternal(2, 1);
testCommitterWithFailureInternal(2, 2);
@ -256,10 +263,12 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testCommitterWithDuplicatedCommitV1() throws Exception {
testCommitterWithDuplicatedCommitInternal(1);
}
@Test
public void testCommitterWithDuplicatedCommitV2() throws Exception {
testCommitterWithDuplicatedCommitInternal(2);
}
@ -340,10 +349,12 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testCommitterV1() throws Exception {
testCommitterInternal(1);
}
@Test
public void testCommitterV2() throws Exception {
testCommitterInternal(2);
}
@ -380,18 +391,22 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testMapFileOutputCommitterV1() throws Exception {
testMapFileOutputCommitterInternal(1);
}
@Test
public void testMapFileOutputCommitterV2() throws Exception {
testMapFileOutputCommitterInternal(2);
}
@Test
public void testMapOnlyNoOutputV1() throws Exception {
testMapOnlyNoOutputInternal(1);
}
@Test
public void testMapOnlyNoOutputV2() throws Exception {
testMapOnlyNoOutputInternal(2);
}
@ -456,10 +471,12 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(out);
}
@Test
public void testAbortV1() throws Exception {
testAbortInternal(1);
}
@Test
public void testAbortV2() throws Exception {
testAbortInternal(2);
}
@ -537,10 +554,12 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testFailAbortV1() throws Exception {
testFailAbortInternal(1);
}
@Test
public void testFailAbortV2() throws Exception {
testFailAbortInternal(2);
}

View File

@ -32,14 +32,16 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestIndexCache extends TestCase {
public class TestIndexCache {
private JobConf conf;
private FileSystem fs;
private Path p;
@Override
@Before
public void setUp() throws IOException {
conf = new JobConf();
fs = FileSystem.getLocal(conf).getRaw();
@ -47,6 +49,7 @@ public class TestIndexCache extends TestCase {
"cache").makeQualified(fs.getUri(), fs.getWorkingDirectory());
}
@Test
public void testLRCPolicy() throws Exception {
Random r = new Random();
long seed = r.nextLong();
@ -120,6 +123,7 @@ public class TestIndexCache extends TestCase {
checkRecord(rec, totalsize);
}
@Test
public void testBadIndex() throws Exception {
final int parts = 30;
fs.delete(p, true);
@ -152,6 +156,7 @@ public class TestIndexCache extends TestCase {
}
}
@Test
public void testInvalidReduceNumberOrLength() throws Exception {
fs.delete(p, true);
conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
@ -192,6 +197,7 @@ public class TestIndexCache extends TestCase {
}
}
@Test
public void testRemoveMap() throws Exception {
// This test case use two thread to call getIndexInformation and
// removeMap concurrently, in order to construct race condition.
@ -242,6 +248,7 @@ public class TestIndexCache extends TestCase {
}
}
@Test
public void testCreateRace() throws Exception {
fs.delete(p, true);
conf.setInt(TTConfig.TT_INDEX_CACHE, 1);

View File

@ -31,12 +31,15 @@ import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer2;
import org.junit.Test;
public class TestJobEndNotifier extends TestCase {
public class TestJobEndNotifier {
HttpServer2 server;
URL baseUrl;
@ -99,6 +102,7 @@ public class TestJobEndNotifier extends TestCase {
}
}
@Before
public void setUp() throws Exception {
new File(System.getProperty("build.webapps", "build/webapps") + "/test"
).mkdirs();
@ -118,6 +122,7 @@ public class TestJobEndNotifier extends TestCase {
FailServlet.calledTimes = 0;
}
@After
public void tearDown() throws Exception {
server.stop();
}
@ -125,6 +130,7 @@ public class TestJobEndNotifier extends TestCase {
/**
* Basic validation for localRunnerNotification.
*/
@Test
public void testLocalJobRunnerUriSubstitution() throws InterruptedException {
JobStatus jobStatus = createTestJobStatus(
"job_20130313155005308_0001", JobStatus.SUCCEEDED);
@ -145,6 +151,7 @@ public class TestJobEndNotifier extends TestCase {
/**
* Validate job.end.retry.attempts for the localJobRunner.
*/
@Test
public void testLocalJobRunnerRetryCount() throws InterruptedException {
int retryAttempts = 3;
JobStatus jobStatus = createTestJobStatus(
@ -161,6 +168,7 @@ public class TestJobEndNotifier extends TestCase {
* Validate that the notification times out after reaching
* mapreduce.job.end-notification.timeout.
*/
@Test
public void testNotificationTimeout() throws InterruptedException {
Configuration conf = new Configuration();
// Reduce the timeout to 1 second

View File

@ -33,7 +33,9 @@ import java.io.IOException;
import java.io.LineNumberReader;
import java.io.StringReader;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.TaskReport;
@ -43,8 +45,6 @@ import org.apache.log4j.Layout;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.WriterAppender;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@ -53,7 +53,7 @@ import org.mockito.stubbing.Answer;
* job monitoring is correct and prints 100% for map and reduce before
* successful completion.
*/
public class TestJobMonitorAndPrint extends TestCase {
public class TestJobMonitorAndPrint {
private Job job;
private Configuration conf;
private ClientProtocol clientProtocol;

View File

@ -27,7 +27,10 @@ import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.junit.Assert;
@ -55,7 +58,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
@SuppressWarnings("unchecked")
public class TestFileOutputCommitter extends TestCase {
public class TestFileOutputCommitter {
private static final Path outDir = new Path(
System.getProperty("test.build.data",
System.getProperty("java.io.tmpdir")),
@ -87,12 +90,12 @@ public class TestFileOutputCommitter extends TestCase {
fs.delete(outDir, true);
}
@Override
@Before
public void setUp() throws IOException {
cleanup();
}
@Override
@After
public void tearDown() throws IOException {
cleanup();
}
@ -195,14 +198,17 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testRecoveryV1() throws Exception {
testRecoveryInternal(1, 1);
}
@Test
public void testRecoveryV2() throws Exception {
testRecoveryInternal(2, 2);
}
@Test
public void testRecoveryUpgradeV1V2() throws Exception {
testRecoveryInternal(1, 2);
}
@ -278,18 +284,22 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testCommitterV1() throws Exception {
testCommitterInternal(1);
}
@Test
public void testCommitterV2() throws Exception {
testCommitterInternal(2);
}
@Test
public void testCommitterWithDuplicatedCommitV1() throws Exception {
testCommitterWithDuplicatedCommitInternal(1);
}
@Test
public void testCommitterWithDuplicatedCommitV2() throws Exception {
testCommitterWithDuplicatedCommitInternal(2);
}
@ -336,11 +346,13 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testCommitterWithFailureV1() throws Exception {
testCommitterWithFailureInternal(1, 1);
testCommitterWithFailureInternal(1, 2);
}
@Test
public void testCommitterWithFailureV2() throws Exception {
testCommitterWithFailureInternal(2, 1);
testCommitterWithFailureInternal(2, 2);
@ -390,10 +402,12 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testCommitterRepeatableV1() throws Exception {
testCommitterRetryInternal(1);
}
@Test
public void testCommitterRepeatableV2() throws Exception {
testCommitterRetryInternal(2);
}
@ -493,14 +507,17 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testMapFileOutputCommitterV1() throws Exception {
testMapFileOutputCommitterInternal(1);
}
@Test
public void testMapFileOutputCommitterV2() throws Exception {
testMapFileOutputCommitterInternal(2);
}
@Test
public void testInvalidVersionNumber() throws IOException {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
@ -552,10 +569,12 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testAbortV1() throws IOException, InterruptedException {
testAbortInternal(1);
}
@Test
public void testAbortV2() throws IOException, InterruptedException {
testAbortInternal(2);
}
@ -631,10 +650,12 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testFailAbortV1() throws Exception {
testFailAbortInternal(1);
}
@Test
public void testFailAbortV2() throws Exception {
testFailAbortInternal(2);
}
@ -732,10 +753,12 @@ public class TestFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testConcurrentCommitTaskWithSubDirV1() throws Exception {
testConcurrentCommitTaskWithSubDir(1);
}
@Test
public void testConcurrentCommitTaskWithSubDirV2() throws Exception {
testConcurrentCommitTaskWithSubDir(2);
}

View File

@ -19,7 +19,8 @@
package org.apache.hadoop.mapreduce.lib.output;
import java.io.IOException;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -28,8 +29,9 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
public class TestFileOutputFormat extends TestCase {
public class TestFileOutputFormat {
@Test
public void testSetOutputPathException() throws Exception {
Job job = Job.getInstance();
try {
@ -42,6 +44,7 @@ public class TestFileOutputFormat extends TestCase {
}
}
@Test
public void testCheckOutputSpecsException() throws Exception {
Job job = Job.getInstance();
Path outDir = new Path(System.getProperty("test.build.data", "/tmp"),

View File

@ -38,7 +38,7 @@ import org.junit.Test;
* This class performs unit test for Job/JobControl classes.
*
*/
public class TestJobControl extends junit.framework.TestCase {
public class TestJobControl {
/**
* This is a main function for testing JobControl class.
@ -263,13 +263,13 @@ public class TestJobControl extends junit.framework.TestCase {
JobConf jc = new JobConf();
Job j = new Job(jc);
//Just make sure no exception is thrown
assertNull(j.getAssignedJobID());
Assert.assertNull(j.getAssignedJobID());
org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class);
org.apache.hadoop.mapreduce.JobID jid = new org.apache.hadoop.mapreduce.JobID("test",0);
when(mockjob.getJobID()).thenReturn(jid);
j.setJob(mockjob);
JobID expected = new JobID("test",0);
assertEquals(expected, j.getAssignedJobID());
Assert.assertEquals(expected, j.getAssignedJobID());
verify(mockjob).getJobID();
}

View File

@ -17,9 +17,6 @@
*/
package org.apache.hadoop.mapreduce;
import junit.framework.TestCase;
import java.io.IOException;
import java.io.DataInput;
import java.io.DataOutput;
@ -27,9 +24,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;

View File

@ -20,8 +20,8 @@ package org.apache.hadoop.mapreduce.lib.input;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -30,9 +30,10 @@ import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
public class TestDelegatingInputFormat extends TestCase {
public class TestDelegatingInputFormat {
@SuppressWarnings("unchecked")
@Test
public void testSplitting() throws Exception {
Job job = Job.getInstance();
MiniDFSCluster dfs = null;

View File

@ -95,7 +95,7 @@ public class TestMapReduceJobControl extends HadoopTestCase {
cjob2 = new ControlledJob(job2, dependingJobs);
Job job3 = MapReduceTestUtil.createCopyJob(conf, outdir_3,
outdir_1, outdir_2);
outdir_1, outdir_2);
dependingJobs = new ArrayList<ControlledJob>();
dependingJobs.add(cjob1);
dependingJobs.add(cjob2);

View File

@ -21,7 +21,10 @@ package org.apache.hadoop.mapreduce.lib.output;
import java.io.*;
import java.net.URI;
import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
@ -38,7 +41,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
public class TestMRCJCFileOutputCommitter extends TestCase {
public class TestMRCJCFileOutputCommitter {
private static Path outDir = new Path(System.getProperty("test.build.data",
"/tmp"), "output");
@ -76,17 +79,18 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
fs.delete(outDir, true);
}
@Override
@Before
public void setUp() throws IOException {
cleanup();
}
@Override
@After
public void tearDown() throws IOException {
cleanup();
}
@SuppressWarnings("unchecked")
@Test
public void testCommitter() throws Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
@ -123,6 +127,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testEmptyOutput() throws Exception {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
@ -146,6 +151,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
}
@SuppressWarnings("unchecked")
@Test
public void testAbort() throws IOException, InterruptedException {
Job job = Job.getInstance();
FileOutputFormat.setOutputPath(job, outDir);
@ -195,6 +201,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
}
@SuppressWarnings("unchecked")
@Test
public void testFailAbort() throws IOException, InterruptedException {
Job job = Job.getInstance();
Configuration conf = job.getConfiguration();

View File

@ -22,23 +22,30 @@ import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import junit.framework.TestCase;
import org.junit.Test;
import org.junit.Assert;
public class TestTaskContext extends TestCase {
public class TestTaskContext {
@Test
public void testTaskContext() {
TaskContext context = new TaskContext(null, null, null, null, null, null, null);
TaskContext context = new TaskContext(null, null, null, null, null, null,
null);
context.setInputKeyClass(IntWritable.class);
assertEquals(IntWritable.class.getName(), context.getInputKeyClass().getName());
Assert.assertEquals(IntWritable.class.getName(), context.getInputKeyClass
().getName());
context.setInputValueClass(Text.class);
assertEquals(Text.class.getName(), context.getInputValueClass().getName());
Assert.assertEquals(Text.class.getName(), context.getInputValueClass()
.getName());
context.setOutputKeyClass(LongWritable.class);
assertEquals(LongWritable.class.getName(), context.getOutputKeyClass().getName());
Assert.assertEquals(LongWritable.class.getName(), context
.getOutputKeyClass().getName());
context.setOutputValueClass(FloatWritable.class);
assertEquals(FloatWritable.class.getName(), context.getOutputValueClass().getName());
Assert.assertEquals(FloatWritable.class.getName(), context
.getOutputValueClass().getName());
}
}

View File

@ -19,11 +19,12 @@ package org.apache.hadoop.mapred.nativetask.buffer;
import java.io.IOException;
import junit.framework.TestCase;
import org.junit.Test;
import org.junit.Assert;
public class TestInputBuffer extends TestCase {
public class TestInputBuffer {
@Test
public void testInputBuffer() throws IOException {
final int size = 100;
final InputBuffer input1 = new InputBuffer(BufferType.DIRECT_BUFFER, size);

View File

@ -17,11 +17,12 @@
*/
package org.apache.hadoop.mapred.nativetask.buffer;
import junit.framework.TestCase;
import org.junit.Test;
import org.junit.Assert;
public class TestOutputBuffer extends TestCase {
public class TestOutputBuffer {
@Test
public void testOutputBuffer() {
final int size = 100;
final OutputBuffer output1 = new OutputBuffer(BufferType.DIRECT_BUFFER, size);

View File

@ -20,7 +20,8 @@ package org.apache.hadoop.mapred.nativetask.serde;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.nativetask.Constants;
@ -30,12 +31,11 @@ import org.apache.hadoop.mapred.nativetask.testutil.TestInput;
import org.apache.hadoop.mapred.nativetask.testutil.TestInput.KV;
import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
import org.junit.Assert;
import org.junit.Before;
import org.mockito.Matchers;
import org.mockito.Mockito;
@SuppressWarnings({ "rawtypes", "unchecked" })
public class TestKVSerializer extends TestCase {
public class TestKVSerializer {
int inputArraySize = 1000; // 1000 bytesWriable elements
int bufferSize = 100; // bytes
@ -46,7 +46,6 @@ public class TestKVSerializer extends TestCase {
private SizedWritable value;
private KVSerializer serializer;
@Override
@Before
public void setUp() throws IOException {
this.inputArray = TestInput.getMapInputs(inputArraySize);
@ -60,6 +59,7 @@ public class TestKVSerializer extends TestCase {
serializer.updateLength(key, value);
}
@Test
public void testUpdateLength() throws IOException {
Mockito.mock(DataOutputStream.class);
@ -75,6 +75,7 @@ public class TestKVSerializer extends TestCase {
}
}
@Test
public void testSerializeKV() throws IOException {
final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);
@ -92,6 +93,7 @@ public class TestKVSerializer extends TestCase {
Assert.assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH);
}
@Test
public void testSerializeNoFlush() throws IOException {
final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);
@ -109,6 +111,7 @@ public class TestKVSerializer extends TestCase {
Assert.assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH);
}
@Test
public void testSerializePartitionKV() throws IOException {
final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);
@ -130,12 +133,14 @@ public class TestKVSerializer extends TestCase {
+ Constants.SIZEOF_PARTITION_LENGTH);
}
@Test
public void testDeserializerNoData() throws IOException {
final DataInputStream in = Mockito.mock(DataInputStream.class);
Mockito.when(in.hasUnReadData()).thenReturn(false);
Assert.assertEquals(0, serializer.deserializeKV(in, key, value));
}
@Test
public void testDeserializer() throws IOException {
final DataInputStream in = Mockito.mock(DataInputStream.class);
Mockito.when(in.hasUnReadData()).thenReturn(true);

View File

@ -17,15 +17,16 @@
*/
package org.apache.hadoop.mapred.nativetask.utils;
import junit.framework.TestCase;
import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
import org.junit.Test;
import org.junit.Assert;
public class TestReadWriteBuffer extends TestCase {
import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
public class TestReadWriteBuffer {
private static byte[] bytes = new byte[] { '0', 'a', 'b', 'c', 'd', '9' };
@Test
public void testReadWriteBuffer() {
final ReadWriteBuffer buffer = new ReadWriteBuffer();

View File

@ -17,15 +17,16 @@
*/
package org.apache.hadoop.mapred.nativetask.utils;
import junit.framework.TestCase;
import org.junit.Test;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
import org.junit.Assert;
@SuppressWarnings({ "rawtypes", "unchecked" })
public class TestSizedWritable extends TestCase {
public class TestSizedWritable {
@Test
public void testSizedWritable() {
final SizedWritable w = new SizedWritable(BytesWritable.class);
Assert.assertTrue(w.length == SizedWritable.INVALID_LENGTH);

View File

@ -18,29 +18,35 @@
package org.apache.hadoop.examples;
import java.math.BigInteger;
import org.junit.Test;
import org.junit.Assert;
/** Tests for BaileyBorweinPlouffe */
public class TestBaileyBorweinPlouffe extends junit.framework.TestCase {
public class TestBaileyBorweinPlouffe {
@Test
public void testMod() {
final BigInteger TWO = BigInteger.ONE.add(BigInteger.ONE);
for(long n = 3; n < 100; n++) {
for (long e = 1; e < 100; e++) {
final long r = TWO.modPow(
BigInteger.valueOf(e), BigInteger.valueOf(n)).longValue();
assertEquals("e=" + e + ", n=" + n, r, BaileyBorweinPlouffe.mod(e, n));
Assert.assertEquals("e=" + e + ", n=" + n, r, BaileyBorweinPlouffe
.mod(e, n));
}
}
}
@Test
public void testHexDigit() {
final long[] answers = {0x43F6, 0xA308, 0x29B7, 0x49F1, 0x8AC8, 0x35EA};
long d = 1;
for(int i = 0; i < answers.length; i++) {
assertEquals("d=" + d, answers[i], BaileyBorweinPlouffe.hexDigits(d));
Assert.assertEquals("d=" + d, answers[i], BaileyBorweinPlouffe
.hexDigits(d));
d *= 10;
}
assertEquals(0x243FL, BaileyBorweinPlouffe.hexDigits(0));
Assert.assertEquals(0x243FL, BaileyBorweinPlouffe.hexDigits(0));
}
}

View File

@ -19,8 +19,11 @@ package org.apache.hadoop.examples.pi.math;
import java.math.BigInteger;
import java.util.Random;
import org.junit.Test;
import org.junit.Assert;
public class TestLongLong {
public class TestLongLong extends junit.framework.TestCase {
static final Random RAN = new Random();
static final long MASK = (1L << (LongLong.SIZE >> 1)) - 1;
@ -32,11 +35,14 @@ public class TestLongLong extends junit.framework.TestCase {
final LongLong ll = LongLong.multiplication(new LongLong(), a, b);
final BigInteger bi = BigInteger.valueOf(a).multiply(BigInteger.valueOf(b));
final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
final String s = String.format(
"\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
b);
//System.out.println(s);
assertEquals(s, bi, ll.toBigInteger());
Assert.assertEquals(s, bi, ll.toBigInteger());
}
@Test
public void testMultiplication() {
for(int i = 0; i < 100; i++) {
final long a = nextPositiveLong();
@ -51,18 +57,23 @@ public class TestLongLong extends junit.framework.TestCase {
final LongLong ll = new LongLong().set(a, b);
final BigInteger bi = ll.toBigInteger();
for(int i = 0; i < LongLong.SIZE >> 1; i++) {
for (int i = 0; i < LongLong.SIZE >> 1; i++) {
final long result = ll.shiftRight(i) & MASK;
final long expected = bi.shiftRight(i).longValue() & MASK;
final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
assertEquals(s, expected, result);
final String s = String.format(
"\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
b);
Assert.assertEquals(s, expected, result);
}
final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
final String s = String.format(
"\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
b);
//System.out.println(s);
assertEquals(s, bi, ll.toBigInteger());
Assert.assertEquals(s, bi, ll.toBigInteger());
}
@Test
public void testRightShift() {
for(int i = 0; i < 1000; i++) {
final long a = nextPositiveLong();

View File

@ -21,8 +21,10 @@ import java.math.BigInteger;
import java.util.Random;
import org.apache.hadoop.examples.pi.Util.Timer;
import org.junit.Assert;
import org.junit.Test;
public class TestModular extends junit.framework.TestCase {
public class TestModular {
private static final Random RANDOM = new Random();
private static final BigInteger TWO = BigInteger.valueOf(2);
@ -49,13 +51,14 @@ public class TestModular extends junit.framework.TestCase {
return sum < DIV_LIMIT? sum: sum - DIV_LIMIT;
}
@Test
public void testDiv() {
for(long n = 2; n < 100; n++)
for(long r = 1; r < n; r++) {
final long a = div(0, r, n);
final long b = (long)((r*1.0/n) * (1L << DIV_VALID_BIT));
final String s = String.format("r=%d, n=%d, a=%X, b=%X", r, n, a, b);
assertEquals(s, b, a);
Assert.assertEquals(s, b, a);
}
}
@ -147,8 +150,11 @@ public class TestModular extends junit.framework.TestCase {
final long r = rn[i][j][0];
final long answer = rn[i][j][1];
final long s = square_slow(r, n);
if (s != answer)
assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
if (s != answer) {
Assert.assertEquals(
"r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
answer, s);
}
}
}
t.tick("square_slow");
@ -161,8 +167,11 @@ public class TestModular extends junit.framework.TestCase {
final long r = rn[i][j][0];
final long answer = rn[i][j][1];
final long s = square(r, n, r2p64);
if (s != answer)
assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
if (s != answer) {
Assert.assertEquals(
"r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
answer, s);
}
}
}
t.tick("square");
@ -175,8 +184,11 @@ public class TestModular extends junit.framework.TestCase {
final long answer = rn[i][j][1];
final BigInteger R = BigInteger.valueOf(r);
final long s = R.multiply(R).mod(N).longValue();
if (s != answer)
assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
if (s != answer) {
Assert.assertEquals(
"r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
answer, s);
}
}
}
t.tick("R.multiply(R).mod(N)");
@ -189,8 +201,11 @@ public class TestModular extends junit.framework.TestCase {
final long answer = rn[i][j][1];
final BigInteger R = BigInteger.valueOf(r);
final long s = R.modPow(TWO, N).longValue();
if (s != answer)
assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
if (s != answer) {
Assert.assertEquals(
"r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
answer, s);
}
}
}
t.tick("R.modPow(TWO, N)");
@ -283,8 +298,11 @@ public class TestModular extends junit.framework.TestCase {
final long e = en[i][j][0];
final long answer = en[i][j][1];
final long s = Modular.mod(e, n);
if (s != answer)
assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
if (s != answer) {
Assert.assertEquals(
"e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
answer, s);
}
}
}
t.tick("Modular.mod");
@ -297,8 +315,11 @@ public class TestModular extends junit.framework.TestCase {
final long e = en[i][j][0];
final long answer = en[i][j][1];
final long s = m2.mod(e);
if (s != answer)
assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
if (s != answer) {
Assert.assertEquals(
"e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
answer, s);
}
}
}
t.tick("montgomery.mod");
@ -310,8 +331,11 @@ public class TestModular extends junit.framework.TestCase {
final long e = en[i][j][0];
final long answer = en[i][j][1];
final long s = m2.mod2(e);
if (s != answer)
assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
if (s != answer) {
Assert.assertEquals(
"e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
answer, s);
}
}
}
t.tick("montgomery.mod2");
@ -323,8 +347,11 @@ public class TestModular extends junit.framework.TestCase {
final long e = en[i][j][0];
final long answer = en[i][j][1];
final long s = TWO.modPow(BigInteger.valueOf(e), N).longValue();
if (s != answer)
assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
if (s != answer) {
Assert.assertEquals(
"e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
answer, s);
}
}
}
t.tick("BigInteger.modPow(e, n)");

View File

@ -28,14 +28,19 @@ import org.apache.hadoop.examples.pi.Container;
import org.apache.hadoop.examples.pi.Util;
import org.apache.hadoop.examples.pi.Util.Timer;
import org.apache.hadoop.examples.pi.math.TestModular.Montgomery2;
import org.junit.Test;
import org.junit.Assert;
public class TestSummation extends junit.framework.TestCase {
public class TestSummation {
static final Random RANDOM = new Random();
static final BigInteger TWO = BigInteger.valueOf(2);
private static final double DOUBLE_DELTA = 0.000000001f;
private static Summation2 newSummation(final long base, final long range, final long delta) {
final ArithmeticProgression N = new ArithmeticProgression('n', base+3, delta, base+3+range);
final ArithmeticProgression E = new ArithmeticProgression('e', base+range, -delta, base);
final ArithmeticProgression N = new ArithmeticProgression('n', base + 3,
delta, base + 3 + range);
final ArithmeticProgression E = new ArithmeticProgression('e', base + range,
-delta, base);
return new Summation2(N, E);
}
@ -53,10 +58,11 @@ public class TestSummation extends junit.framework.TestCase {
final List<Summation> combined = Util.combine(a);
// Util.out.println("combined=" + combined);
assertEquals(1, combined.size());
assertEquals(sigma, combined.get(0));
Assert.assertEquals(1, combined.size());
Assert.assertEquals(sigma, combined.get(0));
}
@Test
public void testSubtract() {
final Summation sigma = newSummation(3, 10000, 20);
final int size = 10;
@ -112,7 +118,9 @@ public class TestSummation extends junit.framework.TestCase {
long n = N.value;
double s = 0;
for(; e > E.limit; e += E.delta) {
s = Modular.addMod(s, TWO.modPow(BigInteger.valueOf(e), BigInteger.valueOf(n)).doubleValue()/n);
s = Modular.addMod(s,
TWO.modPow(BigInteger.valueOf(e), BigInteger.valueOf(n))
.doubleValue() / n);
n += N.delta;
}
return s;
@ -124,16 +132,16 @@ public class TestSummation extends junit.framework.TestCase {
t.tick("sigma=" + sigma);
final double value = sigma.compute();
t.tick("compute=" + value);
assertEquals(value, sigma.compute_modular());
Assert.assertEquals(value, sigma.compute_modular(), DOUBLE_DELTA);
t.tick("compute_modular");
assertEquals(value, sigma.compute_montgomery());
Assert.assertEquals(value, sigma.compute_montgomery(), DOUBLE_DELTA);
t.tick("compute_montgomery");
assertEquals(value, sigma.compute_montgomery2());
Assert.assertEquals(value, sigma.compute_montgomery2(), DOUBLE_DELTA);
t.tick("compute_montgomery2");
assertEquals(value, sigma.compute_modBigInteger());
Assert.assertEquals(value, sigma.compute_modBigInteger(), DOUBLE_DELTA);
t.tick("compute_modBigInteger");
assertEquals(value, sigma.compute_modPow());
Assert.assertEquals(value, sigma.compute_modPow(), DOUBLE_DELTA);
t.tick("compute_modPow");
}

View File

@ -20,10 +20,10 @@ package org.apache.hadoop.contrib.utils.join;
import java.io.IOException;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.extensions.TestSetup;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@ -36,24 +36,27 @@ import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
public class TestDataJoin extends TestCase {
/**
* Class to test JOIN between 2 data
* sources.
*/
public class TestDataJoin {
private static MiniDFSCluster cluster = null;
public static Test suite() {
TestSetup setup = new TestSetup(new TestSuite(TestDataJoin.class)) {
protected void setUp() throws Exception {
Configuration conf = new Configuration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
}
protected void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
}
}
};
return setup;
@Before
public void setUp() throws Exception {
  // Spin up a fresh 2-datanode mini DFS cluster before each test.
  final Configuration clusterConf = new Configuration();
  cluster = new MiniDFSCluster.Builder(clusterConf)
      .numDataNodes(2)
      .build();
}
@After
public void tearDown() throws Exception {
  // Release the mini cluster if setUp managed to start one.
  if (cluster == null) {
    return;
  }
  cluster.shutdown();
}
@Test
public void testDataJoin() throws Exception {
final int srcs = 4;
JobConf job = new JobConf();

View File

@ -38,11 +38,13 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.MiniMRClientClusterFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.event.Level;
import static org.slf4j.LoggerFactory.getLogger;
public class TestDistCh extends junit.framework.TestCase {
public class TestDistCh {
{
GenericTestUtils.setLogLevel(
getLogger("org.apache.hadoop.hdfs.StateChange"), Level.ERROR);
@ -75,20 +77,20 @@ public class TestDistCh extends junit.framework.TestCase {
/**
 * Creates a small file named {@code f<n>} (n from the {@code fcount}
 * counter) under the given directory and writes a one-line marker into it.
 *
 * NOTE(review): the original text carried both the pre-change
 * {@code assertTrue} and post-change {@code Assert.assertTrue} lines
 * back-to-back (stripped-diff artifact); this keeps only the JUnit 4
 * form. The try/finally close is replaced with try-with-resources.
 *
 * @param dir parent directory the file is created in
 * @return the path of the newly created file
 * @throws IOException if the filesystem operations fail
 */
Path createSmallFile(Path dir) throws IOException {
  final Path f = new Path(dir, "f" + ++fcount);
  Assert.assertTrue(!fs.exists(f));
  // try-with-resources guarantees the stream is closed even on write failure
  try (DataOutputStream out = fs.create(f)) {
    out.writeBytes("createSmallFile: f=" + f);
  }
  Assert.assertTrue(fs.exists(f));
  return f;
}
/**
 * Creates the given directory on {@code fs} and verifies it exists as a
 * directory.
 *
 * NOTE(review): the original text carried both the pre-change
 * {@code assertTrue} and post-change {@code Assert.assertTrue} lines
 * back-to-back (stripped-diff artifact); this keeps only the JUnit 4 form.
 *
 * @param dir directory path to create
 * @return the same path, for call chaining
 * @throws IOException if the filesystem operations fail
 */
Path mkdir(Path dir) throws IOException {
  Assert.assertTrue(fs.mkdirs(dir));
  Assert.assertTrue(fs.getFileStatus(dir).isDirectory());
  return dir;
}
@ -128,6 +130,7 @@ public class TestDistCh extends junit.framework.TestCase {
}
}
@Test
public void testDistCh() throws Exception {
final Configuration conf = new Configuration();
@ -190,13 +193,13 @@ public class TestDistCh extends junit.framework.TestCase {
}
/**
 * Asserts that {@code actual}'s owner, group and permission match the
 * expected {@link ChPermissionStatus}. For plain files whose expected
 * status uses the default permission, the configured {@code UMASK} is
 * applied before comparing.
 *
 * NOTE(review): the original text carried both the pre-change
 * {@code assertEquals} and post-change {@code Assert.assertEquals} lines
 * back-to-back (stripped-diff artifact); this keeps only the JUnit 4 form.
 *
 * @param expected expected owner/group/permission
 * @param actual   file status reported by the filesystem
 */
static void checkFileStatus(ChPermissionStatus expected, FileStatus actual) {
  Assert.assertEquals(expected.getUserName(), actual.getOwner());
  Assert.assertEquals(expected.getGroupName(), actual.getGroup());
  FsPermission perm = expected.getPermission();
  if (actual.isFile() && expected.defaultPerm) {
    // default-permission files have the umask applied at create time
    perm = perm.applyUMask(UMASK);
  }
  Assert.assertEquals(perm, actual.getPermission());
}
private static String runLsr(final FsShell shell, String root, int returnvalue
@ -210,7 +213,7 @@ public class TestDistCh extends junit.framework.TestCase {
System.setErr(out);
final String results;
try {
assertEquals(returnvalue, shell.run(new String[]{"-lsr", root}));
Assert.assertEquals(returnvalue, shell.run(new String[]{"-lsr", root}));
results = bytes.toString();
} finally {
IOUtils.closeStream(out);

View File

@ -26,10 +26,12 @@ import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestTypedBytesWritable extends TestCase {
public class TestTypedBytesWritable {
@Test
public void testToString() {
TypedBytesWritable tbw = new TypedBytesWritable();
tbw.setValue(true);
@ -46,6 +48,7 @@ public class TestTypedBytesWritable extends TestCase {
assertEquals("random text", tbw.toString());
}
@Test
public void testIO() throws IOException {
TypedBytesWritable tbw = new TypedBytesWritable();
tbw.setValue(12345);

View File

@ -22,20 +22,23 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.junit.Test;
public class TestAmFilterInitializer extends TestCase {
/**
* Test class for {@link AmFilterInitializer}.
*/
public class TestAmFilterInitializer {
@Override
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
NetUtils.addStaticResolution("host1", "172.0.0.1");
NetUtils.addStaticResolution("host2", "172.0.0.1");
NetUtils.addStaticResolution("host3", "172.0.0.1");