diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 94ad08a2ef3..4a151d98226 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -959,6 +959,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12568. Update core-default.xml to describe posixGroups support.
     (Wei-Chiu Chuang via aajisaka)
 
+    HADOOP-12564. Upgrade JUnit3 TestCase to JUnit 4 in
+    org.apache.hadoop.io package. (Dustin Cote via ozawa)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
index 74e9cc86bd3..ec76ea00807 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
@@ -28,7 +28,7 @@ import org.apache.avro.reflect.ReflectDatumWriter;
 import org.apache.avro.reflect.ReflectDatumReader;
 import org.apache.avro.io.DecoderFactory;
 
-import static junit.framework.TestCase.assertEquals;
+import static org.junit.Assert.assertEquals;
 
 public class AvroTestUtil {
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
index ecf1db0501f..8c8da4a1eb5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
@@ -20,16 +20,22 @@ package org.apache.hadoop.io;
 
 import java.io.*;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.conf.*;
+import org.junit.Test;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
 
 /** Support for flat files of binary key/value pairs. */
-public class TestArrayFile extends TestCase {
+public class TestArrayFile {
   private static final Log LOG = LogFactory.getLog(TestArrayFile.class);
 
   private static final Path TEST_DIR = new Path(
@@ -37,10 +43,7 @@ public class TestArrayFile extends TestCase {
       TestMapFile.class.getSimpleName());
   private static String TEST_FILE = new Path(TEST_DIR, "test.array").toString();
 
-  public TestArrayFile(String name) {
-    super(name);
-  }
-
+  @Test
   public void testArrayFile() throws Exception {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
@@ -49,6 +52,7 @@ public class TestArrayFile extends TestCase {
     readTest(fs, data, TEST_FILE, conf);
   }
 
+  @Test
   public void testEmptyFile() throws Exception {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
@@ -119,6 +123,7 @@ public class TestArrayFile extends TestCase {
    *  {@code next(), seek()} in and out of range.
    *
    */
+  @Test
   public void testArrayFileIteration() {
     int SIZE = 10;
     Configuration conf = new Configuration();
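For reference, every file in this patch applies the same mechanical JUnit 3 to JUnit 4 conversion shown in TestArrayFile above: drop extends TestCase and the String-name constructor, pull the assertions in as static imports from org.junit.Assert, and mark each test method with @Test so the JUnit 4 runner discovers it by annotation rather than by the test* naming convention. A minimal sketch of the before/after shape (FooTest and testBar are illustrative names, not part of the patch):

import org.junit.Test;
import static org.junit.Assert.assertEquals;

// JUnit 3:
//   public class FooTest extends junit.framework.TestCase {
//     public FooTest(String name) { super(name); }      // name-based constructor
//     public void testBar() { assertEquals(2, 1 + 1); } // assert inherited from TestCase
//   }
// JUnit 4 equivalent:
public class FooTest {
  @Test
  public void testBar() {
    assertEquals(2, 1 + 1);  // statically imported from org.junit.Assert
  }
}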
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayPrimitiveWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayPrimitiveWritable.java
index 6367b8bba4c..b75d1654511 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayPrimitiveWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayPrimitiveWritable.java
@@ -22,13 +22,15 @@ import java.io.*;
 import java.util.Arrays;
 
 import org.apache.hadoop.util.StringUtils;
-import org.junit.*;
+import org.junit.Test;
+import org.junit.Before;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-import junit.framework.TestCase;
 
 /** Unit tests for {@link ArrayPrimitiveWritable} */
-public class TestArrayPrimitiveWritable extends TestCase {
-
+public class TestArrayPrimitiveWritable {
   static final boolean[] b = {true, true, false};
   static final char[] c = {'a', 'b', 'c'};
   static final byte[] by = {1, 2, 3};
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
index a2008db805a..a75a7bb6fb6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
@@ -20,27 +20,26 @@ package org.apache.hadoop.io;
 
 import java.io.*;
 
-import org.junit.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertArrayEquals;
+import org.junit.Test;
 
-import junit.framework.TestCase;
 
 /** Unit tests for ArrayWritable */
-public class TestArrayWritable extends TestCase {
-
+public class TestArrayWritable {
   static class TextArrayWritable extends ArrayWritable {
     public TextArrayWritable() {
      super(Text.class);
    }
  }
 
-  public TestArrayWritable(String name) {
-    super(name);
-  }
-
  /**
   * If valueClass is undefined, readFields should throw an exception indicating
   * that the field is null. Otherwise, readFields should succeed.
   */
+  @Test
  public void testThrowUndefinedValueException() throws IOException {
    // Get a buffer containing a simple text array
    Text[] elements = {new Text("zero"), new Text("one"), new Text("two")};
@@ -67,6 +66,7 @@ public class TestArrayWritable {
  /**
   * test {@link ArrayWritable} toArray() method
   */
+  @Test
  public void testArrayWritableToArray() {
    Text[] elements = {new Text("zero"), new Text("one"), new Text("two")};
    TextArrayWritable arrayWritable = new TextArrayWritable();
@@ -84,6 +84,7 @@
  /**
   * test {@link ArrayWritable} constructor with null
   */
+  @Test
  public void testNullArgument() {
    try {
      Class<? extends Writable> valueClass = null;
@@ -100,12 +101,13 @@
   * test {@link ArrayWritable} constructor with {@code String[]} as a parameter
   */
  @SuppressWarnings("deprecation")
+  @Test
  public void testArrayWritableStringConstructor() {
    String[] original = { "test1", "test2", "test3" };
    ArrayWritable arrayWritable = new ArrayWritable(original);
    assertEquals("testArrayWritableStringConstructor class error!!!",
        UTF8.class, arrayWritable.getValueClass());
-    Assert.assertArrayEquals("testArrayWritableStringConstructor toString error!!!",
+    assertArrayEquals("testArrayWritableStringConstructor toString error!!!",
        original, arrayWritable.toStrings());
  }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
index b1ea000e9e4..55a91884e19 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
@@ -27,8 +27,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -41,9 +39,15 @@ import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.util.Progressable;
-import org.junit.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import org.junit.Before;
+import org.junit.Test;
 
-public class TestBloomMapFile extends TestCase {
+public class TestBloomMapFile {
   private static Configuration conf = new Configuration();
   private static final Path TEST_ROOT = new Path(
       System.getProperty("test.build.data", "/tmp"),
@@ -51,16 +55,17 @@
   private static final Path TEST_DIR = new Path(TEST_ROOT, "testfile");
   private static final Path TEST_FILE = new Path(TEST_ROOT, "testfile");
 
-  @Override
+  @Before
   public void setUp() throws Exception {
     LocalFileSystem fs = FileSystem.getLocal(conf);
     if (fs.exists(TEST_ROOT) && !fs.delete(TEST_ROOT, true)) {
-      Assert.fail("Can't clean up test root dir");
+      fail("Can't clean up test root dir");
     }
     fs.mkdirs(TEST_ROOT);
   }
 
   @SuppressWarnings("deprecation")
+  @Test
   public void testMembershipTest() throws Exception {
     // write the file
     FileSystem fs = FileSystem.getLocal(conf);
@@ -107,7 +112,7 @@
   }
 
   @SuppressWarnings("deprecation")
-  private void checkMembershipVaryingSizedKeys(String name, List<Text> keys)
+  private void checkMembershipVaryingSizedKeys(List<Text> keys)
       throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(TEST_DIR);
@@ -135,23 +140,26 @@
     }
   }
 
+  @Test
   public void testMembershipVaryingSizedKeysTest1() throws Exception {
     ArrayList<Text> list = new ArrayList<Text>();
     list.add(new Text("A"));
     list.add(new Text("BB"));
-    checkMembershipVaryingSizedKeys(getName(), list);
+    checkMembershipVaryingSizedKeys(list);
   }
 
+  @Test
   public void testMembershipVaryingSizedKeysTest2() throws Exception {
     ArrayList<Text> list = new ArrayList<Text>();
     list.add(new Text("AA"));
     list.add(new Text("B"));
-    checkMembershipVaryingSizedKeys(getName(), list);
+    checkMembershipVaryingSizedKeys(list);
   }
 
   /**
    * test {@code BloomMapFile.delete()} method
    */
+  @Test
   public void testDeleteFile() {
     BloomMapFile.Writer writer = null;
     try {
@@ -173,6 +181,7 @@
    * test {@link BloomMapFile.Reader} constructor with
    * IOException
    */
+  @Test
   public void testIOExceptionInWriterConstructor() {
     Path dirNameSpy = spy(TEST_FILE);
     BloomMapFile.Reader reader = null;
@@ -198,8 +207,9 @@
   }
 
   /**
-   * test {@link BloomMapFile.Reader.get()} method
+   * test {@link BloomMapFile.Reader#get(WritableComparable, Writable)} method
    */
+  @Test
   public void testGetBloomMapFile() {
     int SIZE = 10;
     BloomMapFile.Reader reader = null;
@@ -235,6 +245,7 @@
    * test {@code BloomMapFile.Writer} constructors
    */
   @SuppressWarnings("deprecation")
+  @Test
   public void testBloomMapFileConstructors() {
     BloomMapFile.Writer writer = null;
     try {
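TestBloomMapFile above also shows the lifecycle half of the conversion: JUnit 4 no longer calls an overridden setUp()/tearDown() pair inherited from TestCase, so those methods become public and are annotated @Before/@After. A minimal sketch of that pattern, with illustrative names not taken from this patch:

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class LifecycleSketch {
  private StringBuilder scratch;

  @Before  // replaces JUnit 3's protected void setUp(); runs before each @Test
  public void setUp() {
    scratch = new StringBuilder();
  }

  @After   // replaces JUnit 3's protected void tearDown(); runs after each @Test
  public void tearDown() {
    scratch = null;
  }

  @Test
  public void testAppend() {
    scratch.append("x");
    assertEquals("x", scratch.toString());
  }
}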
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java
index 44215278ca6..191fc652062 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java
@@ -18,22 +18,25 @@
 package org.apache.hadoop.io;
 
+import org.junit.Test;
+
 import java.io.IOException;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 import java.util.Random;
 
 /** Unit tests for BoundedByteArrayOutputStream */
-public class TestBoundedByteArrayOutputStream extends TestCase {
+public class TestBoundedByteArrayOutputStream {
   private static final int SIZE = 1024;
   private static final byte[] INPUT = new byte[SIZE];
   static {
     new Random().nextBytes(INPUT);
   }
-  
+
+  @Test
   public void testBoundedStream() throws IOException {
 
     BoundedByteArrayOutputStream stream =
@@ -102,7 +105,8 @@
     }
   }
 
-  
+
+  @Test
   public void testResetBuffer() throws IOException {
 
     ResettableBoundedByteArrayOutputStream stream =
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java
index c96cc732938..bd8f2ef537f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java
@@ -21,19 +21,20 @@ package org.apache.hadoop.io;
 import java.io.IOException;
 import java.util.Random;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
-public class TestDefaultStringifier extends TestCase {
+public class TestDefaultStringifier {
   private static Configuration conf = new Configuration();
   private static final Log LOG = LogFactory.getLog(TestDefaultStringifier.class);
 
   private char[] alphabet = "abcdefghijklmnopqrstuvwxyz".toCharArray();
 
+  @Test
   public void testWithWritable() throws Exception {
     conf.set("io.serializations", "org.apache.hadoop.io.serializer.WritableSerialization");
@@ -61,6 +62,7 @@
     }
   }
 
+  @Test
   public void testWithJavaSerialization() throws Exception {
     conf.set("io.serializations", "org.apache.hadoop.io.serializer.JavaSerialization");
@@ -77,6 +79,7 @@
     assertEquals(testInt, claimedInt);
   }
 
+  @Test
   public void testStoreLoad() throws IOException {
     LOG.info("Testing DefaultStringifier#store() and #load()");
@@ -92,6 +95,7 @@
 
   }
 
+  @Test
   public void testStoreLoadArray() throws IOException {
     LOG.info("Testing DefaultStringifier#storeArray() and #loadArray()");
     conf.set("io.serializations", "org.apache.hadoop.io.serializer.JavaSerialization");
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
index f48d3089650..5e71601742f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
@@ -18,15 +18,20 @@
 package org.apache.hadoop.io;
 
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+
+
 import java.io.IOException;
 import java.util.EnumSet;
 import java.util.Iterator;
 import java.lang.reflect.Type;
 
-import junit.framework.TestCase;
 
 /** Unit test for EnumSetWritable */
-public class TestEnumSetWritable extends TestCase {
+public class TestEnumSetWritable {
 
   enum TestEnumSet {
     CREATE, OVERWRITE, APPEND;
@@ -37,6 +42,7 @@
       new EnumSetWritable<TestEnumSet>(nonEmptyFlag);
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testSerializeAndDeserializeNonEmpty() throws IOException {
     DataOutputBuffer out = new DataOutputBuffer();
     ObjectWritable.writeObject(out, nonEmptyFlagWritable, nonEmptyFlagWritable
@@ -51,6 +57,7 @@
   EnumSet<TestEnumSet> emptyFlag = EnumSet.noneOf(TestEnumSet.class);
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testSerializeAndDeserializeEmpty() throws IOException {
 
     boolean gotException = false;
@@ -78,6 +85,7 @@
   }
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testSerializeAndDeserializeNull() throws IOException {
 
     boolean gotException = false;
@@ -107,6 +115,7 @@
 
   public EnumSetWritable<TestEnumSet> testField;
 
+  @Test
   public void testAvroReflect() throws Exception {
     String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\","
         + "\"name\":\"TestEnumSet\","
@@ -121,6 +130,7 @@
   /**
    * test {@link EnumSetWritable} equals() method
    */
+  @Test
   public void testEnumSetWritableEquals() {
     EnumSetWritable<TestEnumSet> eset1 = new EnumSetWritable<TestEnumSet>(
         EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
@@ -139,6 +149,7 @@
    * test {@code EnumSetWritable.write(DataOutputBuffer out)}
    * and iteration by TestEnumSet through iterator().
    */
+  @Test
   public void testEnumSetWritableWriteRead() throws Exception {
     EnumSetWritable<TestEnumSet> srcSet = new EnumSetWritable<TestEnumSet>(
         EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java
index 880bba0e8b8..2f576441645 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java
@@ -22,24 +22,27 @@
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
 
 /**
  * TestCase for {@link GenericWritable} class.
  * @see TestWritable#testWritable(Writable)
  */
-public class TestGenericWritable extends TestCase {
+public class TestGenericWritable {
 
   private Configuration conf;
   public static final String CONF_TEST_KEY = "test.generic.writable";
   public static final String CONF_TEST_VALUE = "dummy";
 
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     conf = new Configuration();
     //set the configuration parameter
     conf.set(CONF_TEST_KEY, CONF_TEST_VALUE);
@@ -121,6 +124,7 @@
     }
   }
 
+  @Test
   public void testFooWritable() throws Exception {
     System.out.println("Testing Writable wrapped in GenericWritable");
     FooGenericWritable generic = new FooGenericWritable();
@@ -130,6 +134,7 @@
     TestWritable.testWritable(generic);
   }
 
+  @Test
   public void testBarWritable() throws Exception {
     System.out.println("Testing Writable, Configurable wrapped in GenericWritable");
     FooGenericWritable generic = new FooGenericWritable();
@@ -148,6 +153,7 @@
     assertNotNull(((Configurable)after.get()).getConf());
   }
 
+  @Test
   public void testBazWritable() throws Exception {
     System.out.println("Testing for GenericWritable to find class names");
     FooGenericWritable generic = new FooGenericWritable();
@@ -157,6 +163,7 @@
     TestWritable.testWritable(generic, conf);
   }
 
+  @Test
   public void testSet() throws Exception {
     Foo foo = new Foo();
     FooGenericWritable generic = new FooGenericWritable();
@@ -174,6 +181,7 @@
 
   }
 
+  @Test
   public void testGet() throws Exception {
     Foo foo = new Foo();
     FooGenericWritable generic = new FooGenericWritable();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java
index 509d75e807d..e3f5df046e1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java
@@ -18,8 +18,11 @@
 package org.apache.hadoop.io;
 
-import org.apache.hadoop.io.TestWritable;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -27,8 +30,7 @@ import java.security.MessageDigest;
 import java.util.Random;
 
 /** Unit tests for MD5Hash. */
-public class TestMD5Hash extends TestCase {
-  public TestMD5Hash(String name) { super(name); }
+public class TestMD5Hash {
 
   private static final Random RANDOM = new Random();
 
@@ -42,7 +44,8 @@
   protected static byte[] D00 = new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
   protected static byte[] DFF = new byte[] {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1};
-  
+
+  @Test
   public void testMD5Hash() throws Exception {
     MD5Hash md5Hash = getTestHash();
@@ -116,6 +119,7 @@
     t2.join();
   }
 
+  @Test
   public void testFactoryReturnsClearedHashes() throws IOException {
     // A stream that will throw an IOE after reading some bytes
     ByteArrayInputStream failingStream = new ByteArrayInputStream(
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapWritable.java
index 4597b909d95..ecdb7f8d7df 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapWritable.java
@@ -17,20 +17,25 @@
  */
 package org.apache.hadoop.io;
 
+import org.junit.Test;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.util.Map;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 
 /**
  * Tests MapWritable
  */
-public class TestMapWritable extends TestCase {
+public class TestMapWritable {
   /** the test */
   @SuppressWarnings("unchecked")
+  @Test
   public void testMapWritable() {
     Text[] keys = {
         new Text("key1"),
@@ -91,6 +96,7 @@
    * Test that number of "unknown" classes is propagated across multiple copies.
    */
   @SuppressWarnings("deprecation")
+  @Test
   public void testForeignClass() {
     MapWritable inMap = new MapWritable();
     inMap.put(new Text("key"), new UTF8("value"));
@@ -105,10 +111,11 @@
    * @throws Exception
    * @see HADOOP-2244
    */
+  @Test
   public void testMultipleCallsToReadFieldsAreSafe() throws Exception {
     // Create an instance and add a key/value.
     MapWritable m = new MapWritable();
-    final Text t = new Text(getName());
+    final Text t = new Text("testMultipleCallsToReadFieldsAreSafe");
     m.put(t, t);
     // Get current size of map. Key values are 't'.
     int count = m.size();
@@ -130,6 +137,7 @@
     dis.close();
   }
 
+  @Test
   public void testEquality() {
     MapWritable map1 = new MapWritable();
     MapWritable map2 = new MapWritable();
@@ -151,6 +159,7 @@
   }
 
   /** Verify text command outputs a useful representation for MapWritable. */
+  @Test
   public void testToString() {
     MapWritable map = new MapWritable();
     final IntWritable key = new IntWritable(5);
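testMultipleCallsToReadFieldsAreSafe above shows the one behavioral wrinkle in this conversion: JUnit 3's inherited getName() disappears along with TestCase, so the patch inlines the method name as a string literal. Where the running test's name is genuinely needed, JUnit 4's TestName rule is the usual substitute; a sketch of that alternative (illustrative, not what this patch does):

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import static org.junit.Assert.assertEquals;

public class NameRuleSketch {
  // Public @Rule field: JUnit 4 fills it in with the current test's name.
  @Rule
  public TestName name = new TestName();

  @Test
  public void testNameIsAvailable() {
    // Stands in for JUnit 3's this.getName().
    assertEquals("testNameIsAvailable", name.getMethodName());
  }
}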
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
index 99c97db5483..e12792843de 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.io;
 
 import java.io.*;
 import java.util.*;
 
-import junit.framework.TestCase;
 
 import org.apache.commons.logging.*;
 
@@ -32,20 +31,23 @@ import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.conf.*;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertNotNull;
 import org.mockito.Mockito;
 
 /** Support for flat files of binary key/value pairs. */
-public class TestSequenceFile extends TestCase {
+public class TestSequenceFile {
   private static final Log LOG = LogFactory.getLog(TestSequenceFile.class);
 
   private Configuration conf = new Configuration();
-  
-  public TestSequenceFile() { }
-
-  public TestSequenceFile(String name) { super(name); }
 
   /** Unit tests for SequenceFile. */
+  @Test
   public void testZlibSequenceFile() throws Exception {
     LOG.info("Testing SequenceFile with DefaultCodec");
     compressedSeqFileTest(new DefaultCodec());
@@ -309,6 +311,7 @@
   }
 
   /** Unit tests for SequenceFile metadata. */
+  @Test
   public void testSequenceFileMetadata() throws Exception {
     LOG.info("Testing SequenceFile with metadata");
     int count = 1024 * 10;
@@ -410,6 +413,7 @@
     sorter.sort(new Path[] { unsortedFile }, sortedFile, false);
   }
 
+  @Test
   public void testClose() throws IOException {
     Configuration conf = new Configuration();
     LocalFileSystem fs = FileSystem.getLocal(conf);
@@ -466,6 +470,7 @@
    * Test that makes sure the FileSystem passed to createWriter
    * @throws Exception
    */
+  @Test
   public void testCreateUsesFsArg() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     FileSystem spyFs = Mockito.spy(fs);
@@ -494,6 +499,7 @@
     }
   }
 
+  @Test
   public void testCloseForErroneousSequenceFile()
     throws IOException {
     Configuration conf = new Configuration();
@@ -526,6 +532,7 @@
   /**
    * Test to makes sure zero length sequence file is handled properly while
    * initializing.
    */
+  @Test
   public void testInitZeroLengthSequenceFile() throws IOException {
     Configuration conf = new Configuration();
     LocalFileSystem fs = FileSystem.getLocal(conf);
@@ -548,6 +555,7 @@
    * already created
    * @throws IOException
    */
+  @Test
   public void testCreateWriterOnExistingFile() throws IOException {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
@@ -560,6 +568,7 @@
         CompressionType.NONE, null, new Metadata());
   }
 
+  @Test
   public void testRecursiveSeqFileCreate() throws IOException {
     FileSystem fs = FileSystem.getLocal(conf);
     Path name = new Path(new Path(System.getProperty("test.build.data","."),
@@ -582,6 +591,7 @@
     // should succeed, fails if exception thrown
   }
 
+  @Test
   public void testSerializationAvailability() throws IOException {
     Configuration conf = new Configuration();
     Path path = new Path(System.getProperty("test.build.data", "."),
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSerialization.java
index a78c015d435..3ca9187f895 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSerialization.java
@@ -18,32 +18,37 @@
 package org.apache.hadoop.io;
 
-import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile.Reader;
 import org.apache.hadoop.io.SequenceFile.Writer;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
-public class TestSequenceFileSerialization extends TestCase {
-  
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertEquals;
+
+public class TestSequenceFileSerialization {
   private Configuration conf;
   private FileSystem fs;
 
-  @Override
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     conf = new Configuration();
     conf.set("io.serializations",
         "org.apache.hadoop.io.serializer.JavaSerialization");
     fs = FileSystem.getLocal(conf);
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     fs.close();
   }
-  
+
+  @Test
   public void testJavaSerialization() throws Exception {
     Path file = new Path(System.getProperty("test.build.data",".") +
         "/testseqser.seq");
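One idiom the patch deliberately does not modernize: the JUnit 3-era try { ... fail(...); } catch (ExpectedType e) { } blocks seen throughout these tests are kept as-is. JUnit 4 would also accept the expected attribute on @Test for the simple cases; a sketch with a hypothetical helper, not code from this patch:

import java.io.IOException;
import org.junit.Test;

public class ExpectedExceptionSketch {
  // JUnit 3 style, still used in this patch:
  //   try { parse("bad"); fail("should have thrown"); }
  //   catch (IOException expected) { /* ok */ }
  // JUnit 4 alternative -- the test passes only if the exception is thrown:
  @Test(expected = IOException.class)
  public void testParseRejectsBadInput() throws IOException {
    parse("bad");
  }

  // Hypothetical helper standing in for the code under test.
  private static void parse(String s) throws IOException {
    throw new IOException("bad input: " + s);
  }
}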
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
index a248171c373..ff92b371ca6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
@@ -22,24 +22,28 @@ import java.io.*;
 import java.util.*;
 import java.util.concurrent.atomic.AtomicReference;
 
-import junit.framework.TestCase;
 
 import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
 
 /** Support for flat files of binary key/value pairs. */
-public class TestSetFile extends TestCase {
+public class TestSetFile {
   private static final Log LOG = LogFactory.getLog(TestSetFile.class);
   private static String FILE = System.getProperty("test.build.data",".") + "/test.set";
 
   private static Configuration conf = new Configuration();
-  
-  public TestSetFile(String name) { super(name); }
 
+  @Test
   public void testSetFile() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     try {
@@ -58,8 +62,9 @@
    * test {@code SetFile.Reader} methods
    * next(), get() in combination
    */
-  public void testSetFileAccessMethods() {
-    try {
+  @Test
+  public void testSetFileAccessMethods() {
+    try {
     FileSystem fs = FileSystem.getLocal(conf);
     int size = 10;
     writeData(fs, size);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
index 56b199a4223..9771fd1a966 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.io;
 
-import junit.framework.TestCase;
 
 import java.io.IOException;
 import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
@@ -26,11 +25,14 @@ import java.nio.charset.CharacterCodingException;
 import java.util.Random;
 
 import com.google.common.base.Charsets;
 import com.google.common.primitives.Bytes;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /** Unit tests for LargeUTF8. */
-public class TestText extends TestCase {
+public class TestText {
   private static final int NUM_ITERATIONS = 100;
-  public TestText(String name) { super(name); }
 
   private static final Random RANDOM = new Random(1);
 
@@ -70,6 +72,7 @@
     return buffer.toString();
   }
 
+  @Test
   public void testWritable() throws Exception {
     for (int i = 0; i < NUM_ITERATIONS; i++) {
       String str;
@@ -82,6 +85,7 @@
 
   }
 
+  @Test
   public void testCoding() throws Exception {
     String before = "Bad \t encoding \t testcase";
     Text text = new Text(before);
@@ -101,15 +105,15 @@
       byte[] utf8Text = bb.array();
       byte[] utf8Java = before.getBytes("UTF-8");
       assertEquals(0, WritableComparator.compareBytes(
-                        utf8Text, 0, bb.limit(),
-                        utf8Java, 0, utf8Java.length));
-
+              utf8Text, 0, bb.limit(),
+              utf8Java, 0, utf8Java.length));
       // test utf8 to string
       after = Text.decode(utf8Java);
       assertTrue(before.equals(after));
     }
   }
-  
+
+  @Test
   public void testIO() throws Exception {
     DataOutputBuffer out = new DataOutputBuffer();
     DataInputBuffer in = new DataInputBuffer();
@@ -166,13 +170,15 @@
     after = Text.readString(in, len + 1);
     assertTrue(str.equals(after));
   }
-  
+
+  @Test
   public void testLimitedIO() throws Exception {
     doTestLimitedIO("abcd", 3);
     doTestLimitedIO("foo bar baz", 10);
     doTestLimitedIO("1", 0);
   }
 
+  @Test
   public void testCompare() throws Exception {
     DataOutputBuffer out1 = new DataOutputBuffer();
     DataOutputBuffer out2 = new DataOutputBuffer();
@@ -222,15 +228,17 @@
           out3.getData(), 0, out3.getLength()));
     }
   }
-  
+
+  @Test
   public void testFind() throws Exception {
     Text text = new Text("abcd\u20acbdcd\u20ac");
     assertTrue(text.find("abd")==-1);
-    assertTrue(text.find("ac")==-1);
-    assertTrue(text.find("\u20ac")==4);
+    assertTrue(text.find("ac") ==-1);
+    assertTrue(text.find("\u20ac") == 4);
     assertTrue(text.find("\u20ac", 5)==11);
   }
 
+  @Test
   public void testFindAfterUpdatingContents() throws Exception {
     Text text = new Text("abcd");
     text.set("a".getBytes());
@@ -239,6 +247,7 @@
     assertEquals(text.find("b"), -1);
   }
 
+  @Test
   public void testValidate() throws Exception {
     Text text = new Text("abcd\u20acbdcd\u20ac");
     byte [] utf8 = text.getBytes();
@@ -246,14 +255,15 @@
     Text.validateUTF8(utf8, 0, length);
   }
 
+  @Test
   public void testClear() throws Exception {
     // Test lengths on an empty text object
     Text text = new Text();
     assertEquals(
-        "Actual string on an empty text object must be an empty string",
+        "Actual string on an empty text object must be an empty string",
         "", text.toString());
     assertEquals("Underlying byte array length must be zero",
-        0, text.getBytes().length);
+        0, text.getBytes().length);
     assertEquals("String's length must be zero",
         0, text.getLength());
 
@@ -262,14 +272,15 @@
     int len = text.getLength();
     text.clear();
     assertEquals("String must be empty after clear()",
-        "", text.toString());
+        "", text.toString());
     assertTrue(
-        "Length of the byte array must not decrease after clear()",
+        "Length of the byte array must not decrease after clear()",
         text.getBytes().length >= len);
     assertEquals("Length of the string must be reset to 0 after clear()",
         0, text.getLength());
   }
 
+  @Test
   public void testTextText() throws CharacterCodingException {
     Text a=new Text("abc");
     Text b=new Text("a");
@@ -309,7 +320,8 @@ public class TestText extends TestCase {
       }
     }
   }
-  
+
+  @Test
   public void testConcurrentEncodeDecode() throws Exception{
     Thread thread1 = new ConcurrentEncodeDecodeThread("apache");
     Thread thread2 = new ConcurrentEncodeDecodeThread("hadoop");
@@ -321,15 +333,17 @@
     thread2.join();
   }
 
+  @Test
   public void testAvroReflect() throws Exception {
     AvroTestUtil.testReflect
-      (new Text("foo"),
-       "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.io.Text\"}");
+        (new Text("foo"),
+            "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.io.Text\"}");
   }
 
   /**
   *
   */
+  @Test
   public void testCharAt() {
     String line = "adsawseeeeegqewgasddga";
     Text text = new Text(line);
@@ -343,6 +357,7 @@
   /**
    * test {@code Text} readFields/write operations
    */
+  @Test
   public void testReadWriteOperations() {
     String line = "adsawseeeeegqewgasddga";
     byte[] inputBytes = line.getBytes();
@@ -365,6 +380,7 @@
     }
   }
 
+  @Test
   public void testReadWithKnownLength() throws IOException {
     String line = "hello world";
     byte[] inputBytes = line.getBytes(Charsets.UTF_8);
@@ -391,18 +407,20 @@
    * with {@code BufferUnderflowException}
    *
    */
+  @Test
   public void testBytesToCodePoint() {
     try {
       ByteBuffer bytes = ByteBuffer.wrap(new byte[] {-2, 45, 23, 12, 76, 89});
-      Text.bytesToCodePoint(bytes);
-      assertTrue("testBytesToCodePoint error !!!", bytes.position() == 6 );
+      Text.bytesToCodePoint(bytes);
+      assertTrue("testBytesToCodePoint error !!!", bytes.position() == 6 );
     } catch (BufferUnderflowException ex) {
       fail("testBytesToCodePoint unexp exception");
     } catch (Exception e) {
       fail("testBytesToCodePoint unexp exception");
     }
   }
-  
+
+  @Test
   public void testbytesToCodePointWithInvalidUTF() {
     try {
       Text.bytesToCodePoint(ByteBuffer.wrap(new byte[] {-2}));
@@ -412,30 +430,21 @@
       fail("testbytesToCodePointWithInvalidUTF error unexp exception !!!");
     }
   }
-  
+
+  @Test
   public void testUtf8Length() {
     assertEquals("testUtf8Length1 error !!!",
-        1, Text.utf8Length(new String(new char[]{(char)1})));
+        1, Text.utf8Length(new String(new char[]{(char) 1})));
     assertEquals("testUtf8Length127 error !!!",
-        1, Text.utf8Length(new String(new char[]{(char)127})));
+        1, Text.utf8Length(new String(new char[]{(char) 127})));
     assertEquals("testUtf8Length128 error !!!",
-        2, Text.utf8Length(new String(new char[]{(char)128})));
+        2, Text.utf8Length(new String(new char[]{(char) 128})));
     assertEquals("testUtf8Length193 error !!!",
-        2, Text.utf8Length(new String(new char[]{(char)193})));
+        2, Text.utf8Length(new String(new char[]{(char) 193})));
     assertEquals("testUtf8Length225 error !!!",
-        2, Text.utf8Length(new String(new char[]{(char)225})));
+        2, Text.utf8Length(new String(new char[]{(char) 225})));
     assertEquals("testUtf8Length254 error !!!",
         2, Text.utf8Length(new String(new char[]{(char)254})));
   }
-
-  public static void main(String[] args) throws Exception
-  {
-    TestText test = new TestText("main");
-    test.testIO();
-    test.testCompare();
-    test.testCoding();
-    test.testWritable();
-    test.testFind();
-    test.testValidate();
-  }
+
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java
index b68ff610427..d09865b0be6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java
@@ -18,14 +18,17 @@
 package org.apache.hadoop.io;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.nio.charset.MalformedInputException;
 import java.util.Arrays;
 
 /** Unit tests for NonUTF8. */
-public class TestTextNonUTF8 extends TestCase {
+public class TestTextNonUTF8 {
 
+  @Test
   public void testNonUTF8() throws Exception{
     // this is a non UTF8 byte array
     byte b[] = {-0x01, -0x01, -0x01, -0x01, -0x01, -0x01, -0x01};
@@ -44,9 +47,4 @@
     assertTrue(Arrays.equals(b, ret));
   }
 
-  public static void main(String[] args) throws Exception
-  {
-    TestTextNonUTF8 test = new TestTextNonUTF8();
-    test.testNonUTF8();
-  }
 }
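The deleted main() methods in TestText and TestTextNonUTF8 above reflect a JUnit 3 habit of running test methods by hand; under JUnit 4 the runner is invoked externally (Maven Surefire, an IDE), so those ad-hoc drivers become redundant. If a command-line entry point were still wanted, a sketch (not part of this patch) could delegate to JUnitCore instead of calling the test methods directly:

package org.apache.hadoop.io;

import org.junit.runner.JUnitCore;
import org.junit.runner.Result;

public class RunOneTestClass {
  public static void main(String[] args) {
    // Runs every @Test method in the class through the JUnit 4 runner,
    // with lifecycle annotations honored -- unlike the removed main() drivers.
    Result result = JUnitCore.runClasses(TestTextNonUTF8.class);
    System.out.println("Tests run: " + result.getRunCount()
        + ", failures: " + result.getFailureCount());
    System.exit(result.wasSuccessful() ? 0 : 1);
  }
}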
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java
index ede59406768..2d60b5ecca1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.io;
 
-import junit.framework.TestCase;
 
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
@@ -28,11 +27,14 @@ import java.util.Random;
 
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /** Unit tests for UTF8. */
 @SuppressWarnings("deprecation")
-public class TestUTF8 extends TestCase {
-  public TestUTF8(String name) { super(name); }
+public class TestUTF8 {
 
   private static final Random RANDOM = new Random();
 
@@ -45,12 +47,14 @@
     return buffer.toString();
   }
 
+  @Test
   public void testWritable() throws Exception {
     for (int i = 0; i < 10000; i++) {
       TestWritable.testWritable(new UTF8(getTestString()));
     }
   }
 
+  @Test
   public void testGetBytes() throws Exception {
     for (int i = 0; i < 10000; i++) {
@@ -73,6 +77,7 @@
     return dis.readUTF();
   }
 
+  @Test
   public void testIO() throws Exception {
     DataOutputBuffer out = new DataOutputBuffer();
     DataInputBuffer in = new DataInputBuffer();
@@ -98,6 +103,7 @@
 
   }
 
+  @Test
   public void testNullEncoding() throws Exception {
     String s = new String(new char[] { 0 });
 
@@ -112,6 +118,7 @@
    *
    * This is a regression test for HADOOP-9103.
    */
+  @Test
   public void testNonBasicMultilingualPlane() throws Exception {
     // Test using the "CAT FACE" character (U+1F431)
     // See http://www.fileformat.info/info/unicode/char/1f431/index.htm
@@ -130,6 +137,7 @@
   /**
    * Test that decoding invalid UTF8 throws an appropriate error message.
    */
+  @Test
   public void testInvalidUTF8() throws Exception {
     byte[] invalid = new byte[] {
       0x01, 0x02, (byte)0xff, (byte)0xff, 0x01, 0x02, 0x03, 0x04, 0x05 };
@@ -145,6 +153,7 @@
   /**
    * Test for a 5-byte UTF8 sequence, which is now considered illegal.
    */
+  @Test
   public void test5ByteUtf8Sequence() throws Exception {
     byte[] invalid = new byte[] {
       0x01, 0x02, (byte)0xf8, (byte)0x88, (byte)0x80,
@@ -162,6 +171,7 @@
    * Test that decoding invalid UTF8 due to truncation yields the correct
    * exception type.
    */
+  @Test
   public void testInvalidUTF8Truncated() throws Exception {
     // Truncated CAT FACE character -- this is a 4-byte sequence, but we
     // only have the first three bytes.
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java
index f7d45b9da76..3276289a39d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java
@@ -18,17 +18,15 @@
 package org.apache.hadoop.io;
 
+import org.junit.Test;
+
 import java.io.*;
 import java.util.Random;
 
-import junit.framework.TestCase;
 
 /** Unit tests for VersionedWritable. */
-public class TestVersionedWritable extends TestCase {
+public class TestVersionedWritable {
 
-  public TestVersionedWritable(String name) { super(name); }
-
-
   /** Example class used in test cases below. */
   public static class SimpleVersionedWritable extends VersionedWritable {
 
@@ -149,16 +147,19 @@
 
   /** Test 1: Check that SimpleVersionedWritable. */
+  @Test
   public void testSimpleVersionedWritable() throws Exception {
     TestWritable.testWritable(new SimpleVersionedWritable());
   }
 
   /** Test 2: Check that AdvancedVersionedWritable Works (well, why wouldn't it!). */
+  @Test
   public void testAdvancedVersionedWritable() throws Exception {
     TestWritable.testWritable(new AdvancedVersionedWritable());
   }
 
   /** Test 3: Check that SimpleVersionedWritable throws an Exception. */
+  @Test
   public void testSimpleVersionedWritableMismatch() throws Exception {
     TestVersionedWritable.testVersionedWritable(new SimpleVersionedWritable(), new SimpleVersionedWritableV2());
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java
index 41dfb7a73d4..8d9f6c064a8 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java
@@ -27,17 +27,20 @@ import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
 /** Unit tests for Writable. */
-public class TestWritable extends TestCase {
+public class TestWritable {
 private static final String TEST_CONFIG_PARAM = "frob.test";
 private static final String TEST_CONFIG_VALUE = "test";
 private static final String TEST_WRITABLE_CONFIG_PARAM = "test.writable";
 private static final String TEST_WRITABLE_CONFIG_VALUE = TEST_CONFIG_VALUE;
 
-  public TestWritable(String name) { super(name); }
-
   /** Example class used in test cases below. */
   public static class SimpleWritable implements Writable {
     private static final Random RANDOM = new Random();
 
@@ -90,18 +93,19 @@
   }
 
   /** Test 1: Check that SimpleWritable. */
+  @Test
   public void testSimpleWritable() throws Exception {
     testWritable(new SimpleWritable());
   }
-  
+
+  @Test
   public void testByteWritable() throws Exception {
     testWritable(new ByteWritable((byte)128));
   }
-  
+
+  @Test
   public void testShortWritable() throws Exception {
     testWritable(new ShortWritable((byte)256));
   }
-  
+
+  @Test
   public void testDoubleWritable() throws Exception {
     testWritable(new DoubleWritable(1.0));
   }
 
@@ -180,6 +184,7 @@ private static final String TEST_WRITABLE_CONFIG_VALUE = TEST_CONFIG_VALUE;
    * Test a user comparator that relies on deserializing both arguments for each
    * compare.
    */
+  @Test
   public void testShortWritableComparator() throws Exception {
     ShortWritable writable1 = new ShortWritable((short)256);
     ShortWritable writable2 = new ShortWritable((short) 128);
@@ -206,6 +211,7 @@ private static final String TEST_WRITABLE_CONFIG_VALUE = TEST_CONFIG_VALUE;
   /**
    * Test that Writable's are configured by Comparator.
    */
+  @Test
   public void testConfigurableWritableComparator() throws Exception {
     Configuration conf = new Configuration();
     conf.set(TEST_WRITABLE_CONFIG_PARAM, TEST_WRITABLE_CONFIG_VALUE);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java
index 396079c3948..5950142220e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java
@@ -24,14 +24,12 @@ import java.io.IOException;
 import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
+import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertTrue;
 
 /** Unit tests for WritableName. */
-public class TestWritableName extends TestCase {
-  public TestWritableName(String name) {
-    super(name);
-  }
+public class TestWritableName {
 
   /** Example class used in test cases below. */
   public static class SimpleWritable implements Writable {
 
@@ -67,12 +65,14 @@
 
   private static final String testName = "mystring";
 
+  @Test
   public void testGoodName() throws Exception {
     Configuration conf = new Configuration();
     Class<?> test = WritableName.getClass("long",conf);
     assertTrue(test != null);
   }
 
+  @Test
   public void testSetName() throws Exception {
     Configuration conf = new Configuration();
     WritableName.setName(SimpleWritable.class, testName);
@@ -81,7 +81,7 @@
     assertTrue(test.equals(SimpleWritable.class));
   }
 
-
+  @Test
   public void testAddName() throws Exception {
     Configuration conf = new Configuration();
     String altName = testName + ".alt";
@@ -98,6 +98,7 @@
 
   }
 
+  @Test
   public void testBadName() throws Exception {
     Configuration conf = new Configuration();
     try {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
index a1b2c319244..92fb4ec94bc 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
@@ -22,13 +22,15 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
-public class TestWritableUtils extends TestCase {
+public class TestWritableUtils {
   private static final Log LOG = LogFactory.getLog(TestWritableUtils.class);
 
-  public static void testValue(int val, int vintlen) throws IOException {
+  private void testValue(int val, int vintlen) throws IOException {
     DataOutputBuffer buf = new DataOutputBuffer();
     DataInputBuffer inbuf = new DataInputBuffer();
     WritableUtils.writeVInt(buf, val);
@@ -44,8 +46,7 @@
     assertEquals(vintlen, WritableUtils.getVIntSize(val));
     assertEquals(vintlen, WritableUtils.decodeVIntSize(buf.getData()[0]));
   }
-
-  public static void testReadInRange(long val, int lower,
+  private void testReadInRange(long val, int lower,
       int upper, boolean expectSuccess) throws IOException {
     DataOutputBuffer buf = new DataOutputBuffer();
     DataInputBuffer inbuf = new DataInputBuffer();
@@ -65,7 +66,8 @@
     }
   }
 
-  public static void testVInt() throws Exception {
+  @Test
+  public void testVInt() throws Exception {
     testValue(12, 1);
     testValue(127, 1);
     testValue(-112, 1);
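TestWritableUtils above highlights another JUnit 4 constraint: the runner only discovers public, non-static, zero-argument methods annotated with @Test, which is why the formerly static testValue()/testReadInRange() helpers become private instance helpers and only testVInt() carries the annotation. The pattern, sketched with illustrative names:

import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class StaticHelperSketch {
  // Helper: private and unannotated, so the JUnit 4 runner ignores it.
  private void checkRoundTrip(int value) {
    assertEquals(value, Integer.parseInt(Integer.toString(value)));
  }

  // Entry point: a public instance method marked @Test.
  @Test
  public void testRoundTrips() {
    checkRoundTrip(12);
    checkRoundTrip(127);
    checkRoundTrip(-112);
  }
}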
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
index 3b81a3f9abb..edab634a0b8 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java
@@ -23,12 +23,15 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.*;
 
-import junit.framework.TestCase;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
 
-public class TestCodecFactory extends TestCase {
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class TestCodecFactory {
 
   private static class BaseCodec implements CompressionCodec {
     private Configuration conf;
@@ -138,9 +141,10 @@
                  expected.getName(), actual.getClass().getName());
   }
-  
-  public static void testFinding() {
-    CompressionCodecFactory factory = 
+
+  @Test
+  public void testFinding() {
+    CompressionCodecFactory factory =
         new CompressionCodecFactory(new Configuration());
     CompressionCodec codec = factory.getCodec(new Path("/tmp/foo.bar"));
     assertEquals("default factory foo codec", null, codec);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java
index 2c285944539..2d75a2d2f8c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java
@@ -35,9 +35,10 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
 
-public class TestCompressionStreamReuse extends TestCase {
+public class TestCompressionStreamReuse {
   private static final Log LOG = LogFactory
       .getLog(TestCompressionStreamReuse.class);
 
@@ -45,16 +46,19 @@
   private int count = 10000;
   private int seed = new Random().nextInt();
 
+  @Test
   public void testBZip2Codec() throws IOException {
     resetStateTest(conf, seed, count,
         "org.apache.hadoop.io.compress.BZip2Codec");
   }
 
+  @Test
   public void testGzipCompressStreamReuse() throws IOException {
     resetStateTest(conf, seed, count,
         "org.apache.hadoop.io.compress.GzipCodec");
   }
 
+  @Test
   public void testGzipCompressStreamReuseWithParam() throws IOException {
     Configuration conf = new Configuration(this.conf);
     ZlibFactory
@@ -65,7 +69,7 @@
         "org.apache.hadoop.io.compress.GzipCodec");
   }
 
-  private static void resetStateTest(Configuration conf, int seed, int count,
+  private void resetStateTest(Configuration conf, int seed, int count,
       String codecClass) throws IOException {
     // Create the codec
     CompressionCodec codec = null;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java
index af52e7557e3..df3f48dec2d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java
@@ -22,7 +22,6 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -32,12 +31,18 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.file.tfile.TFile.Reader;
 import org.apache.hadoop.io.file.tfile.TFile.Writer;
 import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 
 /**
  * test tfile features.
  *
  */
-public class TestTFile extends TestCase {
+public class TestTFile {
   private static String ROOT = System.getProperty("test.build.data",
       "/tmp/tfile-test");
   private FileSystem fs;
@@ -46,13 +51,13 @@
   private static final int largeVal = 3 * 1024 * 1024;
   private static final String localFormatter = "%010d";
 
-  @Override
+  @Before
   public void setUp() throws IOException {
     conf = new Configuration();
     fs = FileSystem.get(conf);
   }
 
-  @Override
+  @After
   public void tearDown() throws IOException {
     // do nothing
   }
@@ -348,12 +353,14 @@
     fs.delete(uTfile, true);
   }
 
+  @Test
   public void testTFileFeatures() throws IOException {
     basicWithSomeCodec("none");
     basicWithSomeCodec("gz");
   }
 
   // test unsorted t files.
+  @Test
   public void testUnsortedTFileFeatures() throws IOException {
     unsortedWithSomeCodec("none");
     unsortedWithSomeCodec("gz");
@@ -414,6 +421,7 @@
   }
 
   // test meta blocks for tfiles
+  @Test
   public void testMetaBlocks() throws IOException {
     Path mFile = new Path(ROOT, "meta.tfile");
     FSDataOutputStream fout = createFSOutput(mFile);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
index d7df1c3e40b..198000b22b1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
@@ -20,7 +20,10 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 
 import org.junit.Assert;
-import junit.framework.TestCase;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -34,7 +37,7 @@ import org.apache.hadoop.io.file.tfile.TFile.Writer;
  * and LZO compression classes.
* */ -public class TestTFileComparators extends TestCase { +public class TestTFileComparators { private static String ROOT = System.getProperty("test.build.data", "/tmp/tfile-test"); @@ -56,7 +59,7 @@ public class TestTFileComparators extends TestCase { private int records1stBlock = 4480; private int records2ndBlock = 4263; - @Override + @Before public void setUp() throws IOException { conf = new Configuration(); path = new Path(ROOT, outputFile); @@ -64,12 +67,13 @@ public class TestTFileComparators extends TestCase { out = fs.create(path); } - @Override + @After public void tearDown() throws IOException { fs.delete(path, true); } // bad comparator format + @Test public void testFailureBadComparatorNames() throws IOException { try { writer = new Writer(out, BLOCK_SIZE, compression, "badcmp", conf); @@ -82,6 +86,7 @@ public class TestTFileComparators extends TestCase { } // jclass that doesn't exist + @Test public void testFailureBadJClassNames() throws IOException { try { writer = @@ -96,6 +101,7 @@ public class TestTFileComparators extends TestCase { } // class exists but not a RawComparator + @Test public void testFailureBadJClasses() throws IOException { try { writer = diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java index d6bbcad359c..bd72962b7d7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java @@ -21,7 +21,11 @@ import java.io.IOException; import java.util.Random; import java.util.StringTokenizer; -import junit.framework.TestCase; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -46,7 +50,7 @@ import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; * test the performance for seek. 
* */ -public class TestTFileSeek extends TestCase { +public class TestTFileSeek { private MyOptions options; private Configuration conf; private Path path; @@ -56,7 +60,7 @@ public class TestTFileSeek extends TestCase { private DiscreteRNG keyLenGen; private KVGenerator kvGen; - @Override + @Before public void setUp() throws IOException { if (options == null) { options = new MyOptions(new String[0]); @@ -83,7 +87,7 @@ public class TestTFileSeek extends TestCase { options.dictSize); } - @Override + @After public void tearDown() throws IOException { fs.delete(path, true); } @@ -175,7 +179,8 @@ public class TestTFileSeek extends TestCase { (double) totalBytes / 1024 / (options.seekCount - miss)); } - + + @Test public void testSeeks() throws IOException { String[] supported = TFile.getSupportedCompressionAlgorithms(); boolean proceed = false; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java index e89d0e4c33e..ea355787961 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java @@ -23,7 +23,10 @@ import java.text.SimpleDateFormat; import java.util.Random; import java.util.StringTokenizer; -import junit.framework.TestCase; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -33,7 +36,6 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -45,7 +47,7 @@ import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner.Entry; import org.apache.hadoop.util.Time; -public class TestTFileSeqFileComparison extends TestCase { +public class TestTFileSeqFileComparison { MyOptions options; private FileSystem fs; @@ -55,7 +57,7 @@ public class TestTFileSeqFileComparison extends TestCase { private DateFormat formatter; byte[][] dictionary; - @Override + @Before public void setUp() throws IOException { if (options == null) { options = new MyOptions(new String[0]); @@ -82,7 +84,7 @@ public class TestTFileSeqFileComparison extends TestCase { } } - @Override + @After public void tearDown() throws IOException { // do nothing } @@ -479,6 +481,7 @@ public class TestTFileSeqFileComparison extends TestCase { readSeqFile(parameters, true); } + @Test public void testRunComparisons() throws IOException { String[] compresses = new String[] { "none", "lzo", "gz" }; for (String compress : compresses) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java index 4f84f0af99c..08695d95e6d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java @@ 
-19,8 +19,10 @@ package org.apache.hadoop.io.file.tfile; import java.io.IOException; import java.util.Random; -import org.junit.Assert; -import junit.framework.TestCase; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -31,7 +33,7 @@ import org.apache.hadoop.io.file.tfile.TFile.Reader; import org.apache.hadoop.io.file.tfile.TFile.Writer; import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; -public class TestTFileSplit extends TestCase { +public class TestTFileSplit { private static String ROOT = System.getProperty("test.build.data", "/tmp/tfile-test"); @@ -86,10 +88,10 @@ public class TestTFileSplit extends TestCase { scanner.advance(); } scanner.close(); - Assert.assertTrue(count > 0); + assertTrue(count > 0); rowCount += count; } - Assert.assertEquals(rowCount, reader.getEntryCount()); + assertEquals(rowCount, reader.getEntryCount()); reader.close(); } @@ -122,11 +124,11 @@ public class TestTFileSplit extends TestCase { ++x; } scanner.close(); - Assert.assertTrue(count == (endRec - startRec)); + assertTrue(count == (endRec - startRec)); } // make sure specifying range at the end gives zero records. Scanner scanner = reader.createScannerByRecordNum(totalRecords, -1); - Assert.assertTrue(scanner.atEnd()); + assertTrue(scanner.atEnd()); } static String composeSortedKey(String prefix, int total, int value) { @@ -175,7 +177,8 @@ public class TestTFileSplit extends TestCase { .getRecordNumByLocation(reader.getLocationByRecordNum(x))); } } - + + @Test public void testSplit() throws IOException { System.out.println("testSplit"); createFile(100000, Compression.Algorithm.NONE.getName()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java index 8c67625137a..6524c374cd2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java @@ -22,8 +22,12 @@ import java.io.EOFException; import java.io.IOException; import java.util.Random; -import org.junit.Assert; -import junit.framework.TestCase; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertTrue; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -41,7 +45,7 @@ import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; * */ -public class TestTFileStreams extends TestCase { +public class TestTFileStreams { private static String ROOT = System.getProperty("test.build.data", "/tmp/tfile-test"); @@ -64,7 +68,7 @@ public class TestTFileStreams extends TestCase { this.comparator = comparator; } - @Override + @Before public void setUp() throws IOException { conf = new Configuration(); path = new Path(ROOT, outputFile); @@ -73,7 +77,7 @@ public class TestTFileStreams extends TestCase { writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf); } - @Override + @After public void tearDown() throws IOException { if (!skip) { try { @@ -85,6 +89,7 @@ public class TestTFileStreams extends TestCase { } } + @Test public void testNoEntry() throws IOException { if (skip) return; @@ -92,6 +97,7 @@ public class 
TestTFileStreams extends TestCase { TestTFileByteArrays.readRecords(fs, path, 0, conf); } + @Test public void testOneEntryKnownLength() throws IOException { if (skip) return; @@ -100,6 +106,7 @@ public class TestTFileStreams extends TestCase { TestTFileByteArrays.readRecords(fs, path, 1, conf); } + @Test public void testOneEntryUnknownLength() throws IOException { if (skip) return; @@ -111,6 +118,7 @@ public class TestTFileStreams extends TestCase { } // known key length, unknown value length + @Test public void testOneEntryMixedLengths1() throws IOException { if (skip) return; @@ -120,6 +128,7 @@ public class TestTFileStreams extends TestCase { } // unknown key length, known value length + @Test public void testOneEntryMixedLengths2() throws IOException { if (skip) return; @@ -128,6 +137,7 @@ public class TestTFileStreams extends TestCase { TestTFileByteArrays.readRecords(fs, path, 1, conf); } + @Test public void testTwoEntriesKnownLength() throws IOException { if (skip) return; @@ -137,6 +147,7 @@ public class TestTFileStreams extends TestCase { } // Negative test + @Test public void testFailureAddKeyWithoutValue() throws IOException { if (skip) return; @@ -151,6 +162,7 @@ public class TestTFileStreams extends TestCase { } } + @Test public void testFailureAddValueWithoutKey() throws IOException { if (skip) return; @@ -170,6 +182,7 @@ public class TestTFileStreams extends TestCase { } } + @Test public void testFailureOneEntryKnownLength() throws IOException { if (skip) return; @@ -192,6 +205,7 @@ public class TestTFileStreams extends TestCase { } } + @Test public void testFailureKeyTooLong() throws IOException { if (skip) return; @@ -199,7 +213,7 @@ public class TestTFileStreams extends TestCase { try { outKey.write("key0".getBytes()); outKey.close(); - Assert.fail("Key is longer than requested."); + fail("Key is longer than requested."); } catch (Exception e) { // noop, expecting an exception @@ -208,6 +222,7 @@ public class TestTFileStreams extends TestCase { } } + @Test public void testFailureKeyTooShort() throws IOException { if (skip) return; @@ -218,7 +233,7 @@ public class TestTFileStreams extends TestCase { try { outValue.write("value0".getBytes()); outValue.close(); - Assert.fail("Value is shorter than expected."); + fail("Value is shorter than expected."); } catch (Exception e) { // noop, expecting an exception @@ -227,6 +242,7 @@ public class TestTFileStreams extends TestCase { } } + @Test public void testFailureValueTooLong() throws IOException { if (skip) return; @@ -237,7 +253,7 @@ public class TestTFileStreams extends TestCase { try { outValue.write("value0".getBytes()); outValue.close(); - Assert.fail("Value is longer than expected."); + fail("Value is longer than expected."); } catch (Exception e) { // noop, expecting an exception @@ -248,10 +264,11 @@ public class TestTFileStreams extends TestCase { outKey.close(); } catch (Exception e) { - Assert.fail("Second or more close() should have no effect."); + fail("Second or more close() should have no effect."); } } + @Test public void testFailureValueTooShort() throws IOException { if (skip) return; @@ -259,7 +276,7 @@ public class TestTFileStreams extends TestCase { try { outKey.write("key0".getBytes()); outKey.close(); - Assert.fail("Key is shorter than expected."); + fail("Key is shorter than expected."); } catch (Exception e) { // noop, expecting an exception @@ -268,6 +285,7 @@ public class TestTFileStreams extends TestCase { } } + @Test public void testFailureCloseKeyStreamManyTimesInWriter() throws IOException { if 
(skip) return; @@ -289,15 +307,16 @@ public class TestTFileStreams extends TestCase { } outKey.close(); outKey.close(); - Assert.assertTrue("Multiple close should have no effect.", true); + assertTrue("Multiple close should have no effect.", true); } + @Test public void testFailureKeyLongerThan64K() throws IOException { if (skip) return; try { DataOutputStream outKey = writer.prepareAppendKey(64 * K + 1); - Assert.fail("Failed to handle key longer than 64K."); + fail("Failed to handle key longer than 64K."); } catch (IndexOutOfBoundsException e) { // noop, expecting exceptions @@ -305,6 +324,7 @@ public class TestTFileStreams extends TestCase { closeOutput(); } + @Test public void testFailureKeyLongerThan64K_2() throws IOException { if (skip) return; @@ -317,7 +337,7 @@ public class TestTFileStreams extends TestCase { outKey.write(buf); } outKey.close(); - Assert.fail("Failed to handle key longer than 64K."); + fail("Failed to handle key longer than 64K."); } catch (EOFException e) { // noop, expecting exceptions @@ -332,6 +352,7 @@ public class TestTFileStreams extends TestCase { } } + @Test public void testFailureNegativeOffset() throws IOException { if (skip) return; @@ -342,7 +363,7 @@ public class TestTFileStreams extends TestCase { byte[] buf = new byte[K]; try { scanner.entry().getKey(buf, -1); - Assert.fail("Failed to handle key negative offset."); + fail("Failed to handle key negative offset."); } catch (Exception e) { // noop, expecting exceptions @@ -358,22 +379,24 @@ public class TestTFileStreams extends TestCase { * * @throws IOException */ + @Test public void testFailureCompressionNotWorking() throws IOException { if (skip) return; long rawDataSize = writeRecords(10000, false, false, false); if (!compression.equalsIgnoreCase(Compression.Algorithm.NONE.getName())) { - Assert.assertTrue(out.getPos() < rawDataSize); + assertTrue(out.getPos() < rawDataSize); } closeOutput(); } + @Test public void testFailureCompressionNotWorking2() throws IOException { if (skip) return; long rawDataSize = writeRecords(10000, true, true, false); if (!compression.equalsIgnoreCase(Compression.Algorithm.NONE.getName())) { - Assert.assertTrue(out.getPos() < rawDataSize); + assertTrue(out.getPos() < rawDataSize); } closeOutput(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java index 2f7f2de459c..235e5e477ac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java @@ -19,8 +19,8 @@ package org.apache.hadoop.io.file.tfile; import java.io.IOException; +import org.junit.After; import org.junit.Assert; -import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -29,8 +29,10 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.file.tfile.TFile.Reader; import org.apache.hadoop.io.file.tfile.TFile.Writer; import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; +import org.junit.Before; +import org.junit.Test; -public class TestTFileUnsortedByteArrays extends TestCase { +public class TestTFileUnsortedByteArrays { private static String ROOT = System.getProperty("test.build.data", "/tmp/tfile-test"); @@ -61,7 +63,7 @@ public class 
TestTFileUnsortedByteArrays extends TestCase { this.records2ndBlock = numRecords2ndBlock; } - @Override + @Before public void setUp() throws IOException { conf = new Configuration(); path = new Path(ROOT, outputFile); @@ -75,12 +77,13 @@ public class TestTFileUnsortedByteArrays extends TestCase { closeOutput(); } - @Override + @After public void tearDown() throws IOException { fs.delete(path, true); } // we still can scan records in an unsorted TFile + @Test public void testFailureScannerWithKeys() throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); @@ -101,6 +104,7 @@ public class TestTFileUnsortedByteArrays extends TestCase { } // we still can scan records in an unsorted TFile + @Test public void testScan() throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); @@ -142,6 +146,7 @@ public class TestTFileUnsortedByteArrays extends TestCase { } // we still can scan records in an unsorted TFile + @Test public void testScanRange() throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); @@ -182,6 +187,7 @@ public class TestTFileUnsortedByteArrays extends TestCase { } } + @Test public void testFailureSeek() throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java index a2bb21971b1..9efd2717d22 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java @@ -21,16 +21,18 @@ package org.apache.hadoop.io.file.tfile; import java.io.IOException; import java.util.Random; +import org.junit.After; import org.junit.Assert; -import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.junit.Before; +import org.junit.Test; -public class TestVLong extends TestCase { +public class TestVLong { private static String ROOT = System.getProperty("test.build.data", "/tmp/tfile-test"); private Configuration conf; @@ -38,7 +40,7 @@ public class TestVLong extends TestCase { private Path path; private String outputFile = "TestVLong"; - @Override + @Before public void setUp() throws IOException { conf = new Configuration(); path = new Path(ROOT, outputFile); @@ -48,13 +50,14 @@ public class TestVLong extends TestCase { } } - @Override + @After public void tearDown() throws IOException { if (fs.exists(path)) { fs.delete(path, false); } } + @Test public void testVLongByte() throws IOException { FSDataOutputStream out = fs.create(path); for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; ++i) { @@ -90,7 +93,8 @@ public class TestVLong extends TestCase { fs.delete(path, false); return ret; } - + + @Test public void testVLongShort() throws IOException { long size = writeAndVerify(0); Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 2 @@ -98,18 +102,21 @@ public class TestVLong extends TestCase { * (1 << Byte.SIZE) - 128 - 32, size); } + @Test public void testVLong3Bytes() throws IOException { long size = writeAndVerify(Byte.SIZE); Assert.assertEquals("Incorrect encoded 
size", (1 << Short.SIZE) * 3 + ((1 << Byte.SIZE) - 32) * (1 << Byte.SIZE) - 40 - 1, size); } + @Test public void testVLong4Bytes() throws IOException { long size = writeAndVerify(Byte.SIZE * 2); Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 4 + ((1 << Byte.SIZE) - 16) * (1 << Byte.SIZE) - 32 - 2, size); } + @Test public void testVLong5Bytes() throws IOException { long size = writeAndVerify(Byte.SIZE * 3); Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 6 - 256 @@ -121,18 +128,23 @@ public class TestVLong extends TestCase { Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * (bytes + 1) - 256 - bytes + 1, size); } + + @Test public void testVLong6Bytes() throws IOException { verifySixOrMoreBytes(6); } - + + @Test public void testVLong7Bytes() throws IOException { verifySixOrMoreBytes(7); } + @Test public void testVLong8Bytes() throws IOException { verifySixOrMoreBytes(8); } + @Test public void testVLongRandom() throws IOException { int count = 1024 * 1024; long data[] = new long[count]; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java index 1926ec55e53..b2d2a8d100f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java @@ -18,16 +18,19 @@ package org.apache.hadoop.io.serializer.avro; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.SerializationTestUtil; +import org.junit.Test; -public class TestAvroSerialization extends TestCase { +public class TestAvroSerialization { private static final Configuration conf = new Configuration(); + @Test public void testSpecific() throws Exception { AvroRecord before = new AvroRecord(); before.intField = 5; @@ -35,6 +38,7 @@ public class TestAvroSerialization extends TestCase { assertEquals(before, after); } + @Test public void testReflectPkg() throws Exception { Record before = new Record(); before.x = 10; @@ -44,12 +48,14 @@ public class TestAvroSerialization extends TestCase { assertEquals(before, after); } + @Test public void testAcceptHandlingPrimitivesAndArrays() throws Exception { SerializationFactory factory = new SerializationFactory(conf); assertNull(factory.getSerializer(byte[].class)); assertNull(factory.getSerializer(byte.class)); } + @Test public void testReflectInnerClass() throws Exception { InnerRecord before = new InnerRecord(); before.x = 10; @@ -59,6 +65,7 @@ public class TestAvroSerialization extends TestCase { assertEquals(before, after); } + @Test public void testReflect() throws Exception { RefSerializable before = new RefSerializable(); before.x = 10;