HBASE-4625 Convert @deprecated HBaseTestCase tests to JUnit4 style tests

Signed-off-by: stack <stack@apache.org>
This commit is contained in:
Ashish Singhi 2014-11-03 12:48:36 +05:30 committed by stack
parent d4504afdd4
commit f7adec0548
13 changed files with 84 additions and 9 deletions

View File

@ -45,6 +45,9 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
@ -67,12 +70,12 @@ public class TestHFile extends HBaseTestCase {
private static CacheConfig cacheConf = null;
private Map<String, Long> startingMetrics;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
}
@Override
@After
public void tearDown() throws Exception {
super.tearDown();
}
@ -83,6 +86,7 @@ public class TestHFile extends HBaseTestCase {
* Test all features work reasonably when hfile is empty of entries.
* @throws IOException
*/
@Test
public void testEmptyHFile() throws IOException {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
Path f = new Path(ROOT_DIR, getName());
@ -99,6 +103,7 @@ public class TestHFile extends HBaseTestCase {
/**
* Create 0-length hfile and show that it fails
*/
@Test
public void testCorrupt0LengthHFile() throws IOException {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
Path f = new Path(ROOT_DIR, getName());
@ -132,6 +137,7 @@ public class TestHFile extends HBaseTestCase {
/**
* Create a truncated hfile and verify that an exception is thrown.
*/
@Test
public void testCorruptTruncatedHFile() throws IOException {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
Path f = new Path(ROOT_DIR, getName());
@ -281,11 +287,13 @@ public class TestHFile extends HBaseTestCase {
fs.delete(ncTFile, true);
}
@Test
public void testTFileFeatures() throws IOException {
testTFilefeaturesInternals(false);
testTFilefeaturesInternals(true);
}
@Test
protected void testTFilefeaturesInternals(boolean useTags) throws IOException {
basicWithSomeCodec("none", useTags);
basicWithSomeCodec("gz", useTags);
@ -353,11 +361,13 @@ public class TestHFile extends HBaseTestCase {
}
// test meta blocks for tfiles
@Test
public void testMetaBlocks() throws Exception {
metablocks("none");
metablocks("gz");
}
@Test
public void testNullMetaBlocks() throws Exception {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
for (Compression.Algorithm compressAlgo :

View File

@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
@ -97,6 +98,7 @@ public class TestSeekTo extends HBaseTestCase {
return ncTFile;
}
@Test
public void testSeekBefore() throws Exception {
testSeekBeforeInternals(TagUsage.NO_TAG);
testSeekBeforeInternals(TagUsage.ONLY_TAG);
@ -138,6 +140,7 @@ public class TestSeekTo extends HBaseTestCase {
reader.close();
}
@Test
public void testSeekBeforeWithReSeekTo() throws Exception {
testSeekBeforeWithReSeekToInternals(TagUsage.NO_TAG);
testSeekBeforeWithReSeekToInternals(TagUsage.ONLY_TAG);
@ -227,6 +230,7 @@ public class TestSeekTo extends HBaseTestCase {
assertEquals("k", toRowStr(scanner.getKeyValue()));
}
@Test
public void testSeekTo() throws Exception {
testSeekToInternals(TagUsage.NO_TAG);
testSeekToInternals(TagUsage.ONLY_TAG);
@ -255,6 +259,8 @@ public class TestSeekTo extends HBaseTestCase {
reader.close();
}
@Test
public void testBlockContainingKey() throws Exception {
testBlockContainingKeyInternals(TagUsage.NO_TAG);
testBlockContainingKeyInternals(TagUsage.ONLY_TAG);

View File

@ -48,6 +48,8 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@ -75,13 +77,13 @@ public class TestBlocksRead extends HBaseTestCase {
* @see org.apache.hadoop.hbase.HBaseTestCase#setUp()
*/
@SuppressWarnings("deprecation")
@Override
@Before
protected void setUp() throws Exception {
super.setUp();
}
@SuppressWarnings("deprecation")
@Override
@After
protected void tearDown() throws Exception {
super.tearDown();
EnvironmentEdgeManagerTestHelper.reset();

View File

@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@ -49,7 +50,7 @@ public class TestBlocksScanned extends HBaseTestCase {
private static HBaseTestingUtility TEST_UTIL = null;
@Override
@Before
public void setUp() throws Exception {
super.setUp();

View File

@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
@ -65,6 +66,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
@Test
public void testUsingMetaAndBinary() throws IOException {
FileSystem filesystem = FileSystem.get(conf);
Path rootdir = testDir;
@ -188,6 +190,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
* Test file of multiple deletes and with deletes as final key.
* @see <a href="https://issues.apache.org/jira/browse/HBASE-751">HBASE-751</a>
*/
@Test
public void testGetClosestRowBefore3() throws IOException{
HRegion region = null;
byte [] c0 = COLUMNS[0];
@ -296,6 +299,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
}
/** For HBASE-694 */
@Test
public void testGetClosestRowBefore2() throws IOException{
HRegion region = null;
byte [] c0 = COLUMNS[0];

View File

@ -31,6 +31,8 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionBackedScanner;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
@ -52,6 +54,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
private byte[] col4;
private byte[] col5;
@Before
public void setUp() throws Exception {
super.setUp();
data = Bytes.toBytes("data");
@ -66,6 +69,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
col5 = Bytes.toBytes("col5");
}
@Test
public void testSorted() throws IOException{
//Cases that need to be checked are:
//1. The "smallest" KeyValue is in the same scanners as current
@ -128,6 +132,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
}
@Test
public void testSeek() throws IOException {
//Cases:
//1. Seek KeyValue that is not in scanner
@ -176,6 +181,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
}
@Test
public void testScannerLeak() throws IOException {
// Test for unclosed scanners (HBASE-1927)

View File

@ -39,6 +39,8 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
@ -64,6 +66,7 @@ public class TestQueryMatcher extends HBaseTestCase {
KVComparator rowComparator;
private Scan scan;
@Before
public void setUp() throws Exception {
super.setUp();
row1 = Bytes.toBytes("row1");
@ -124,6 +127,7 @@ public class TestQueryMatcher extends HBaseTestCase {
}
}
@Test
public void testMatch_ExplicitColumns()
throws IOException {
//Moving up from the Tracker by using Gets and List<KeyValue> instead
@ -141,6 +145,7 @@ public class TestQueryMatcher extends HBaseTestCase {
_testMatch_ExplicitColumns(scan, expected);
}
@Test
public void testMatch_ExplicitColumnsWithLookAhead()
throws IOException {
//Moving up from the Tracker by using Gets and List<KeyValue> instead
@ -161,6 +166,7 @@ public class TestQueryMatcher extends HBaseTestCase {
}
@Test
public void testMatch_Wildcard()
throws IOException {
//Moving up from the Tracker by using Gets and List<KeyValue> instead
@ -215,6 +221,7 @@ public class TestQueryMatcher extends HBaseTestCase {
*
* @throws IOException
*/
@Test
public void testMatch_ExpiredExplicit()
throws IOException {
@ -269,6 +276,7 @@ public class TestQueryMatcher extends HBaseTestCase {
*
* @throws IOException
*/
@Test
public void testMatch_ExpiredWildcard()
throws IOException {
@ -314,6 +322,7 @@ public class TestQueryMatcher extends HBaseTestCase {
}
}
@Test
public void testMatch_PartialRangeDropDeletes() throws Exception {
// Some ranges.
testDropDeletes(

View File

@ -25,6 +25,8 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
@ -34,11 +36,13 @@ public class TestScanDeleteTracker extends HBaseTestCase {
private long timestamp = 10L;
private byte deleteType = 0;
@Before
public void setUp() throws Exception {
super.setUp();
sdt = new ScanDeleteTracker();
}
@Test
public void testDeletedBy_Delete() {
KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.Delete);
@ -47,6 +51,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
assertEquals(DeleteResult.VERSION_DELETED, ret);
}
@Test
public void testDeletedBy_DeleteColumn() {
KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.DeleteColumn);
@ -58,6 +63,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
assertEquals(DeleteResult.COLUMN_DELETED, ret);
}
@Test
public void testDeletedBy_DeleteFamily() {
KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.DeleteFamily);
@ -69,6 +75,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
assertEquals(DeleteResult.FAMILY_DELETED, ret);
}
@Test
public void testDeletedBy_DeleteFamilyVersion() {
byte [] qualifier1 = Bytes.toBytes("qualifier1");
byte [] qualifier2 = Bytes.toBytes("qualifier2");
@ -113,6 +120,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
}
@Test
public void testDelete_DeleteColumn() {
byte [] qualifier = Bytes.toBytes("qualifier");
deleteType = KeyValue.Type.Delete.getCode();
@ -134,6 +142,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
}
@Test
public void testDeleteColumn_Delete() {
byte [] qualifier = Bytes.toBytes("qualifier");
deleteType = KeyValue.Type.DeleteColumn.getCode();
@ -154,6 +163,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
//Testing new way where we save the Delete in case of a Delete for specific
//ts, could have just added the last line to the first test, but rather keep
//them separated
@Test
public void testDelete_KeepDelete(){
byte [] qualifier = Bytes.toBytes("qualifier");
deleteType = KeyValue.Type.Delete.getCode();
@ -164,6 +174,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
assertEquals(false ,sdt.isEmpty());
}
@Test
public void testDelete_KeepVersionZero(){
byte [] qualifier = Bytes.toBytes("qualifier");
deleteType = KeyValue.Type.Delete.getCode();

View File

@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
@ -35,6 +36,7 @@ public class TestScanWildcardColumnTracker extends HBaseTestCase {
final static int VERSIONS = 2;
@Test
public void testCheckColumn_Ok() throws IOException {
ScanWildcardColumnTracker tracker =
new ScanWildcardColumnTracker(0, VERSIONS, Long.MIN_VALUE);
@ -68,6 +70,7 @@ public class TestScanWildcardColumnTracker extends HBaseTestCase {
}
}
@Test
public void testCheckColumn_EnforceVersions() throws IOException {
ScanWildcardColumnTracker tracker =
new ScanWildcardColumnTracker(0, VERSIONS, Long.MIN_VALUE);

View File

@ -58,6 +58,9 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.FSUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
@ -78,12 +81,12 @@ public class TestStoreFile extends HBaseTestCase {
private static final int CKBYTES = 512;
private static String TEST_FAMILY = "cf";
@Override
@Before
public void setUp() throws Exception {
super.setUp();
}
@Override
@After
public void tearDown() throws Exception {
super.tearDown();
}
@ -93,6 +96,7 @@ public class TestStoreFile extends HBaseTestCase {
* using two HalfMapFiles.
* @throws Exception
*/
@Test
public void testBasicHalfMapFile() throws Exception {
final HRegionInfo hri =
new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
@ -145,6 +149,7 @@ public class TestStoreFile extends HBaseTestCase {
* store files in other regions works.
* @throws IOException
*/
@Test
public void testReference() throws IOException {
final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testReferenceTb"));
HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
@ -188,6 +193,7 @@ public class TestStoreFile extends HBaseTestCase {
assertTrue(Bytes.equals(kv.getRow(), finalRow));
}
@Test
public void testHFileLink() throws IOException {
final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testHFileLinkTb"));
// force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
@ -230,6 +236,7 @@ public class TestStoreFile extends HBaseTestCase {
* This test creates an hfile and then the dir structures and files to verify that references
* to hfilelinks (created by snapshot clones) can be properly interpreted.
*/
@Test
public void testReferenceToHFileLink() throws IOException {
// force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
Configuration testConf = new Configuration(this.conf);
@ -494,6 +501,7 @@ public class TestStoreFile extends HBaseTestCase {
private static final int BLOCKSIZE_SMALL = 8192;
@Test
public void testBloomFilter() throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
@ -514,6 +522,7 @@ public class TestStoreFile extends HBaseTestCase {
bloomWriteRead(writer, fs);
}
@Test
public void testDeleteFamilyBloomFilter() throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
@ -576,6 +585,7 @@ public class TestStoreFile extends HBaseTestCase {
/**
* Test for HBASE-8012
*/
@Test
public void testReseek() throws Exception {
// write the file
Path f = new Path(ROOT_DIR, getName());
@ -600,6 +610,7 @@ public class TestStoreFile extends HBaseTestCase {
assertNotNull("Intial reseek should position at the beginning of the file", s.peek());
}
@Test
public void testBloomTypes() throws Exception {
float err = (float) 0.01;
FileSystem fs = FileSystem.getLocal(conf);
@ -688,6 +699,7 @@ public class TestStoreFile extends HBaseTestCase {
}
}
@Test
public void testSeqIdComparator() {
assertOrdering(StoreFile.Comparators.SEQ_ID,
mockStoreFile(true, 100, 1000, -1, "/foo/123"),
@ -766,6 +778,7 @@ public class TestStoreFile extends HBaseTestCase {
* Test to ensure correctness when using StoreFile with multiple timestamps
* @throws IOException
*/
@Test
public void testMultipleTimestamps() throws IOException {
byte[] family = Bytes.toBytes("familyname");
byte[] qualifier = Bytes.toBytes("qualifier");
@ -816,6 +829,7 @@ public class TestStoreFile extends HBaseTestCase {
assertTrue(!scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE));
}
@Test
public void testCacheOnWriteEvictOnClose() throws Exception {
Configuration conf = this.conf;
@ -988,6 +1002,7 @@ public class TestStoreFile extends HBaseTestCase {
* Check if data block encoding information is saved correctly in HFile's
* file info.
*/
@Test
public void testDataBlockEncodingMetaData() throws IOException {
// Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
Path dir = new Path(new Path(testDir, "7e0102"), "familyname");

View File

@ -23,6 +23,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.io.HFileLink;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
@ -35,6 +36,7 @@ public class TestStoreFileInfo extends HBaseTestCase {
/**
* Validate that we can handle valid tables with '.', '_', and '-' chars.
*/
@Test
public void testStoreFileNames() {
String[] legalHFileLink = { "MyTable_02=abc012-def345", "MyTable_02.300=abc012-def345",
"MyTable_02-400=abc012-def345", "MyTable_02-400.200=abc012-def345",

View File

@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
@ -85,6 +86,7 @@ public class TestWideScanner extends HBaseTestCase {
return count;
}
@Test
public void testWideScanBatching() throws IOException {
final int batch = 256;
try {

View File

@ -46,6 +46,9 @@ import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/** Test stand alone merge tool that can merge arbitrary regions */
@ -63,7 +66,7 @@ public class TestMergeTool extends HBaseTestCase {
private byte [][][] rows;
private MiniDFSCluster dfsCluster = null;
@Override
@Before
public void setUp() throws Exception {
// Set the timeout down else this test will take a while to complete.
this.conf.setLong("hbase.zookeeper.recoverable.waittime", 10);
@ -175,7 +178,7 @@ public class TestMergeTool extends HBaseTestCase {
}
}
@Override
@After
public void tearDown() throws Exception {
super.tearDown();
for (int i = 0; i < sourceRegions.length; i++) {
@ -255,6 +258,7 @@ public class TestMergeTool extends HBaseTestCase {
* Test merge tool.
* @throws Exception
*/
@Test
public void testMergeTool() throws Exception {
// First verify we can read the rows from the source regions and that they
// contain the right data.