HBASE-4625 Convert @deprecated HBaseTestCase tests to JUnit4 style tests

Signed-off-by: stack <stack@apache.org>
Ashish Singhi 2014-11-03 12:48:36 +05:30 committed by stack
parent d4504afdd4
commit f7adec0548
13 changed files with 84 additions and 9 deletions
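
All 13 files follow the same mechanical pattern. Below is a minimal sketch of the converted shape, using a hypothetical FooTest (not a class from this patch): under JUnit3, junit.framework.TestCase discovers test methods by the test* naming convention and invokes setUp()/tearDown() reflectively around each one; under JUnit4, discovery is by annotation, so every lifecycle and test method must be marked explicitly. Note that as long as HBaseTestCase still extends junit.framework.TestCase, JUnit's default builder keeps executing these classes with the JUnit 3.8 runner and the new annotations are effectively inert; they document intent and prepare the classes for the eventual removal of the deprecated base class.

    import java.io.IOException;

    import org.apache.hadoop.hbase.HBaseTestCase;
    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    // Hypothetical example of the converted (JUnit4) shape, not a class from this patch.
    public class FooTest extends HBaseTestCase {

      @Before                                  // was: @Override on the JUnit3 lifecycle hook
      public void setUp() throws Exception {
        super.setUp();                         // runs before each @Test method
      }

      @After                                   // was: @Override
      public void tearDown() throws Exception {
        super.tearDown();                      // runs after each @Test method
      }

      @Test                                    // was: discovered via the "test" name prefix
      public void testSomething() throws IOException {
        // test body is unchanged by the conversion
      }
    }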

File: TestHFile.java

@@ -45,6 +45,9 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 /**
@@ -67,12 +70,12 @@ public class TestHFile extends HBaseTestCase {
   private static CacheConfig cacheConf = null;
   private Map<String, Long> startingMetrics;

-  @Override
+  @Before
   public void setUp() throws Exception {
     super.setUp();
   }

-  @Override
+  @After
   public void tearDown() throws Exception {
     super.tearDown();
   }
@@ -83,6 +86,7 @@ public class TestHFile extends HBaseTestCase {
    * Test all features work reasonably when hfile is empty of entries.
    * @throws IOException
    */
+  @Test
   public void testEmptyHFile() throws IOException {
     if (cacheConf == null) cacheConf = new CacheConfig(conf);
     Path f = new Path(ROOT_DIR, getName());
@@ -99,6 +103,7 @@ public class TestHFile extends HBaseTestCase {
   /**
    * Create 0-length hfile and show that it fails
    */
+  @Test
   public void testCorrupt0LengthHFile() throws IOException {
     if (cacheConf == null) cacheConf = new CacheConfig(conf);
     Path f = new Path(ROOT_DIR, getName());
@@ -132,6 +137,7 @@ public class TestHFile extends HBaseTestCase {
   /**
    * Create a truncated hfile and verify that exception thrown.
    */
+  @Test
   public void testCorruptTruncatedHFile() throws IOException {
     if (cacheConf == null) cacheConf = new CacheConfig(conf);
     Path f = new Path(ROOT_DIR, getName());
@@ -281,11 +287,13 @@ public class TestHFile extends HBaseTestCase {
     fs.delete(ncTFile, true);
   }

+  @Test
   public void testTFileFeatures() throws IOException {
     testTFilefeaturesInternals(false);
     testTFilefeaturesInternals(true);
   }

+  @Test
   protected void testTFilefeaturesInternals(boolean useTags) throws IOException {
     basicWithSomeCodec("none", useTags);
     basicWithSomeCodec("gz", useTags);
@@ -353,11 +361,13 @@ public class TestHFile extends HBaseTestCase {
   }

   // test meta blocks for tfiles
+  @Test
   public void testMetaBlocks() throws Exception {
     metablocks("none");
     metablocks("gz");
   }

+  @Test
   public void testNullMetaBlocks() throws Exception {
     if (cacheConf == null) cacheConf = new CacheConfig(conf);
     for (Compression.Algorithm compressAlgo :
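
One caveat in the hunk above: testTFilefeaturesInternals(boolean useTags) is a protected helper that takes a parameter, yet it also gains @Test. Under JUnit4's default runner, @Test methods must be public, void, and parameterless, otherwise the entire class fails validation; the annotation goes unnoticed here only because the class is still executed as a JUnit3 test via HBaseTestCase. A sketch of the shape JUnit4 expects, with hypothetical names:

    import java.io.IOException;

    import org.junit.Test;

    public class TFileFeaturesShape {

      // Public, parameterless entry points carry the annotation...
      @Test
      public void testTFileFeaturesWithoutTags() throws IOException {
        checkTFileFeatures(false);
      }

      @Test
      public void testTFileFeaturesWithTags() throws IOException {
        checkTFileFeatures(true);
      }

      // ...while the parameterized logic stays in an unannotated private helper.
      private void checkTFileFeatures(boolean useTags) throws IOException {
        // exercise the HFile write/read paths with and without cell tags
      }
    }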

File: TestSeekTo.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 /**
@@ -97,6 +98,7 @@ public class TestSeekTo extends HBaseTestCase {
     return ncTFile;
   }

+  @Test
   public void testSeekBefore() throws Exception {
     testSeekBeforeInternals(TagUsage.NO_TAG);
     testSeekBeforeInternals(TagUsage.ONLY_TAG);
@@ -138,6 +140,7 @@ public class TestSeekTo extends HBaseTestCase {
     reader.close();
   }

+  @Test
   public void testSeekBeforeWithReSeekTo() throws Exception {
     testSeekBeforeWithReSeekToInternals(TagUsage.NO_TAG);
     testSeekBeforeWithReSeekToInternals(TagUsage.ONLY_TAG);
@@ -227,6 +230,7 @@ public class TestSeekTo extends HBaseTestCase {
     assertEquals("k", toRowStr(scanner.getKeyValue()));
   }

+  @Test
   public void testSeekTo() throws Exception {
     testSeekToInternals(TagUsage.NO_TAG);
     testSeekToInternals(TagUsage.ONLY_TAG);
@@ -255,6 +259,8 @@ public class TestSeekTo extends HBaseTestCase {
     reader.close();
   }

+
+  @Test
   public void testBlockContainingKey() throws Exception {
     testBlockContainingKeyInternals(TagUsage.NO_TAG);
     testBlockContainingKeyInternals(TagUsage.ONLY_TAG);

File: TestBlocksRead.java

@@ -48,6 +48,8 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

@@ -75,13 +77,13 @@ public class TestBlocksRead extends HBaseTestCase {
    * @see org.apache.hadoop.hbase.HBaseTestCase#setUp()
    */
   @SuppressWarnings("deprecation")
-  @Override
+  @Before
   protected void setUp() throws Exception {
     super.setUp();
   }

   @SuppressWarnings("deprecation")
-  @Override
+  @After
   protected void tearDown() throws Exception {
     super.tearDown();
     EnvironmentEdgeManagerTestHelper.reset();
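
A similar caveat applies here: TestBlocksRead keeps setUp()/tearDown() protected while swapping @Override for @Before/@After. JUnit4 lifecycle methods must also be public or the runner rejects them at validation time; again this stays harmless only while the class is still run as a JUnit3 test. The eventual cleanup is simply widening the visibility, as in this hypothetical sketch (not part of this patch):

    import org.apache.hadoop.hbase.HBaseTestCase;
    import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
    import org.junit.After;
    import org.junit.Before;

    public class BlocksReadShape extends HBaseTestCase {

      @Before
      public void setUp() throws Exception {   // public, or JUnit4 validation rejects it
        super.setUp();
      }

      @After
      public void tearDown() throws Exception {
        super.tearDown();
        EnvironmentEdgeManagerTestHelper.reset();
      }
    }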

File: TestBlocksScanned.java

@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.CacheStats;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

@@ -49,7 +50,7 @@ public class TestBlocksScanned extends HBaseTestCase {
   private static HBaseTestingUtility TEST_UTIL = null;

-  @Override
+  @Before
   public void setUp() throws Exception {
     super.setUp();

File: TestGetClosestAtOrBefore.java

@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 /**
@@ -65,6 +66,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
+  @Test
   public void testUsingMetaAndBinary() throws IOException {
     FileSystem filesystem = FileSystem.get(conf);
     Path rootdir = testDir;
@@ -188,6 +190,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
    * Test file of multiple deletes and with deletes as final key.
    * @see <a href="https://issues.apache.org/jira/browse/HBASE-751">HBASE-751</a>
    */
+  @Test
   public void testGetClosestRowBefore3() throws IOException{
     HRegion region = null;
     byte [] c0 = COLUMNS[0];
@@ -296,6 +299,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
   }

   /** For HBASE-694 */
+  @Test
   public void testGetClosestRowBefore2() throws IOException{
     HRegion region = null;
     byte [] c0 = COLUMNS[0];

File: TestKeyValueHeap.java

@@ -31,6 +31,8 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CollectionBackedScanner;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 @Category({RegionServerTests.class, SmallTests.class})
@@ -52,6 +54,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
   private byte[] col4;
   private byte[] col5;

+  @Before
   public void setUp() throws Exception {
     super.setUp();
     data = Bytes.toBytes("data");
@@ -66,6 +69,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
     col5 = Bytes.toBytes("col5");
   }

+  @Test
   public void testSorted() throws IOException{
     //Cases that need to be checked are:
     //1. The "smallest" KeyValue is in the same scanners as current
@@ -128,6 +132,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
   }

+  @Test
   public void testSeek() throws IOException {
     //Cases:
     //1. Seek KeyValue that is not in scanner
@@ -176,6 +181,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
   }

+  @Test
   public void testScannerLeak() throws IOException {
     // Test for unclosed scanners (HBASE-1927)

File: TestQueryMatcher.java

@@ -39,6 +39,8 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 @Category({RegionServerTests.class, SmallTests.class})
@@ -64,6 +66,7 @@ public class TestQueryMatcher extends HBaseTestCase {
   KVComparator rowComparator;
   private Scan scan;

+  @Before
   public void setUp() throws Exception {
     super.setUp();
     row1 = Bytes.toBytes("row1");
@@ -124,6 +127,7 @@ public class TestQueryMatcher extends HBaseTestCase {
     }
   }

+  @Test
   public void testMatch_ExplicitColumns()
   throws IOException {
     //Moving up from the Tracker by using Gets and List<KeyValue> instead
@@ -141,6 +145,7 @@ public class TestQueryMatcher extends HBaseTestCase {
     _testMatch_ExplicitColumns(scan, expected);
   }

+  @Test
   public void testMatch_ExplicitColumnsWithLookAhead()
   throws IOException {
     //Moving up from the Tracker by using Gets and List<KeyValue> instead
@@ -161,6 +166,7 @@ public class TestQueryMatcher extends HBaseTestCase {
   }

+  @Test
   public void testMatch_Wildcard()
   throws IOException {
     //Moving up from the Tracker by using Gets and List<KeyValue> instead
@@ -215,6 +221,7 @@ public class TestQueryMatcher extends HBaseTestCase {
    *
    * @throws IOException
    */
+  @Test
   public void testMatch_ExpiredExplicit()
   throws IOException {
@@ -269,6 +276,7 @@ public class TestQueryMatcher extends HBaseTestCase {
    *
    * @throws IOException
    */
+  @Test
   public void testMatch_ExpiredWildcard()
   throws IOException {
@@ -314,6 +322,7 @@ public class TestQueryMatcher extends HBaseTestCase {
     }
   }

+  @Test
   public void testMatch_PartialRangeDropDeletes() throws Exception {
     // Some ranges.
     testDropDeletes(

File: TestScanDeleteTracker.java

@@ -25,6 +25,8 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 @Category({RegionServerTests.class, SmallTests.class})
@@ -34,11 +36,13 @@ public class TestScanDeleteTracker extends HBaseTestCase {
   private long timestamp = 10L;
   private byte deleteType = 0;

+  @Before
   public void setUp() throws Exception {
     super.setUp();
     sdt = new ScanDeleteTracker();
   }

+  @Test
   public void testDeletedBy_Delete() {
     KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
         Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.Delete);
@@ -47,6 +51,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
     assertEquals(DeleteResult.VERSION_DELETED, ret);
   }

+  @Test
   public void testDeletedBy_DeleteColumn() {
     KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
         Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.DeleteColumn);
@@ -58,6 +63,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
     assertEquals(DeleteResult.COLUMN_DELETED, ret);
   }

+  @Test
   public void testDeletedBy_DeleteFamily() {
     KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
         Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.DeleteFamily);
@@ -69,6 +75,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
     assertEquals(DeleteResult.FAMILY_DELETED, ret);
   }

+  @Test
   public void testDeletedBy_DeleteFamilyVersion() {
     byte [] qualifier1 = Bytes.toBytes("qualifier1");
     byte [] qualifier2 = Bytes.toBytes("qualifier2");
@@ -113,6 +120,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
   }

+  @Test
   public void testDelete_DeleteColumn() {
     byte [] qualifier = Bytes.toBytes("qualifier");
     deleteType = KeyValue.Type.Delete.getCode();
@@ -134,6 +142,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
   }

+  @Test
   public void testDeleteColumn_Delete() {
     byte [] qualifier = Bytes.toBytes("qualifier");
     deleteType = KeyValue.Type.DeleteColumn.getCode();
@@ -154,6 +163,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
   //Testing new way where we save the Delete in case of a Delete for specific
   //ts, could have just added the last line to the first test, but rather keep
   //them separated
+  @Test
   public void testDelete_KeepDelete(){
     byte [] qualifier = Bytes.toBytes("qualifier");
     deleteType = KeyValue.Type.Delete.getCode();
@@ -164,6 +174,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
     assertEquals(false ,sdt.isEmpty());
   }

+  @Test
   public void testDelete_KeepVersionZero(){
     byte [] qualifier = Bytes.toBytes("qualifier");
     deleteType = KeyValue.Type.Delete.getCode();

File: TestScanWildcardColumnTracker.java

@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 @Category({RegionServerTests.class, SmallTests.class})
@@ -35,6 +36,7 @@ public class TestScanWildcardColumnTracker extends HBaseTestCase {
   final static int VERSIONS = 2;

+  @Test
   public void testCheckColumn_Ok() throws IOException {
     ScanWildcardColumnTracker tracker =
         new ScanWildcardColumnTracker(0, VERSIONS, Long.MIN_VALUE);
@@ -68,6 +70,7 @@ public class TestScanWildcardColumnTracker extends HBaseTestCase {
     }
   }

+  @Test
   public void testCheckColumn_EnforceVersions() throws IOException {
     ScanWildcardColumnTracker tracker =
         new ScanWildcardColumnTracker(0, VERSIONS, Long.MIN_VALUE);

File: TestStoreFile.java

@@ -58,6 +58,9 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
@@ -78,12 +81,12 @@ public class TestStoreFile extends HBaseTestCase {
   private static final int CKBYTES = 512;
   private static String TEST_FAMILY = "cf";

-  @Override
+  @Before
   public void setUp() throws Exception {
     super.setUp();
   }

-  @Override
+  @After
   public void tearDown() throws Exception {
     super.tearDown();
   }
@@ -93,6 +96,7 @@ public class TestStoreFile extends HBaseTestCase {
    * using two HalfMapFiles.
    * @throws Exception
    */
+  @Test
   public void testBasicHalfMapFile() throws Exception {
     final HRegionInfo hri =
         new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
@@ -145,6 +149,7 @@ public class TestStoreFile extends HBaseTestCase {
    * store files in other regions works.
    * @throws IOException
    */
+  @Test
   public void testReference() throws IOException {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testReferenceTb"));
     HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
@@ -188,6 +193,7 @@ public class TestStoreFile extends HBaseTestCase {
     assertTrue(Bytes.equals(kv.getRow(), finalRow));
   }

+  @Test
   public void testHFileLink() throws IOException {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testHFileLinkTb"));
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
@@ -230,6 +236,7 @@ public class TestStoreFile extends HBaseTestCase {
    * This test creates an hfile and then the dir structures and files to verify that references
    * to hfilelinks (created by snapshot clones) can be properly interpreted.
    */
+  @Test
   public void testReferenceToHFileLink() throws IOException {
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
@@ -494,6 +501,7 @@ public class TestStoreFile extends HBaseTestCase {
   private static final int BLOCKSIZE_SMALL = 8192;

+  @Test
   public void testBloomFilter() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
@@ -514,6 +522,7 @@ public class TestStoreFile extends HBaseTestCase {
     bloomWriteRead(writer, fs);
   }

+  @Test
   public void testDeleteFamilyBloomFilter() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
@@ -576,6 +585,7 @@ public class TestStoreFile extends HBaseTestCase {
   /**
    * Test for HBASE-8012
    */
+  @Test
   public void testReseek() throws Exception {
     // write the file
     Path f = new Path(ROOT_DIR, getName());
@@ -600,6 +610,7 @@ public class TestStoreFile extends HBaseTestCase {
     assertNotNull("Intial reseek should position at the beginning of the file", s.peek());
   }

+  @Test
   public void testBloomTypes() throws Exception {
     float err = (float) 0.01;
     FileSystem fs = FileSystem.getLocal(conf);
@@ -688,6 +699,7 @@ public class TestStoreFile extends HBaseTestCase {
     }
   }

+  @Test
   public void testSeqIdComparator() {
     assertOrdering(StoreFile.Comparators.SEQ_ID,
         mockStoreFile(true, 100, 1000, -1, "/foo/123"),
@@ -766,6 +778,7 @@ public class TestStoreFile extends HBaseTestCase {
    * Test to ensure correctness when using StoreFile with multiple timestamps
    * @throws IOException
    */
+  @Test
   public void testMultipleTimestamps() throws IOException {
     byte[] family = Bytes.toBytes("familyname");
     byte[] qualifier = Bytes.toBytes("qualifier");
@@ -816,6 +829,7 @@ public class TestStoreFile extends HBaseTestCase {
     assertTrue(!scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE));
   }

+  @Test
   public void testCacheOnWriteEvictOnClose() throws Exception {
     Configuration conf = this.conf;
@@ -988,6 +1002,7 @@ public class TestStoreFile extends HBaseTestCase {
    * Check if data block encoding information is saved correctly in HFile's
    * file info.
    */
+  @Test
   public void testDataBlockEncodingMetaData() throws IOException {
     // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
     Path dir = new Path(new Path(testDir, "7e0102"), "familyname");

File: TestStoreFileInfo.java

@@ -23,6 +23,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.io.HFileLink;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 /**
@@ -35,6 +36,7 @@ public class TestStoreFileInfo extends HBaseTestCase {
   /**
    * Validate that we can handle valid tables with '.', '_', and '-' chars.
    */
+  @Test
   public void testStoreFileNames() {
     String[] legalHFileLink = { "MyTable_02=abc012-def345", "MyTable_02.300=abc012-def345",
         "MyTable_02-400=abc012-def345", "MyTable_02-400.200=abc012-def345",

File: TestWideScanner.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 @Category({RegionServerTests.class, SmallTests.class})
@@ -85,6 +86,7 @@ public class TestWideScanner extends HBaseTestCase {
     return count;
   }

+  @Test
   public void testWideScanBatching() throws IOException {
     final int batch = 256;
     try {

File: TestMergeTool.java

@@ -46,6 +46,9 @@ import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.util.ToolRunner;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

 /** Test stand alone merge tool that can merge arbitrary regions */
@@ -63,7 +66,7 @@ public class TestMergeTool extends HBaseTestCase {
   private byte [][][] rows;
   private MiniDFSCluster dfsCluster = null;

-  @Override
+  @Before
   public void setUp() throws Exception {
     // Set the timeout down else this test will take a while to complete.
     this.conf.setLong("hbase.zookeeper.recoverable.waittime", 10);
@@ -175,7 +178,7 @@ public class TestMergeTool extends HBaseTestCase {
     }
   }

-  @Override
+  @After
   public void tearDown() throws Exception {
     super.tearDown();
     for (int i = 0; i < sourceRegions.length; i++) {
@@ -255,6 +258,7 @@ public class TestMergeTool extends HBaseTestCase {
    * Test merge tool.
    * @throws Exception
    */
+  @Test
   public void testMergeTool() throws Exception {
     // First verify we can read the rows from the source regions and that they
     // contain the right data.