diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index d92ca97e587..506a6e07a07 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -65,17 +65,16 @@ public abstract class HBaseTestCase extends TestCase {
protected static final char LAST_CHAR = 'z';
protected static final String PUNCTUATION = "~`@#$%^&*()-_+=:;',.<>/?[]{}|";
protected static final byte [] START_KEY_BYTES = {FIRST_CHAR, FIRST_CHAR, FIRST_CHAR};
- protected String START_KEY;
+ protected String START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_CHARSET);
protected static final int MAXVERSIONS = 3;
protected final HBaseTestingUtility testUtil = new HBaseTestingUtility();
- public volatile Configuration conf;
+ public volatile Configuration conf = HBaseConfiguration.create();
/** constructor */
public HBaseTestCase() {
super();
- init();
}
/**
@@ -83,12 +82,6 @@ public abstract class HBaseTestCase extends TestCase {
*/
public HBaseTestCase(String name) {
super(name);
- init();
- }
-
- private void init() {
- conf = HBaseConfiguration.create();
- START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_CHARSET);
}
/**
@@ -246,7 +239,7 @@ public abstract class HBaseTestCase extends TestCase {
* @throws IOException
* @return count of what we added.
*/
- protected static long addContent(final HRegion r, final byte [] columnFamily)
+ public static long addContent(final HRegion r, final byte [] columnFamily)
throws IOException {
return addContent(r, columnFamily, null);
}
@@ -265,7 +258,7 @@ public abstract class HBaseTestCase extends TestCase {
return addContent(updater, columnFamily, START_KEY_BYTES, null);
}
- protected static long addContent(final Incommon updater, final String family,
+ public static long addContent(final Incommon updater, final String family,
final String column) throws IOException {
return addContent(updater, family, column, START_KEY_BYTES, null);
}
@@ -287,7 +280,7 @@ public abstract class HBaseTestCase extends TestCase {
return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1);
}
- protected static long addContent(final Incommon updater, final String family,
+ public static long addContent(final Incommon updater, final String family,
final String column, final byte [] startKeyBytes,
final byte [] endKey) throws IOException {
return addContent(updater, family, column, startKeyBytes, endKey, -1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 0b67a85ff44..e28b3614766 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.ChecksumUtil;
import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.RegionStates;
@@ -80,7 +81,9 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.MultiVersionConsistencyControl;
+import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.tool.Canary;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
@@ -93,7 +96,6 @@ import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hbase.zookeeper.ZKAssign;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-import org.apache.hadoop.hbase.tool.Canary;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -217,6 +219,32 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
ChecksumUtil.generateExceptionForChecksumFailureForTest(true);
}
+ /**
+ * Create an HBaseTestingUtility where all tmp files are written to the local test data dir.
+ * It is needed to properly base FSUtils.getRootDir so that they drop temp files in the proper
+ * test dir. Use this when you aren't using a Mini HDFS cluster.
+ * @return HBaseTestingUtility that uses local fs for temp files.
+ */
+ public static HBaseTestingUtility createLocalHTU() {
+ Configuration c = HBaseConfiguration.create();
+ return createLocalHTU(c);
+ }
+
+ /**
+ * Create an HBaseTestingUtility where all tmp files are written to the local test data dir.
+ * It is needed to properly base FSUtils.getRootDir so that they drop temp files in the proper
+ * test dir. Use this when you aren't using a Mini HDFS cluster.
+ * @param c Configuration (will be modified)
+ * @return HBaseTestingUtility that uses local fs for temp files.
+ */
+ public static HBaseTestingUtility createLocalHTU(Configuration c) {
+ HBaseTestingUtility htu = new HBaseTestingUtility(c);
+ String dataTestDir = htu.getDataTestDir().toString();
+ htu.getConfiguration().set(HConstants.HBASE_DIR, dataTestDir);
+ LOG.debug("Setting " + HConstants.HBASE_DIR + " to " + dataTestDir);
+ return htu;
+ }
+
/**
* Returns this classes's instance of {@link Configuration}. Be careful how
* you use the returned Configuration since {@link HConnection} instances
@@ -1439,6 +1467,81 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
getHBaseAdmin().deleteTable(tableName);
}
+ // ==========================================================================
+ // Canned table and table descriptor creation
+ // TODO replace HBaseTestCase
+
+ public final static byte [] fam1 = Bytes.toBytes("colfamily11");
+ public final static byte [] fam2 = Bytes.toBytes("colfamily21");
+ public final static byte [] fam3 = Bytes.toBytes("colfamily31");
+ public static final byte[][] COLUMNS = {fam1, fam2, fam3};
+ private static final int MAXVERSIONS = 3;
+
+ private static final char FIRST_CHAR = 'a';
+ public static final byte [] START_KEY_BYTES = {FIRST_CHAR, FIRST_CHAR, FIRST_CHAR};
+
+
+ /**
+ * Create a table of name <code>name</code> with
+ * {@link COLUMNS} for
+ * families.
+ * @param name Name to give table.
+ * @param versions How many versions to allow per column.
+ * @return Table descriptor.
+ */
+ public HTableDescriptor createTableDescriptor(final String name,
+ final int minVersions, final int versions, final int ttl, boolean keepDeleted) {
+ HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+ for (byte[] cfName : new byte[][]{ fam1, fam2, fam3 }) {
+ htd.addFamily(new HColumnDescriptor(cfName)
+ .setMinVersions(minVersions)
+ .setMaxVersions(versions)
+ .setKeepDeletedCells(keepDeleted)
+ .setBlockCacheEnabled(false)
+ .setTimeToLive(ttl)
+ );
+ }
+ return htd;
+ }
+
+ /**
+ * Create a table of name <code>name</code> with
+ * {@link COLUMNS} for
+ * families.
+ * @param name Name to give table.
+ * @return Table descriptor.
+ */
+ public HTableDescriptor createTableDescriptor(final String name) {
+ return createTableDescriptor(name, HColumnDescriptor.DEFAULT_MIN_VERSIONS,
+ MAXVERSIONS, HConstants.FOREVER, HColumnDescriptor.DEFAULT_KEEP_DELETED);
+ }
+
+ /**
+ * Create an HRegion that writes to the local tmp dirs
+ * @param desc
+ * @param startKey
+ * @param endKey
+ * @return an HRegion that writes to the local test data dir
+ * @throws IOException
+ */
+ public HRegion createLocalHRegion(HTableDescriptor desc, byte [] startKey,
+ byte [] endKey)
+ throws IOException {
+ HRegionInfo hri = new HRegionInfo(desc.getTableName(), startKey, endKey);
+ return createLocalHRegion(hri, desc);
+ }
+
+ /**
+ * Create an HRegion that writes to the local tmp dirs
+ * @param info
+ * @param desc
+ * @return an HRegion that writes to the local test data dir
+ * @throws IOException
+ */
+ public HRegion createLocalHRegion(HRegionInfo info, HTableDescriptor desc) throws IOException {
+ return HRegion.createHRegion(info, getDataTestDir(), getConfiguration(), desc);
+ }
+
+ //
+ // ==========================================================================
/**
* Provide an existing table name to truncate
@@ -1984,6 +2087,34 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
LOG.info("Mini mapreduce cluster stopped");
}
+ /**
+ * Create a stubbed out RegionServerServices, mainly for getting FS.
+ */
+ public RegionServerServices createMockRegionServerService() throws IOException {
+ return createMockRegionServerService((ServerName)null);
+ }
+
+ /**
+ * Create a stubbed out RegionServerServices, mainly for getting FS.
+ * This version is used by TestTokenAuthentication
+ */
+ public RegionServerServices createMockRegionServerService(RpcServerInterface rpc) throws IOException {
+ final MockRegionServerServices rss = new MockRegionServerServices(getZooKeeperWatcher());
+ rss.setFileSystem(getTestFileSystem());
+ rss.setRpcServer(rpc);
+ return rss;
+ }
+
+ /**
+ * Create a stubbed out RegionServerServices, mainly for getting FS.
+ * This version is used by TestOpenRegionHandler
+ */
+ public RegionServerServices createMockRegionServerService(ServerName name) throws IOException {
+ final MockRegionServerServices rss = new MockRegionServerServices(getZooKeeperWatcher(), name);
+ rss.setFileSystem(getTestFileSystem());
+ return rss;
+ }
+
/**
* Switches the logger for the given class to DEBUG level.
*
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
index 651d499b125..f4a8b1e8959 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
@@ -66,7 +66,7 @@ import org.mockito.stubbing.Answer;
public class TestZooKeeperTableArchiveClient {
private static final Log LOG = LogFactory.getLog(TestZooKeeperTableArchiveClient.class);
- private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+ private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
private static final String STRING_TABLE_NAME = "test";
private static final byte[] TEST_FAM = Bytes.toBytes("fam");
private static final byte[] TABLE_NAME = Bytes.toBytes(STRING_TABLE_NAME);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
index 1ea5d1a8f94..4ff2570d514 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
@@ -18,21 +18,21 @@
package org.apache.hadoop.hbase.client;
-import java.util.ArrayList;
-import java.util.List;
-
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -51,10 +50,9 @@ import org.junit.experimental.categories.Category;
*/
@Category(MediumTests.class)
public class TestSnapshotMetadata {
-
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final int NUM_RS = 2;
- private static final String STRING_TABLE_NAME = "testtable";
+ private static final String STRING_TABLE_NAME = "TestSnapshotMetadata";
private static final String MAX_VERSIONS_FAM_STR = "fam_max_columns";
private static final byte[] MAX_VERSIONS_FAM = Bytes.toBytes(MAX_VERSIONS_FAM_STR);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
index 7d2f54183ae..8b68ab315a8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
@@ -19,6 +19,14 @@
package org.apache.hadoop.hbase.coprocessor;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam3;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import static org.mockito.Mockito.when;
import java.io.IOException;
@@ -33,7 +41,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -43,6 +50,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -54,14 +62,17 @@ import org.apache.hadoop.hbase.regionserver.SplitTransaction;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.PairOfSameType;
+import org.junit.Rule;
+import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
import org.mockito.Mockito;
@Category(SmallTests.class)
-public class TestCoprocessorInterface extends HBaseTestCase {
+public class TestCoprocessorInterface {
+ @Rule public TestName name = new TestName();
static final Log LOG = LogFactory.getLog(TestCoprocessorInterface.class);
- private static final HBaseTestingUtility TEST_UTIL =
- new HBaseTestingUtility();
+ private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
static final Path DIR = TEST_UTIL.getDataTestDir();
private static class CustomScanner implements RegionScanner {
@@ -262,17 +273,17 @@ public class TestCoprocessorInterface extends HBaseTestCase {
}
}
+ @Test
public void testSharedData() throws IOException {
- TableName tableName =
- TableName.valueOf("testtable");
+ TableName tableName = TableName.valueOf(name.getMethodName());
byte [][] families = { fam1, fam2, fam3 };
Configuration hc = initSplit();
- HRegion region = initHRegion(tableName, getName(), hc,
+ HRegion region = initHRegion(tableName, name.getMethodName(), hc,
new Class>[]{}, families);
for (int i = 0; i < 3; i++) {
- addContent(region, fam3);
+ HBaseTestCase.addContent(region, fam3);
region.flushcache();
}
@@ -340,16 +351,16 @@ public class TestCoprocessorInterface extends HBaseTestCase {
assertFalse(o3 == o2);
}
+ @Test
public void testCoprocessorInterface() throws IOException {
- TableName tableName =
- TableName.valueOf("testtable");
+ TableName tableName = TableName.valueOf(name.getMethodName());
byte [][] families = { fam1, fam2, fam3 };
Configuration hc = initSplit();
- HRegion region = initHRegion(tableName, getName(), hc,
+ HRegion region = initHRegion(tableName, name.getMethodName(), hc,
new Class>[]{CoprocessorImpl.class}, families);
for (int i = 0; i < 3; i++) {
- addContent(region, fam3);
+ HBaseTestCase.addContent(region, fam3);
region.flushcache();
}
@@ -402,6 +413,7 @@ public class TestCoprocessorInterface extends HBaseTestCase {
// is secretly loaded at OpenRegionHandler. we don't really
// start a region server here, so just manually create cphost
// and set it to region.
+ Configuration conf = TEST_UTIL.getConfiguration();
RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf);
r.setCoprocessorHost(host);
@@ -499,7 +511,4 @@ public class TestCoprocessorInterface extends HBaseTestCase {
return regions;
}
-}
-
-
-
+}
\ No newline at end of file
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
index 428f304f5d8..01fdeeeefdf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
@@ -27,7 +27,6 @@ import java.util.Map;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Get;
@@ -66,7 +65,7 @@ public class TestEncodedSeekers {
private static final int NUM_HFILES = 4;
private static final int NUM_ROWS_PER_FLUSH = NUM_ROWS / NUM_HFILES;
- private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
+ private final HBaseTestingUtility testUtil = HBaseTestingUtility.createLocalHTU();
private final DataBlockEncoding encoding;
private final boolean encodeOnDisk;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index a25aee5022b..5dcf2c914df 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -69,8 +69,7 @@ public class TestCacheOnWrite {
private static final Log LOG = LogFactory.getLog(TestCacheOnWrite.class);
- private static final HBaseTestingUtility TEST_UTIL =
- new HBaseTestingUtility();
+ private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
private Configuration conf;
private CacheConfig cacheConf;
private FileSystem fs;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
index 915ee9b190a..665a7f3b019 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
@@ -16,15 +16,11 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
-import java.util.Map;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -52,8 +48,7 @@ import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestForceCacheImportantBlocks {
- private final HBaseTestingUtility TEST_UTIL =
- new HBaseTestingUtility();
+ private final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
private static final String TABLE = "myTable";
private static final String CF = "myCF";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java
index 4f2f5adfa25..f68c0151330 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java
@@ -65,9 +65,9 @@ public class TestOpenedRegionHandler {
@Before
public void setUp() throws Exception {
conf = HBaseConfiguration.create();
- TEST_UTIL = new HBaseTestingUtility(conf);
+ TEST_UTIL = HBaseTestingUtility.createLocalHTU(conf);
}
-
+
@After
public void tearDown() throws Exception {
// Stop the cluster
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index 59bc2dc2ece..3a4fcf592e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -16,7 +16,11 @@
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
-
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
@@ -33,7 +37,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -59,8 +62,10 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.junit.Rule;
+import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
/**
@@ -68,8 +73,9 @@ import org.junit.experimental.categories.Category;
* and HRegion.append
*/
@Category(MediumTests.class) // Starts 100 threads
-public class TestAtomicOperation extends HBaseTestCase {
+public class TestAtomicOperation {
static final Log LOG = LogFactory.getLog(TestAtomicOperation.class);
+ @Rule public TestName name = new TestName();
HRegion region = null;
private HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -86,20 +92,6 @@ public class TestAtomicOperation extends HBaseTestCase {
static final byte [] row = Bytes.toBytes("rowA");
static final byte [] row2 = Bytes.toBytes("rowB");
- /**
- * @see org.apache.hadoop.hbase.HBaseTestCase#setUp()
- */
- @Override
- protected void setUp() throws Exception {
- super.setUp();
- }
-
- @Override
- protected void tearDown() throws Exception {
- super.tearDown();
- EnvironmentEdgeManagerTestHelper.reset();
- }
-
//////////////////////////////////////////////////////////////////////////////
// New tests that doesn't spin up a mini cluster but rather just test the
// individual code pieces in the HRegion.
@@ -110,8 +102,9 @@ public class TestAtomicOperation extends HBaseTestCase {
* More tests in
* @see org.apache.hadoop.hbase.client.TestFromClientSide#testAppend()
*/
+ @Test
public void testAppend() throws IOException {
- initHRegion(tableName, getName(), fam1);
+ initHRegion(tableName, name.getMethodName(), fam1);
String v1 = "Ultimate Answer to the Ultimate Question of Life,"+
" The Universe, and Everything";
String v2 = " is... 42.";
@@ -131,11 +124,12 @@ public class TestAtomicOperation extends HBaseTestCase {
/**
* Test multi-threaded increments.
*/
+ @Test
public void testIncrementMultiThreads() throws IOException {
LOG.info("Starting test testIncrementMultiThreads");
// run a with mixed column families (1 and 3 versions)
- initHRegion(tableName, getName(), new int[] {1,3}, fam1, fam2);
+ initHRegion(tableName, name.getMethodName(), new int[] {1,3}, fam1, fam2);
// create 100 threads, each will increment by its own quantity
int numThreads = 100;
@@ -202,6 +196,7 @@ public class TestAtomicOperation extends HBaseTestCase {
}
HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
Path path = new Path(DIR + callingMethod);
+ FileSystem fs = TEST_UTIL.getTestFileSystem();
if (fs.exists(path)) {
if (!fs.delete(path, true)) {
throw new IOException("Failed delete of " + path);
@@ -250,10 +245,11 @@ public class TestAtomicOperation extends HBaseTestCase {
}
}
+ @Test
public void testAppendMultiThreads() throws IOException {
LOG.info("Starting test testAppendMultiThreads");
// run a with mixed column families (1 and 3 versions)
- initHRegion(tableName, getName(), new int[] {1,3}, fam1, fam2);
+ initHRegion(tableName, name.getMethodName(), new int[] {1,3}, fam1, fam2);
int numThreads = 100;
int opsPerThread = 100;
@@ -310,10 +306,11 @@ public class TestAtomicOperation extends HBaseTestCase {
/**
* Test multi-threaded row mutations.
*/
+ @Test
public void testRowMutationMultiThreads() throws IOException {
LOG.info("Starting test testRowMutationMultiThreads");
- initHRegion(tableName, getName(), fam1);
+ initHRegion(tableName, name.getMethodName(), fam1);
// create 10 threads, each will alternate between adding and
// removing a column
@@ -397,10 +394,11 @@ public class TestAtomicOperation extends HBaseTestCase {
/**
* Test multi-threaded region mutations.
*/
+ @Test
public void testMultiRowMutationMultiThreads() throws IOException {
LOG.info("Starting test testMultiRowMutationMultiThreads");
- initHRegion(tableName, getName(), fam1);
+ initHRegion(tableName, name.getMethodName(), fam1);
// create 10 threads, each will alternate between adding and
// removing a column
@@ -518,6 +516,7 @@ public class TestAtomicOperation extends HBaseTestCase {
*
* Moved into TestAtomicOperation from its original location, TestHBase7051
*/
+ @Test
public void testPutAndCheckAndPutInParallel() throws Exception {
final String tableName = "testPutAndCheckAndPut";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
index 89e893649a4..35fbce55244 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
@@ -36,14 +36,16 @@ import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
@Category(SmallTests.class)
public class TestColumnSeeking {
+ @Rule public TestName name = new TestName();
- private final static HBaseTestingUtility TEST_UTIL =
- new HBaseTestingUtility();
+ private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
static final Log LOG = LogFactory.getLog(TestColumnSeeking.class);
@@ -52,7 +54,7 @@ public class TestColumnSeeking {
public void testDuplicateVersions() throws IOException {
String family = "Family";
byte[] familyBytes = Bytes.toBytes("Family");
- TableName table = TableName.valueOf("TestDuplicateVersions");
+ TableName table = TableName.valueOf(name.getMethodName());
HColumnDescriptor hcd =
new HColumnDescriptor(familyBytes).setMaxVersions(1000);
@@ -60,9 +62,8 @@ public class TestColumnSeeking {
HTableDescriptor htd = new HTableDescriptor(table);
htd.addFamily(hcd);
HRegionInfo info = new HRegionInfo(table, null, null, false);
- HRegion region =
- HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), TEST_UTIL
- .getConfiguration(), htd);
+ // Set this so that the archiver writes to the temp dir as well.
+ HRegion region = TEST_UTIL.createLocalHRegion(info, htd);
try {
List rows = generateRandomWords(10, "row");
List allColumns = generateRandomWords(10, "column");
@@ -166,8 +167,7 @@ public class TestColumnSeeking {
public void testReseeking() throws IOException {
String family = "Family";
byte[] familyBytes = Bytes.toBytes("Family");
- TableName table =
- TableName.valueOf("TestSingleVersions");
+ TableName table = TableName.valueOf(name.getMethodName());
HTableDescriptor htd = new HTableDescriptor(table);
HColumnDescriptor hcd = new HColumnDescriptor(family);
@@ -175,9 +175,7 @@ public class TestColumnSeeking {
htd.addFamily(hcd);
HRegionInfo info = new HRegionInfo(table, null, null, false);
- HRegion region =
- HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), TEST_UTIL
- .getConfiguration(), htd);
+ HRegion region = TEST_UTIL.createLocalHRegion(info, htd);
List rows = generateRandomWords(10, "row");
List allColumns = generateRandomWords(100, "column");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index bbf38686409..d391c64215b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -17,11 +17,17 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
@@ -35,10 +41,16 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
@Category(SmallTests.class)
-public class TestKeepDeletes extends HBaseTestCase {
+public class TestKeepDeletes {
+ HBaseTestingUtility hbu = HBaseTestingUtility.createLocalHTU();
private final byte[] T0 = Bytes.toBytes("0");
private final byte[] T1 = Bytes.toBytes("1");
private final byte[] T2 = Bytes.toBytes("2");
@@ -50,9 +62,10 @@ public class TestKeepDeletes extends HBaseTestCase {
private final byte[] c0 = COLUMNS[0];
private final byte[] c1 = COLUMNS[1];
- @Override
- protected void setUp() throws Exception {
- super.setUp();
+ @Rule public TestName name = new TestName();
+
+ @Before
+ public void setUp() throws Exception {
/* HBASE-6832: [WINDOWS] Tests should use explicit timestamp for Puts, and not rely on
* implicit RS timing.
* Use an explicit timer (IncrementingEnvironmentEdge) so that the put, delete
@@ -66,9 +79,8 @@ public class TestKeepDeletes extends HBaseTestCase {
EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
}
- @Override
- protected void tearDown() throws Exception {
- super.tearDown();
+ @After
+ public void tearDown() throws Exception {
EnvironmentEdgeManager.reset();
}
@@ -78,11 +90,12 @@ public class TestKeepDeletes extends HBaseTestCase {
* Column Delete markers are versioned
* Time range scan of deleted rows are possible
*/
+ @Test
public void testBasicScenario() throws Exception {
// keep 3 versions, rows do not expire
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
Put p = new Put(T1, ts);
@@ -174,11 +187,12 @@ public class TestKeepDeletes extends HBaseTestCase {
* if the store does not have KEEP_DELETED_CELLS enabled.
* (can be changed easily)
*/
+ @Test
public void testRawScanWithoutKeepingDeletes() throws Exception {
// KEEP_DELETED_CELLS is NOT enabled
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, false);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
Put p = new Put(T1, ts);
@@ -218,11 +232,12 @@ public class TestKeepDeletes extends HBaseTestCase {
/**
* basic verification of existing behavior
*/
+ @Test
public void testWithoutKeepingDeletes() throws Exception {
// KEEP_DELETED_CELLS is NOT enabled
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, false);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
Put p = new Put(T1, ts);
@@ -262,10 +277,11 @@ public class TestKeepDeletes extends HBaseTestCase {
/**
* The ExplicitColumnTracker does not support "raw" scanning.
*/
+ @Test
public void testRawScanWithColumns() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
Scan s = new Scan();
s.setRaw(true);
@@ -273,7 +289,7 @@ public class TestKeepDeletes extends HBaseTestCase {
s.addColumn(c0, c0);
try {
- InternalScanner scan = region.getScanner(s);
+ region.getScanner(s);
fail("raw scanner with columns should have failed");
} catch (org.apache.hadoop.hbase.DoNotRetryIOException dnre) {
// ok!
@@ -285,10 +301,11 @@ public class TestKeepDeletes extends HBaseTestCase {
/**
* Verify that "raw" scanning mode return delete markers and deletes rows.
*/
+ @Test
public void testRawScan() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
Put p = new Put(T1, ts);
@@ -320,11 +337,11 @@ public class TestKeepDeletes extends HBaseTestCase {
scan.next(kvs);
assertEquals(8, kvs.size());
assertTrue(kvs.get(0).isDeleteFamily());
- assertEquals(kvs.get(1).getValue(), T3);
+ assertArrayEquals(kvs.get(1).getValue(), T3);
assertTrue(kvs.get(2).isDelete());
assertTrue(kvs.get(3).isDeleteType());
- assertEquals(kvs.get(4).getValue(), T2);
- assertEquals(kvs.get(5).getValue(), T1);
+ assertArrayEquals(kvs.get(4).getValue(), T2);
+ assertArrayEquals(kvs.get(5).getValue(), T1);
// we have 3 CFs, so there are two more delete markers
assertTrue(kvs.get(6).isDeleteFamily());
assertTrue(kvs.get(7).isDeleteFamily());
@@ -350,7 +367,7 @@ public class TestKeepDeletes extends HBaseTestCase {
scan.next(kvs);
assertEquals(4, kvs.size());
assertTrue(kvs.get(0).isDeleteFamily());
- assertEquals(kvs.get(1).getValue(), T1);
+ assertArrayEquals(kvs.get(1).getValue(), T1);
// we have 3 CFs
assertTrue(kvs.get(2).isDeleteFamily());
assertTrue(kvs.get(3).isDeleteFamily());
@@ -364,7 +381,7 @@ public class TestKeepDeletes extends HBaseTestCase {
kvs = new ArrayList();
scan.next(kvs);
assertEquals(2, kvs.size());
- assertEquals(kvs.get(0).getValue(), T3);
+ assertArrayEquals(kvs.get(0).getValue(), T3);
assertTrue(kvs.get(1).isDelete());
@@ -374,10 +391,11 @@ public class TestKeepDeletes extends HBaseTestCase {
/**
* Verify that delete markers are removed from an otherwise empty store.
*/
+ @Test
public void testDeleteMarkerExpirationEmptyStore() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 1,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
@@ -416,10 +434,11 @@ public class TestKeepDeletes extends HBaseTestCase {
/**
* Test delete marker removal from store files.
*/
+ @Test
public void testDeleteMarkerExpiration() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 1,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
@@ -478,10 +497,11 @@ public class TestKeepDeletes extends HBaseTestCase {
/**
* Verify correct range demarcation
*/
+ @Test
public void testRanges() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 3,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
Put p = new Put(T1, ts);
@@ -559,10 +579,11 @@ public class TestKeepDeletes extends HBaseTestCase {
* with their respective puts and removed correctly by
* versioning (i.e. not relying on the store earliestPutTS).
*/
+ @Test
public void testDeleteMarkerVersioning() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 1,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
Put p = new Put(T1, ts);
@@ -652,9 +673,9 @@ public class TestKeepDeletes extends HBaseTestCase {
* Verify scenarios with multiple CFs and columns
*/
public void testWithMixedCFs() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 0, 1,
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis();
@@ -702,9 +723,10 @@ public class TestKeepDeletes extends HBaseTestCase {
* Test keeping deleted rows together with min versions set
* @throws Exception
*/
+ @Test
public void testWithMinVersions() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, true);
- HRegion region = createNewHRegion(htd, null, null);
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, true);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000; // 2s in the past
@@ -811,7 +833,7 @@ public class TestKeepDeletes extends HBaseTestCase {
List kvs = r.getColumn(fam, col);
assertEquals(kvs.size(), vals.length);
for (int i=0;i stores = region.getStores();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
index e840667191d..2b2c1a0d4c8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
@@ -18,10 +18,18 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
@@ -29,13 +37,17 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.filter.TimestampsFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.junit.Rule;
+import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
/**
* Test Minimum Versions feature (HBASE-4071).
*/
@Category(SmallTests.class)
-public class TestMinVersions extends HBaseTestCase {
+public class TestMinVersions {
+ HBaseTestingUtility hbu = HBaseTestingUtility.createLocalHTU();
private final byte[] T0 = Bytes.toBytes("0");
private final byte[] T1 = Bytes.toBytes("1");
private final byte[] T2 = Bytes.toBytes("2");
@@ -45,12 +57,15 @@ public class TestMinVersions extends HBaseTestCase {
private final byte[] c0 = COLUMNS[0];
+ @Rule public TestName name = new TestName();
+
/**
* Verify behavior of getClosestBefore(...)
*/
+ @Test
public void testGetClosestBefore() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 1, 1000, 1, false);
- HRegion region = createNewHRegion(htd, null, null);
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 1, 1000, 1, false);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
try {
// 2s in the past
@@ -95,10 +110,11 @@ public class TestMinVersions extends HBaseTestCase {
* Test mixed memstore and storefile scanning
* with minimum versions.
*/
+ @Test
public void testStoreMemStore() throws Exception {
// keep 3 versions minimum
- HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, false);
- HRegion region = createNewHRegion(htd, null, null);
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, false);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
@@ -149,9 +165,10 @@ public class TestMinVersions extends HBaseTestCase {
/**
* Make sure the Deletes behave as expected with minimum versions
*/
+ @Test
public void testDelete() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, false);
- HRegion region = createNewHRegion(htd, null, null);
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, false);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
@@ -206,9 +223,10 @@ public class TestMinVersions extends HBaseTestCase {
/**
* Make sure the memstor behaves correctly with minimum versions
*/
+ @Test
public void testMemStore() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 2, 1000, 1, false);
- HRegion region = createNewHRegion(htd, null, null);
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000;
@@ -279,10 +297,11 @@ public class TestMinVersions extends HBaseTestCase {
/**
* Verify basic minimum versions functionality
*/
+ @Test
public void testBaseCase() throws Exception {
// 1 version minimum, 1000 versions maximum, ttl = 1s
- HTableDescriptor htd = createTableDescriptor(getName(), 2, 1000, 1, false);
- HRegion region = createNewHRegion(htd, null, null);
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
try {
// 2s in the past
@@ -370,9 +389,10 @@ public class TestMinVersions extends HBaseTestCase {
* Verify that basic filters still behave correctly with
* minimum versions enabled.
*/
+ @Test
public void testFilters() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName(), 2, 1000, 1, false);
- HRegion region = createNewHRegion(htd, null, null);
+ HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
+ HRegion region = hbu.createLocalHRegion(htd, null, null);
final byte [] c1 = COLUMNS[1];
// 2s in the past
@@ -444,7 +464,7 @@ public class TestMinVersions extends HBaseTestCase {
List kvs = r.getColumn(col, col);
assertEquals(kvs.size(), vals.length);
for (int i=0;i parameters() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
index 7d8acc5ca7f..e979ba3862c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -18,6 +18,15 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.START_KEY_BYTES;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -25,6 +34,8 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HBaseTestCase.HRegionIncommon;
+import org.apache.hadoop.hbase.HBaseTestCase.ScannerIncommon;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -44,14 +55,20 @@ import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Rule;
+import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
/**
* Test of a long-lived scanner validating as we go.
*/
@Category(SmallTests.class)
-public class TestScanner extends HBaseTestCase {
+public class TestScanner {
+ @Rule public TestName name = new TestName();
private final Log LOG = LogFactory.getLog(this.getClass());
+ private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
private static final byte [] FIRST_ROW = HConstants.EMPTY_START_ROW;
private static final byte [][] COLS = { HConstants.CATALOG_FAMILY };
@@ -104,12 +121,13 @@ public class TestScanner extends HBaseTestCase {
* Test basic stop row filter works.
* @throws Exception
*/
+ @Test
public void testStopRow() throws Exception {
byte [] startrow = Bytes.toBytes("bbb");
byte [] stoprow = Bytes.toBytes("ccc");
try {
- this.r = createNewHRegion(TESTTABLEDESC, null, null);
- addContent(this.r, HConstants.CATALOG_FAMILY);
+ this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
+ HBaseTestCase.addContent(this.r, HConstants.CATALOG_FAMILY);
List results = new ArrayList();
// Do simple test of getting one row only first.
Scan scan = new Scan(Bytes.toBytes("abc"), Bytes.toBytes("abd"));
@@ -178,10 +196,11 @@ public class TestScanner extends HBaseTestCase {
s.close();
}
+ @Test
public void testFilters() throws IOException {
try {
- this.r = createNewHRegion(TESTTABLEDESC, null, null);
- addContent(this.r, HConstants.CATALOG_FAMILY);
+ this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
+ HBaseTestCase.addContent(this.r, HConstants.CATALOG_FAMILY);
byte [] prefix = Bytes.toBytes("ab");
Filter newFilter = new PrefixFilter(prefix);
Scan scan = new Scan();
@@ -204,10 +223,11 @@ public class TestScanner extends HBaseTestCase {
* NPEs but instead a UnknownScannerException. HBASE-2503
* @throws Exception
*/
+ @Test
public void testRaceBetweenClientAndTimeout() throws Exception {
try {
- this.r = createNewHRegion(TESTTABLEDESC, null, null);
- addContent(this.r, HConstants.CATALOG_FAMILY);
+ this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
+ HBaseTestCase.addContent(this.r, HConstants.CATALOG_FAMILY);
Scan scan = new Scan();
InternalScanner s = r.getScanner(scan);
List results = new ArrayList();
@@ -228,9 +248,10 @@ public class TestScanner extends HBaseTestCase {
/** The test!
* @throws IOException
*/
+ @Test
public void testScanner() throws IOException {
try {
- r = createNewHRegion(TESTTABLEDESC, null, null);
+ r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
region = new HRegionIncommon(r);
// Write information to the meta table
@@ -250,7 +271,7 @@ public class TestScanner extends HBaseTestCase {
// Close and re-open
r.close();
- r = openClosedRegion(r);
+ r = HRegion.openHRegion(r, null);
region = new HRegionIncommon(r);
// Verify we can get the data back now that it is on disk.
@@ -288,7 +309,7 @@ public class TestScanner extends HBaseTestCase {
// Close and reopen
r.close();
- r = openClosedRegion(r);
+ r = HRegion.openHRegion(r,null);
region = new HRegionIncommon(r);
// Validate again
@@ -323,7 +344,7 @@ public class TestScanner extends HBaseTestCase {
// Close and reopen
r.close();
- r = openClosedRegion(r);
+ r = HRegion.openHRegion(r,null);
region = new HRegionIncommon(r);
// Validate again
@@ -440,11 +461,12 @@ public class TestScanner extends HBaseTestCase {
* HBase-910.
* @throws Exception
*/
+ @Test
public void testScanAndSyncFlush() throws Exception {
- this.r = createNewHRegion(TESTTABLEDESC, null, null);
+ this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
HRegionIncommon hri = new HRegionIncommon(r);
try {
- LOG.info("Added: " + addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
+ LOG.info("Added: " + HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
int count = count(hri, -1, false);
assertEquals(count, count(hri, 100, false)); // do a sync flush.
@@ -462,11 +484,12 @@ public class TestScanner extends HBaseTestCase {
*
* @throws Exception
*/
+ @Test
public void testScanAndRealConcurrentFlush() throws Exception {
- this.r = createNewHRegion(TESTTABLEDESC, null, null);
+ this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
HRegionIncommon hri = new HRegionIncommon(r);
try {
- LOG.info("Added: " + addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
+ LOG.info("Added: " + HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
int count = count(hri, -1, false);
assertEquals(count, count(hri, 100, true)); // do a true concurrent background thread flush
@@ -484,16 +507,17 @@ public class TestScanner extends HBaseTestCase {
*
* @throws Exception
*/
+ @Test
@SuppressWarnings("deprecation")
public void testScanAndConcurrentMajorCompact() throws Exception {
- HTableDescriptor htd = createTableDescriptor(getName());
- this.r = createNewHRegion(htd, null, null);
+ HTableDescriptor htd = TEST_UTIL.createTableDescriptor(name.getMethodName());
+ this.r = TEST_UTIL.createLocalHRegion(htd, null, null);
HRegionIncommon hri = new HRegionIncommon(r);
try {
- addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
+ HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
firstRowBytes, secondRowBytes);
- addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
+ HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
firstRowBytes, secondRowBytes);
Delete dc = new Delete(firstRowBytes);
@@ -502,9 +526,9 @@ public class TestScanner extends HBaseTestCase {
r.delete(dc);
r.flushcache();
- addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
+ HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
secondRowBytes, thirdRowBytes);
- addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
+ HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
secondRowBytes, thirdRowBytes);
r.flushcache();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index 8146e8355f3..0699c753950 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -117,8 +117,7 @@ public class TestSeekOptimizations {
private long totalSeekDiligent, totalSeekLazy;
- private final static HBaseTestingUtility TEST_UTIL =
- new HBaseTestingUtility();
+ private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
@Parameters
public static final Collection