HBASE-15915 Set timeouts on hanging tests.
Tests run: 8, Time elapsed: 94.191 sec - in org.apache.hadoop.hbase.master.procedure.TestMasterFailoverWithProcedures
Tests run: 103, Time elapsed: 69.057 sec - in org.apache.hadoop.hbase.regionserver.TestHRegion
Tests run: 103, Time elapsed: 67.957 sec - in org.apache.hadoop.hbase.regionserver.TestHRegionWithInMemoryFlush
Tests run: 5, Time elapsed: 34.629 sec - in org.apache.hadoop.hbase.regionserver.TestRegionMergeTransactionOnCluster
Tests run: 9, Time elapsed: 82.913 sec - in org.apache.hadoop.hbase.snapshot.TestFlushSnapshotFromClient
Tests run: 9, Time elapsed: 80.458 sec - in org.apache.hadoop.hbase.snapshot.TestMobFlushSnapshotFromClient
(Apekshit)

Change-Id: Ia7b986ca6276ff5498f588c0b1b9c570e2a9d798

Signed-off-by: stack <stack@apache.org>
Authored by Apekshit on 2016-05-29 22:33:41 -07:00, committed by stack
parent 0923346b61
commit 1a27278624
9 changed files with 83 additions and 35 deletions
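
For reference, the pattern this commit applies to each of the hanging tests looks roughly like the sketch below. The class name TestMyFeature and the test body are illustrative only and not part of this change; the timeout is sized from the test class's @Category (small/medium/large) and enables JUnit's stuck-thread reporting, replacing the per-method @Test(timeout=...) values removed in the TestHRegion diff further down.

    import org.apache.hadoop.hbase.CategoryBasedTimeout;
    import org.apache.hadoop.hbase.testclassification.MediumTests;
    import org.junit.ClassRule;
    import org.junit.Test;
    import org.junit.experimental.categories.Category;
    import org.junit.rules.TestRule;

    // Illustrative test class, not part of this commit.
    @Category({MediumTests.class})
    public class TestMyFeature {

      // One class-level timeout, derived from the @Category above and with
      // stuck-thread detection enabled, covers every test method in the class.
      @ClassRule
      public static final TestRule timeout =
          CategoryBasedTimeout.forClass(TestMyFeature.class);

      @Test
      public void testSomething() throws Exception {
        // test body elided
      }
    }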

File: CategoryBasedTimeout.java

@@ -32,6 +32,15 @@ import org.junit.rules.Timeout;
  */
 public class CategoryBasedTimeout extends Timeout {
+  public static Timeout forClass(Class<?> clazz) {
+    return CategoryBasedTimeout.builder().withTimeout(clazz).withLookingForStuckThread(true)
+        .build();
+  }
+
+  public static Builder builder() {
+    return new CategoryBasedTimeout.Builder();
+  }
+
   @Deprecated
   public CategoryBasedTimeout(int millis) {
     super(millis);
@@ -45,10 +54,6 @@ public class CategoryBasedTimeout extends Timeout {
     super(builder);
   }
 
-  public static Builder builder() {
-    return new CategoryBasedTimeout.Builder();
-  }
-
   public static class Builder extends Timeout.Builder {
     public Timeout.Builder withTimeout(Class<?> clazz) {
       Annotation annotation = clazz.getAnnotation(Category.class);

File: TestMobSnapshotCloneIndependence.java

@@ -22,13 +22,16 @@ package org.apache.hadoop.hbase.client;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.mob.MobConstants;
 import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestRule;
 
 /**
  * Test to verify that the cloned table is independent of the table from which it was cloned
@@ -37,6 +40,10 @@ import org.junit.experimental.categories.Category;
 public class TestMobSnapshotCloneIndependence extends TestSnapshotCloneIndependence {
   private static final Log LOG = LogFactory.getLog(TestMobSnapshotCloneIndependence.class);
 
+  @ClassRule
+  public static final TestRule timeout =
+      CategoryBasedTimeout.forClass(TestMobSnapshotCloneIndependence.class);
+
   /**
    * Setup the config for the cluster and start it
    * @throws Exception on failure

File: TestSnapshotCloneIndependence.java

@@ -24,6 +24,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -33,8 +34,8 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
-import org.apache.hadoop.hbase.snapshot.TestRestoreFlushSnapshotFromClient;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
@@ -43,21 +44,23 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-import org.junit.rules.Timeout;
+import org.junit.rules.TestRule;
 
 /**
  * Test to verify that the cloned table is independent of the table from which it was cloned
 */
-@Category({MediumTests.class, ClientTests.class})
+@Category({LargeTests.class, ClientTests.class})
 public class TestSnapshotCloneIndependence {
   private static final Log LOG = LogFactory.getLog(TestSnapshotCloneIndependence.class);
 
-  @Rule
-  public Timeout globalTimeout = Timeout.seconds(60);
+  @ClassRule
+  public static final TestRule timeout =
+      CategoryBasedTimeout.forClass(TestSnapshotCloneIndependence.class);
 
   @Rule
   public TestName testName = new TestName();

File: TestMasterFailoverWithProcedures.java

@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.TestMobSnapshotCloneIndependence;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
@@ -54,6 +55,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.ModifyRegionUtils;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -63,8 +65,10 @@ import org.mockito.Mockito;
 @Category({MasterTests.class, LargeTests.class})
 public class TestMasterFailoverWithProcedures {
   private static final Log LOG = LogFactory.getLog(TestMasterFailoverWithProcedures.class);
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
+  @ClassRule
+  public static final TestRule timeout =
+      CategoryBasedTimeout.forClass(TestMasterFailoverWithProcedures.class);
+
   protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

File: TestHRegion.java

@@ -54,6 +54,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ArrayBackedTag;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@@ -89,6 +90,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TestMobSnapshotCloneIndependence;
 import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
@@ -101,6 +103,7 @@ import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.master.procedure.TestMasterFailoverWithProcedures;
 import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
@@ -145,10 +148,12 @@ import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
+import org.junit.rules.TestRule;
 import org.mockito.ArgumentCaptor;
 import org.mockito.ArgumentMatcher;
 import org.mockito.Mockito;
@@ -193,6 +198,9 @@ public class TestHRegion {
   // over in TestHRegionOnCluster.
   private static final Log LOG = LogFactory.getLog(TestHRegion.class);
   @Rule public TestName name = new TestName();
+  @ClassRule
+  public static final TestRule timeout =
+      CategoryBasedTimeout.forClass(TestHRegion.class);
 
   private static final String COLUMN_FAMILY = "MyCF";
   private static final byte [] COLUMN_FAMILY_BYTES = Bytes.toBytes(COLUMN_FAMILY);
@@ -244,7 +252,7 @@ public class TestHRegion {
    * Test that I can use the max flushed sequence id after the close.
    * @throws IOException
    */
-  @Test (timeout = 100000)
+  @Test
   public void testSequenceId() throws IOException {
     HRegion region = initHRegion(tableName, name.getMethodName(), CONF, COLUMN_FAMILY_BYTES);
     assertEquals(HConstants.NO_SEQNUM, region.getMaxFlushedSeqId());
@@ -279,7 +287,7 @@ public class TestHRegion {
    * flushes for region close."
    * @throws IOException
    */
-  @Test (timeout=60000)
+  @Test
   public void testCloseCarryingSnapshot() throws IOException {
     HRegion region = initHRegion(tableName, name.getMethodName(), CONF, COLUMN_FAMILY_BYTES);
     Store store = region.getStore(COLUMN_FAMILY_BYTES);
@@ -305,7 +313,7 @@ public class TestHRegion {
    * This test is for verifying memstore snapshot size is correctly updated in case of rollback
    * See HBASE-10845
    */
-  @Test (timeout=60000)
+  @Test
   public void testMemstoreSnapshotSize() throws IOException {
     class MyFaultyFSLog extends FaultyFSLog {
       StoreFlushContext storeFlushCtx;
@@ -466,7 +474,7 @@ public class TestHRegion {
    * if memstoreSize is not larger than 0."
    * @throws Exception
    */
-  @Test (timeout=60000)
+  @Test
   public void testFlushSizeAccounting() throws Exception {
     final Configuration conf = HBaseConfiguration.create(CONF);
     final String callingMethod = name.getMethodName();
@@ -531,7 +539,7 @@ public class TestHRegion {
     FileSystem.closeAllForUGI(user.getUGI());
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testCloseWithFailingFlush() throws Exception {
     final Configuration conf = HBaseConfiguration.create(CONF);
     final String callingMethod = name.getMethodName();
@@ -1139,7 +1147,7 @@ public class TestHRegion {
     }
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testFlushMarkersWALFail() throws Exception {
     // test the cases where the WAL append for flush markers fail.
     String method = name.getMethodName();
@@ -5235,7 +5243,7 @@ public class TestHRegion {
         Bytes.toString(CellUtil.cloneValue(kv)));
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testReverseScanner_FromMemStore_SingleCF_Normal()
       throws IOException {
     byte[] rowC = Bytes.toBytes("rowC");
@@ -5294,7 +5302,7 @@ public class TestHRegion {
     }
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testReverseScanner_FromMemStore_SingleCF_LargerKey()
      throws IOException {
     byte[] rowC = Bytes.toBytes("rowC");
@@ -5354,7 +5362,7 @@ public class TestHRegion {
     }
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testReverseScanner_FromMemStore_SingleCF_FullScan()
       throws IOException {
     byte[] rowC = Bytes.toBytes("rowC");
@@ -5411,7 +5419,7 @@ public class TestHRegion {
     }
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testReverseScanner_moreRowsMayExistAfter() throws IOException {
     // case for "INCLUDE_AND_SEEK_NEXT_ROW & SEEK_NEXT_ROW" endless loop
     byte[] rowA = Bytes.toBytes("rowA");
@@ -5493,7 +5501,7 @@ public class TestHRegion {
     }
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testReverseScanner_smaller_blocksize() throws IOException {
     // case to ensure no conflict with HFile index optimization
     byte[] rowA = Bytes.toBytes("rowA");
@@ -5577,7 +5585,7 @@ public class TestHRegion {
     }
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testReverseScanner_FromMemStoreAndHFiles_MultiCFs1()
       throws IOException {
     byte[] row0 = Bytes.toBytes("row0"); // 1 kv
@@ -5746,7 +5754,7 @@ public class TestHRegion {
     }
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testReverseScanner_FromMemStoreAndHFiles_MultiCFs2()
       throws IOException {
     byte[] row1 = Bytes.toBytes("row1");
@@ -5827,7 +5835,7 @@ public class TestHRegion {
   /**
    * Test for HBASE-14497: Reverse Scan threw StackOverflow caused by readPt checking
    */
-  @Test (timeout = 60000)
+  @Test
   public void testReverseScanner_StackOverflow() throws IOException {
     byte[] cf1 = Bytes.toBytes("CF1");
     byte[][] families = {cf1};
@@ -5882,7 +5890,7 @@ public class TestHRegion {
     }
   }
 
-  @Test (timeout=60000)
+  @Test
   public void testSplitRegionWithReverseScan() throws IOException {
     TableName tableName = TableName.valueOf("testSplitRegionWithReverseScan");
     byte [] qualifier = Bytes.toBytes("qualifier");
@@ -6283,7 +6291,7 @@ public class TestHRegion {
   /**
    * Test RegionTooBusyException thrown when region is busy
    */
-  @Test (timeout=24000)
+  @Test
   public void testRegionTooBusy() throws IOException {
     String method = "testRegionTooBusy";
     TableName tableName = TableName.valueOf(method);
@@ -6546,7 +6554,7 @@ public class TestHRegion {
         qual2, 0, qual2.length));
   }
 
-  @Test(timeout = 60000)
+  @Test
   public void testBatchMutateWithWrongRegionException() throws Exception {
     final byte[] a = Bytes.toBytes("a");
     final byte[] b = Bytes.toBytes("b");

File: TestHRegionWithInMemoryFlush.java

@@ -22,13 +22,18 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.TestMobSnapshotCloneIndependence;
+import org.apache.hadoop.hbase.master.procedure.TestMasterFailoverWithProcedures;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
 import org.apache.hadoop.hbase.wal.WAL;
+import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestRule;
 
 /**
  * A test similar to TestHRegion, but with in-memory flush families.
@@ -40,6 +45,9 @@ public class TestHRegionWithInMemoryFlush extends TestHRegion{
   // Do not spin up clusters in here. If you need to spin up a cluster, do it
   // over in TestHRegionOnCluster.
   private static final Log LOG = LogFactory.getLog(TestHRegionWithInMemoryFlush.class);
+  @ClassRule
+  public static final TestRule timeout =
+      CategoryBasedTimeout.forClass(TestHRegionWithInMemoryFlush.class);
 
   /**
    * @return A region on which you must call

File: TestRegionMergeTransactionOnCluster.java

@@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TestMobSnapshotCloneIndependence;
 import org.apache.hadoop.hbase.exceptions.MergeRegionException;
 import org.apache.hadoop.hbase.master.AssignmentManager;
 import org.apache.hadoop.hbase.master.HMaster;
@@ -64,6 +65,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Regio
 import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -75,6 +77,7 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.zookeeper.KeeperException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -91,13 +94,15 @@ import com.google.protobuf.ServiceException;
 * cluster where {@link TestRegionMergeTransaction} is tests against bare
 * {@link HRegion}.
 */
-@Category({RegionServerTests.class, LargeTests.class})
+@Category({RegionServerTests.class, MediumTests.class})
 public class TestRegionMergeTransactionOnCluster {
   private static final Log LOG = LogFactory
       .getLog(TestRegionMergeTransactionOnCluster.class);
   @Rule public TestName name = new TestName();
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
+  @ClassRule
+  public static final TestRule timeout =
+      CategoryBasedTimeout.forClass(TestRegionMergeTransactionOnCluster.class);
+
   private static final int NB_SERVERS = 3;
   private static final byte[] FAMILYNAME = Bytes.toBytes("fam");

File: TestFlushSnapshotFromClient.java

@@ -42,7 +42,9 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TestMobSnapshotCloneIndependence;
 import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.master.procedure.TestMasterFailoverWithProcedures;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
@@ -55,6 +57,7 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -71,8 +74,9 @@ import org.junit.rules.TestRule;
 @Category({RegionServerTests.class, LargeTests.class})
 public class TestFlushSnapshotFromClient {
   private static final Log LOG = LogFactory.getLog(TestFlushSnapshotFromClient.class);
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
+  @ClassRule
+  public static final TestRule timeout =
+      CategoryBasedTimeout.forClass(TestFlushSnapshotFromClient.class);
 
   protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   protected static final int NUM_RS = 2;

File: TestMobFlushSnapshotFromClient.java

@@ -26,10 +26,12 @@ import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.master.procedure.TestMasterFailoverWithProcedures;
 import org.apache.hadoop.hbase.mob.MobConstants;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestRule;
@@ -45,8 +47,10 @@ import org.junit.rules.TestRule;
 @Category({ClientTests.class, LargeTests.class})
 public class TestMobFlushSnapshotFromClient extends TestFlushSnapshotFromClient {
   private static final Log LOG = LogFactory.getLog(TestFlushSnapshotFromClient.class);
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
+  @ClassRule
+  public static final TestRule timeout =
+      CategoryBasedTimeout.forClass(TestMobFlushSnapshotFromClient.class);
+
   @BeforeClass
   public static void setupCluster() throws Exception {