diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index 685d218468d..3d76fad60a5 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -23,7 +23,6 @@ package org.apache.hadoop.hbase.client;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.TableName;
@@ -43,7 +42,7 @@ import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestRule;
+import org.junit.rules.Timeout;
 import org.mockito.Mockito;
 
 import java.io.IOException;
@@ -67,8 +66,6 @@ import java.util.concurrent.atomic.AtomicLong;
 
 @Category(MediumTests.class)
 public class TestAsyncProcess {
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
   private final static Log LOG = LogFactory.getLog(TestAsyncProcess.class);
   private static final TableName DUMMY_TABLE = TableName.valueOf("DUMMY_TABLE");
@@ -415,6 +412,9 @@ public class TestAsyncProcess {
     }
   }
 
+  @Rule
+  public Timeout timeout = Timeout.millis(10000); // 10 seconds max per method tested
+
   @Test
   public void testSubmit() throws Exception {
     ClusterConnection hc = createHConnection();
diff --git a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/master/TestMetricsMasterSourceFactory.java b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/master/TestMetricsMasterSourceFactory.java
index 3f97bb3f035..91efd393ad4 100644
--- a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/master/TestMetricsMasterSourceFactory.java
+++ b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/master/TestMetricsMasterSourceFactory.java
@@ -19,8 +19,9 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.testclassification.MetricsTests;
+import org.apache.hadoop.hbase.master.MetricsMasterSource;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MetricsTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -34,5 +35,6 @@ public class TestMetricsMasterSourceFactory {
   public void testGetInstanceNoHadoopCompat() throws Exception {
     //This should throw an exception because there is no compat lib on the class path.
     CompatibilitySingletonFactory.getInstance(MetricsMasterSourceFactory.class);
+
   }
-}
\ No newline at end of file
+}
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestTimeoutBlockingQueue.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestTimeoutBlockingQueue.java
index ddf0275bc86..688e23a9299 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestTimeoutBlockingQueue.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestTimeoutBlockingQueue.java
@@ -22,23 +22,24 @@ package org.apache.hadoop.hbase.procedure2.util;
 import java.util.Arrays;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue.TimeoutRetriever;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.Rule;
-import org.junit.rules.TestRule;
+import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-@Category({MediumTests.class})
+@Category(SmallTests.class)
 public class TestTimeoutBlockingQueue {
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
+  private static final Log LOG = LogFactory.getLog(TestTimeoutBlockingQueue.class);
+
   static class TestObject {
     private long timeout;
     private int seqId;
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 14d033f031d..00520ac16d5 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -344,6 +344,15 @@
+
+        maven-surefire-plugin
+        ${surefire.version}
+
+
+          ${surefire.firstPartGroups}
+
+
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
index d3e5767e735..596b8ab5b49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
@@ -17,18 +17,15 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
-import static org.junit.Assert.assertTrue;
-
 import java.io.IOException;
 import java.net.Socket;
 import java.net.SocketAddress;
 import java.util.List;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
@@ -39,7 +36,7 @@ import org.apache.hadoop.hbase.client.MetricsConnection;
 import org.apache.hadoop.hbase.client.RetriesExhaustedException;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -47,14 +44,12 @@
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.ExpectedException;
-import org.junit.rules.TestRule;
-import com.google.common.collect.Lists;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.junit.Assert.*;
 
-@Category(MediumTests.class)
+@Category(SmallTests.class)
 public class TestRpcClientLeaks {
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
 
   public static class MyRpcClientImpl extends RpcClientImpl {
     public static List savedSockets = Lists.newArrayList();
@@ -118,4 +113,5 @@ public class TestRpcClientLeaks {
       assertTrue("Socket + " + socket + " is not closed", socket.isClosed());
     }
   }
-}
\ No newline at end of file
+}
+
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index 61f5000d5e3..fa0727a1152 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -25,7 +25,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import com.google.protobuf.Message;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Put;
@@ -40,10 +39,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
 import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestRule;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -67,8 +64,6 @@ import static org.mockito.Mockito.when;
 
 @Category(SmallTests.class)
 public class TestSimpleRpcScheduler {
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
   private static final Log LOG = LogFactory.getLog(TestSimpleRpcScheduler.class);
 
   private final RpcScheduler.Context CONTEXT = new RpcScheduler.Context() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
index a825d271801..a8726796cf1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.ChoreService;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -37,13 +36,11 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.io.HFileLink;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-import org.junit.Rule;
-import org.junit.rules.TestRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -51,10 +48,8 @@ import org.junit.experimental.categories.Category;
 /**
  * Test the HFileLink Cleaner.
  * HFiles with links cannot be deleted until a link is present.
  */
-@Category({MediumTests.class})
+@Category(SmallTests.class)
 public class TestHFileLinkCleaner {
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
 
   private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
index fa76e043754..7f026c48796 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
@@ -32,7 +32,6 @@ import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -44,19 +43,16 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-import org.junit.rules.TestRule;
-
-import org.apache.hadoop.hbase.testclassification.MediumTests;
 
-@Category({MediumTests.class})
+@Category(SmallTests.class)
 public class TestColumnSeeking {
   @Rule public TestName name = new TestName();
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
 
   private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java
index d48ecb2cefa..3413d44f735 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.Assert;
+import junit.framework.TestCase;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.experimental.categories.Category;
+
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
@@ -29,8 +29,8 @@ import java.util.concurrent.atomic.AtomicLong;
 /**
  * This is a hammer test that verifies MultiVersionConcurrencyControl in a
  * multiple writer single reader scenario.
  */
-@Category({MediumTests.class})
-public class TestMultiVersionConcurrencyControl {
+@Category(SmallTests.class)
+public class TestMultiVersionConcurrencyControl extends TestCase {
   static class Writer implements Runnable {
     final AtomicBoolean finished;
     final MultiVersionConcurrencyControl mvcc;
@@ -46,6 +46,7 @@ public class TestMultiVersionConcurrencyControl {
     public boolean failed = false;
 
     public void run() {
+      AtomicLong startPoint = new AtomicLong();
       while (!finished.get()) {
         MultiVersionConcurrencyControl.WriteEntry e = mvcc.begin();
@@ -123,9 +124,9 @@
     }
 
     // check failure.
-    Assert.assertFalse(readerFailed.get());
+    assertFalse(readerFailed.get());
     for (int i = 0; i < n; ++i) {
-      Assert.assertTrue(statuses[i].get());
+      assertTrue(statuses[i].get());
     }
   }
-}
+}
\ No newline at end of file
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
index 58569a15718..7ce3615658d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
@@ -25,8 +25,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -34,6 +32,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -41,17 +40,13 @@ import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hbase.wal.WALKey;
 import org.junit.Test;
-import org.junit.Rule;
-import org.junit.rules.TestRule;
 import org.junit.experimental.categories.Category;
 
 /**
  * Test many concurrent appenders to an WAL while rolling the log.
  */
-@Category({MediumTests.class})
+@Category(SmallTests.class)
 public class TestLogRollingNoCluster {
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-      withLookingForStuckThread(true).build();
   private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private final static byte [] EMPTY_1K_ARRAY = new byte[1024];
   private static final int THREAD_COUNT = 100; // Spin up this many threads
@@ -71,7 +66,7 @@ public class TestLogRollingNoCluster {
     FSUtils.setRootDir(conf, dir);
     final WALFactory wals = new WALFactory(conf, null, TestLogRollingNoCluster.class.getName());
     final WAL wal = wals.getWAL(new byte[]{}, null);
-    
+
     Appender [] appenders = null;
 
     final int count = THREAD_COUNT;
diff --git a/pom.xml b/pom.xml
index fd878c281fd..ba542ea222f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -487,7 +487,6 @@
-            ${surefire.firstPartGroups}
             false
             ${surefire.skipFirstPart}
             ${surefire.firstPartForkCount}
@@ -1181,8 +1180,7 @@
     false
     false
     1
-
-    1C
+    2
     org.apache.hadoop.hbase.testclassification.SmallTests
     org.apache.hadoop.hbase.testclassification.MediumTests
     false
@@ -2206,7 +2204,27 @@
          See as well the properties of the project for the values
          when no profile is active. -->
-
+
+      nonParallelTests
+
+        false
+
+
+        1
+
+
+
+
+      parallelTests
+
+        false
+
+
+        1
+
+
+
+
       singleJVMTests
 
         false
@@ -2280,7 +2298,7 @@
         1
-        1C
+        5
         false
         false
         org.apache.hadoop.hbase.testclassification.SmallTests