diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java index 3a746828b90..fc405df031b 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java @@ -586,7 +586,7 @@ public class TestAsyncProcess { doSubmitRequest(maxHeapSizePerRequest, putsHeapSize); } - @Test(timeout=300000) + @Test public void testSubmitRandomSizeRequest() throws Exception { Random rn = new Random(); final long limit = 10 * 1024 * 1024; @@ -611,7 +611,7 @@ public class TestAsyncProcess { doSubmitRequest(maxHeapSizePerRequest, putsHeapSize); } - @Test(timeout=120000) + @Test public void testSubmitLargeRequest() throws Exception { long maxHeapSizePerRequest = 2 * 1024 * 1024; long putsHeapSize = maxHeapSizePerRequest * 2; diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java index cfe9e07616a..48ca7511797 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java @@ -462,7 +462,7 @@ public class TestClientScanner { * Tests the case where all replicas of a region throw an exception. It should not cause a hang * but the exception should propagate to the client */ - @Test (timeout = 30000) + @Test public void testExceptionsFromReplicasArePropagated() throws IOException { scan.setConsistency(Consistency.TIMELINE); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestProcedureFuture.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestProcedureFuture.java index 44bd6e801db..01740e98461 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestProcedureFuture.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestProcedureFuture.java @@ -105,7 +105,7 @@ public class TestProcedureFuture { * we are skipping the waitOperationResult() call, * since we are getting the procedure result. */ - @Test(timeout=60000) + @Test public void testWithProcId() throws Exception { HBaseAdmin admin = Mockito.mock(HBaseAdmin.class); TestFuture f = new TestFuture(admin, 100L); @@ -120,7 +120,7 @@ public class TestProcedureFuture { /** * Verify that the spin loop for the procedure running works. */ - @Test(timeout=60000) + @Test public void testWithProcIdAndSpinning() throws Exception { final AtomicInteger spinCount = new AtomicInteger(0); HBaseAdmin admin = Mockito.mock(HBaseAdmin.class); @@ -147,7 +147,7 @@ public class TestProcedureFuture { * When a master return a result without procId, * we are skipping the getProcedureResult() call. */ - @Test(timeout=60000) + @Test public void testWithoutProcId() throws Exception { HBaseAdmin admin = Mockito.mock(HBaseAdmin.class); TestFuture f = new TestFuture(admin, null); @@ -167,7 +167,7 @@ public class TestProcedureFuture { * This happens when the operation calls happens on a "new master" but while we are waiting * the operation to be completed, we failover on an "old master". 
*/ - @Test(timeout=60000) + @Test public void testOnServerWithNoProcedureSupport() throws Exception { HBaseAdmin admin = Mockito.mock(HBaseAdmin.class); TestFuture f = new TestFuture(admin, 100L) { diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java index a353a4085df..387e9dd6c15 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java @@ -64,7 +64,7 @@ public class TestSnapshotFromAdmin { * passed from the server ensures the correct overall waiting for the snapshot to finish. * @throws Exception */ - @Test(timeout = 60000) + @Test public void testBackoffLogic() throws Exception { final int pauseTime = 100; final int maxWaitTime = diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index dad1ce7c1bc..ce838fac758 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -243,7 +243,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testInitialChorePrecedence() throws InterruptedException { ChoreService service = new ChoreService("testInitialChorePrecedence"); @@ -272,7 +272,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testCancelChore() throws InterruptedException { final int period = 100; ScheduledChore chore1 = new DoNothingChore("chore1", period); @@ -289,7 +289,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testScheduledChoreConstruction() { final String NAME = "chore"; final int PERIOD = 100; @@ -322,7 +322,7 @@ public class TestChoreService { invalidDelayChore.getInitialDelay()); } - @Test (timeout=20000) + @Test public void testChoreServiceConstruction() throws InterruptedException { final int corePoolSize = 10; final int defaultCorePoolSize = ChoreService.MIN_CORE_POOL_SIZE; @@ -350,7 +350,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testFrequencyOfChores() throws InterruptedException { final int period = 100; // Small delta that acts as time buffer (allowing chores to complete if running slowly) @@ -377,7 +377,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testForceTrigger() throws InterruptedException { final int period = 100; final int delta = 10; @@ -413,7 +413,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testCorePoolIncrease() throws InterruptedException { final int initialCorePoolSize = 3; ChoreService service = new ChoreService("testCorePoolIncrease", initialCorePoolSize, false); @@ -453,7 +453,7 @@ public class TestChoreService { } } - @Test(timeout = 30000) + @Test public void testCorePoolDecrease() throws InterruptedException { final int initialCorePoolSize = 3; ChoreService service = new ChoreService("testCorePoolDecrease", initialCorePoolSize, false); @@ -522,7 +522,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testNumberOfRunningChores() throws InterruptedException { ChoreService service = new ChoreService("testNumberOfRunningChores"); @@ -563,7 +563,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public 
void testNumberOfChoresMissingStartTime() throws InterruptedException { ChoreService service = new ChoreService("testNumberOfChoresMissingStartTime"); @@ -611,7 +611,7 @@ public class TestChoreService { * been scheduled with the service. For example, if 4 ScheduledChores are scheduled with a * ChoreService, the number of threads in the ChoreService's core pool should never exceed 4 */ - @Test (timeout=20000) + @Test public void testMaximumChoreServiceThreads() throws InterruptedException { ChoreService service = new ChoreService("testMaximumChoreServiceThreads"); @@ -658,7 +658,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testChangingChoreServices() throws InterruptedException { final int period = 100; final int sleepTime = 10; @@ -697,7 +697,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testStopperForScheduledChores() throws InterruptedException { ChoreService service = new ChoreService("testStopperForScheduledChores"); Stoppable stopperForGroup1 = new SampleStopper(); @@ -752,7 +752,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testShutdownCancelsScheduledChores() throws InterruptedException { final int period = 100; ChoreService service = new ChoreService("testShutdownCancelsScheduledChores"); @@ -776,7 +776,7 @@ public class TestChoreService { assertFalse(successChore3.isScheduled()); } - @Test (timeout=20000) + @Test public void testShutdownWorksWhileChoresAreExecuting() throws InterruptedException { final int period = 100; final int sleep = 5 * period; @@ -804,7 +804,7 @@ public class TestChoreService { } } - @Test (timeout=20000) + @Test public void testShutdownRejectsNewSchedules() throws InterruptedException { final int period = 100; ChoreService service = new ChoreService("testShutdownRejectsNewSchedules"); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java index e094a1daa82..0b22caaec51 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java @@ -88,7 +88,7 @@ public class TestKeyStoreKeyProvider { } } - @Test(timeout=30000) + @Test public void testKeyStoreKeyProviderWithPassword() throws Exception { KeyProvider provider = new KeyStoreKeyProvider(); provider.init("jceks://" + storeFile.toURI().getPath() + "?password=" + PASSWORD); @@ -101,7 +101,7 @@ public class TestKeyStoreKeyProvider { } } - @Test(timeout=30000) + @Test public void testKeyStoreKeyProviderWithPasswordFile() throws Exception { KeyProvider provider = new KeyStoreKeyProvider(); provider.init("jceks://" + storeFile.toURI().getPath() + "?passwordFile=" + diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java index 48b66fa32f7..4f6da4ebcb3 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java @@ -43,7 +43,7 @@ public class TestThreads { private final AtomicBoolean wasInterrupted = new AtomicBoolean(false); - @Test(timeout=60000) + @Test public void testSleepWithoutInterrupt() throws InterruptedException { Thread sleeper = new Thread(new Runnable() { @Override diff --git 
a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java index 98254782861..ce0d6d6bc32 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java @@ -52,13 +52,13 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest { * * @throws Throwable on failure */ - @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000) + @Ignore ("Hangs on occasion; see HBASE-14430") @Test public void testCreatedServerIsNotAlive() throws Throwable { HttpServer server = createTestServer(); assertNotLive(server); } - @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000) + @Ignore ("Hangs on occasion; see HBASE-14430") @Test public void testStopUnstartedServer() throws Throwable { HttpServer server = createTestServer(); stop(server); @@ -69,7 +69,7 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest { * * @throws Throwable on failure */ - @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000) + @Ignore ("Hangs on occasion; see HBASE-14430") @Test public void testStartedServerIsAlive() throws Throwable { HttpServer server = null; server = createTestServer(); @@ -95,7 +95,7 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest { * * @throws Throwable on failure */ - @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000) + @Ignore ("Hangs on occasion; see HBASE-14430") @Test public void testStoppedServerIsNotAlive() throws Throwable { HttpServer server = createAndStartTestServer(); assertAlive(server); @@ -108,7 +108,7 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest { * * @throws Throwable on failure */ - @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000) + @Ignore ("Hangs on occasion; see HBASE-14430") @Test public void testStoppingTwiceServerIsAllowed() throws Throwable { HttpServer server = createAndStartTestServer(); assertAlive(server); @@ -124,7 +124,7 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest { * @throws Throwable * on failure */ - @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000) + @Ignore ("Hangs on occasion; see HBASE-14430") @Test public void testWepAppContextAfterServerStop() throws Throwable { HttpServer server = null; String key = "test.attribute.key"; diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java index e552ce247bd..9da48193e6d 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java @@ -47,7 +47,7 @@ public class TestLogLevel { static final PrintStream out = System.out; - @Test (timeout=60000) + @Test @SuppressWarnings("deprecation") public void testDynamicLogLevel() throws Exception { String logName = TestLogLevel.class.getName(); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java index 4dde8b4dfbb..2dd163305b9 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java @@ -293,7 +293,7 @@ public 
class IntegrationTestRpcClient { Test that not started connections are successfully removed from connection pool when rpc client is closing. */ - @Test (timeout = 30000) + @Test public void testRpcWithWriteThread() throws IOException, InterruptedException { LOG.info("Starting test"); Cluster cluster = new Cluster(1, 1); @@ -312,7 +312,7 @@ public class IntegrationTestRpcClient { } - @Test (timeout = 1800000) + @Test public void testRpcWithChaosMonkeyWithSyncClient() throws Throwable { for (int i = 0; i < numIterations; i++) { TimeoutThread.runWithTimeout(new Callable() { @@ -333,7 +333,7 @@ public class IntegrationTestRpcClient { } } - @Test (timeout = 900000) + @Test @Ignore // TODO: test fails with async client public void testRpcWithChaosMonkeyWithAsyncClient() throws Throwable { for (int i = 0; i < numIterations; i++) { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java index 0fb4f76cbd2..b15116a4775 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java @@ -86,7 +86,7 @@ public class TestCellCounter { * Test CellCounter all data should print to output * */ - @Test (timeout=300000) + @Test public void testCellCounter() throws Exception { final TableName sourceTable = TableName.valueOf(name.getMethodName()); byte[][] families = { FAMILY_A, FAMILY_B }; @@ -124,7 +124,7 @@ public class TestCellCounter { /** * Test CellCounter all data should print to output */ - @Test(timeout = 300000) + @Test public void testCellCounterPrefix() throws Exception { final TableName sourceTable = TableName.valueOf(name.getMethodName()); byte[][] families = { FAMILY_A, FAMILY_B }; @@ -162,7 +162,7 @@ public class TestCellCounter { /** * Test CellCounter with time range all data should print to output */ - @Test (timeout=300000) + @Test public void testCellCounterStartTimeRange() throws Exception { final TableName sourceTable = TableName.valueOf(name.getMethodName()); byte[][] families = { FAMILY_A, FAMILY_B }; @@ -203,7 +203,7 @@ public class TestCellCounter { /** * Test CellCounter with time range all data should print to output */ - @Test (timeout=300000) + @Test public void testCellCounteEndTimeRange() throws Exception { final TableName sourceTable = TableName.valueOf(name.getMethodName()); byte[][] families = { FAMILY_A, FAMILY_B }; @@ -243,7 +243,7 @@ public class TestCellCounter { /** * Test CellCounter with time range all data should print to output */ - @Test (timeout=300000) + @Test public void testCellCounteOutOfTimeRange() throws Exception { final TableName sourceTable = TableName.valueOf(name.getMethodName()); byte[][] families = { FAMILY_A, FAMILY_B }; @@ -288,7 +288,7 @@ public class TestCellCounter { /** * Test main method of CellCounter */ - @Test (timeout=300000) + @Test public void testCellCounterMain() throws Exception { PrintStream oldPrintStream = System.err; @@ -320,7 +320,7 @@ public class TestCellCounter { /** * Test CellCounter for complete table all data should print to output */ - @Test(timeout = 600000) + @Test public void testCellCounterForCompleteTable() throws Exception { final TableName sourceTable = TableName.valueOf(name.getMethodName()); String outputPath = OUTPUT_DIR + sourceTable; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java index 8251a3e4136..562a00932ed 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java @@ -60,7 +60,7 @@ public class TestHRegionPartitioner { /** * Test HRegionPartitioner */ - @Test (timeout=300000) + @Test public void testHRegionPartitioner() throws Exception { byte[][] families = { Bytes.toBytes("familyA"), Bytes.toBytes("familyB") }; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java index 05dab7f1e75..cd56c46f237 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java @@ -80,7 +80,7 @@ public class TestProcedureEvents { fs.delete(logDir, true); } - @Test(timeout=30000) + @Test public void testTimeoutEventProcedure() throws Exception { final int NTIMEOUTS = 5; @@ -92,12 +92,12 @@ public class TestProcedureEvents { assertEquals(NTIMEOUTS + 1, proc.getTimeoutsCount()); } - @Test(timeout=30000) + @Test public void testTimeoutEventProcedureDoubleExecution() throws Exception { testTimeoutEventProcedureDoubleExecution(false); } - @Test(timeout=30000) + @Test public void testTimeoutEventProcedureDoubleExecutionKillIfSuspended() throws Exception { testTimeoutEventProcedureDoubleExecution(true); } diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java index e79088471ce..7e660e4d0b1 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java @@ -133,7 +133,7 @@ public class TestProcedureExecution { } } - @Test(timeout=30000) + @Test public void testBadSubprocList() { List state = new ArrayList<>(); Procedure subProc2 = new TestSequentialProcedure("subProc2", state); @@ -155,7 +155,7 @@ public class TestProcedureExecution { assertEquals("rootProc-rollback", state.get(3)); } - @Test(timeout=30000) + @Test public void testSingleSequentialProc() { List state = new ArrayList<>(); Procedure subProc2 = new TestSequentialProcedure("subProc2", state); @@ -170,7 +170,7 @@ public class TestProcedureExecution { assertEquals(state.toString(), 3, state.size()); } - @Test(timeout=30000) + @Test public void testSingleSequentialProcRollback() { List state = new ArrayList<>(); Procedure subProc2 = new TestSequentialProcedure("subProc2", state, @@ -221,7 +221,7 @@ public class TestProcedureExecution { protected boolean abort(Void env) { return false; } } - @Test(timeout=30000) + @Test public void testRollbackRetriableFailure() { long procId = ProcedureTestingUtility.submitAndWait(procExecutor, new TestFaultyRollback()); @@ -298,7 +298,7 @@ public class TestProcedureExecution { } } - @Test(timeout=30000) + @Test public void testAbortTimeout() { final int PROC_TIMEOUT_MSEC = 2500; List state = new ArrayList<>(); @@ -317,7 +317,7 @@ public class TestProcedureExecution { assertEquals("wproc-rollback", state.get(1)); } - @Test(timeout=30000) + @Test public void testAbortTimeoutWithChildren() { List state = new 
ArrayList<>(); Procedure proc = new TestWaitingProcedure("wproc", state, true); diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java index db19974f096..1c53098a196 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java @@ -75,7 +75,7 @@ public class TestProcedureExecutor { procExecutor.start(numThreads, true); } - @Test(timeout=60000) + @Test public void testWorkerStuck() throws Exception { // replace the executor final Configuration conf = new Configuration(htu.getConfiguration()); diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java index 76129879af4..b702314dc77 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java @@ -85,7 +85,7 @@ public class TestProcedureNonce { fs.delete(logDir, true); } - @Test(timeout=30000) + @Test public void testCompletedProcWithSameNonce() throws Exception { final long nonceGroup = 123; final long nonce = 2222; @@ -111,7 +111,7 @@ public class TestProcedureNonce { ProcedureTestingUtility.assertProcNotFailed(result); } - @Test(timeout=30000) + @Test public void testRunningProcWithSameNonce() throws Exception { final long nonceGroup = 456; final long nonce = 33333; @@ -163,12 +163,12 @@ public class TestProcedureNonce { ProcedureTestingUtility.assertProcFailed(result); } - @Test(timeout=30000) + @Test public void testConcurrentNonceRegistration() throws IOException { testConcurrentNonceRegistration(true, 567, 44444); } - @Test(timeout=30000) + @Test public void testConcurrentNonceRegistrationWithRollback() throws IOException { testConcurrentNonceRegistration(false, 890, 55555); } diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java index 16af8433c3b..aece1de4f40 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java @@ -190,7 +190,7 @@ public class TestProcedureRecovery { restart(); } - @Test(timeout=30000) + @Test public void testSingleStepProcRecovery() throws Exception { Procedure proc = new TestSingleStepProcedure(); procExecutor.testing.killBeforeStoreUpdate = true; @@ -216,7 +216,7 @@ public class TestProcedureRecovery { assertEquals(1, Bytes.toInt(result.getResult())); } - @Test(timeout=30000) + @Test public void testMultiStepProcRecovery() throws Exception { // Step 0 - kill Procedure proc = new TestMultiStepProcedure(); @@ -245,7 +245,7 @@ public class TestProcedureRecovery { ProcedureTestingUtility.assertProcNotFailed(result); } - @Test(timeout=30000) + @Test public void testMultiStepRollbackRecovery() throws Exception { // Step 0 - kill Procedure proc = new TestMultiStepProcedure(); @@ -403,7 +403,7 @@ public class TestProcedureRecovery { } } - @Test(timeout=30000) + @Test public void testStateMachineMultipleLevel() throws Exception { long procId = procExecutor.submitProcedure(new 
TestStateMachineProcedure(true)); // Wait the completion @@ -414,7 +414,7 @@ public class TestProcedureRecovery { assertEquals(4, procExecutor.getLastProcId()); } - @Test(timeout=30000) + @Test public void testStateMachineRecovery() throws Exception { ProcedureTestingUtility.setToggleKillBeforeStoreUpdate(procExecutor, true); ProcedureTestingUtility.setKillBeforeStoreUpdate(procExecutor, true); @@ -452,7 +452,7 @@ public class TestProcedureRecovery { assertEquals(26, Bytes.toInt(result.getResult())); } - @Test(timeout=30000) + @Test public void testStateMachineRollbackRecovery() throws Exception { ProcedureTestingUtility.setToggleKillBeforeStoreUpdate(procExecutor, true); ProcedureTestingUtility.setKillBeforeStoreUpdate(procExecutor, true); diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java index 217f98228a2..7d0529e70ab 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java @@ -86,7 +86,7 @@ public class TestProcedureReplayOrder { fs.delete(logDir, true); } - @Test(timeout=90000) + @Test public void testSingleStepReplayOrder() throws Exception { final int NUM_PROC_XTHREAD = 32; final int NUM_PROCS = NUM_THREADS * NUM_PROC_XTHREAD; @@ -107,7 +107,7 @@ public class TestProcedureReplayOrder { procEnv.assertSortedExecList(NUM_PROCS); } - @Test(timeout=90000) + @Test public void testMultiStepReplayOrder() throws Exception { final int NUM_PROC_XTHREAD = 24; final int NUM_PROCS = NUM_THREADS * (NUM_PROC_XTHREAD * 2); diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java index 35ca30bc2e4..d3c0ed4ec48 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java @@ -59,12 +59,12 @@ public class TestProcedureSchedulerConcurrency { procSched.stop(); } - @Test(timeout=60000) + @Test public void testConcurrentWaitWake() throws Exception { testConcurrentWaitWake(false); } - @Test(timeout=60000) + @Test public void testConcurrentWaitWakeBatch() throws Exception { testConcurrentWaitWake(true); } diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java index 393f57a9637..3da7c117a6b 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java @@ -71,7 +71,7 @@ public class TestProcedureSuspended { procStore.stop(false); } - @Test(timeout=10000) + @Test public void testSuspendWhileHoldingLocks() { final AtomicBoolean lockA = new AtomicBoolean(false); final AtomicBoolean lockB = new AtomicBoolean(false); @@ -125,7 +125,7 @@ public class TestProcedureSuspended { assertEquals(false, lockB.get()); } - @Test(timeout=10000) + @Test public void testYieldWhileHoldingLocks() { final AtomicBoolean lock = new AtomicBoolean(false); diff --git 
a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java index 6c7594f6774..1929c0c7361 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java @@ -580,7 +580,7 @@ public class TestWALProcedureStore { } } - @Test(timeout=60000) + @Test public void testWalReplayOrder_AB_A() throws Exception { /* * | A B | -> | A | @@ -623,7 +623,7 @@ public class TestWALProcedureStore { }); } - @Test(timeout=60000) + @Test public void testWalReplayOrder_ABC_BAD() throws Exception { /* * | A B C | -> | B A D | diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java index 74d04fc015c..4e09e1ea1b7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java @@ -78,7 +78,7 @@ public class TestHBaseTestingUtility { * that what we insert in one place doesn't end up in the other. * @throws Exception */ - @Test (timeout=180000) + @Test public void testMultiClusters() throws Exception { // Create three clusters diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java index fee1439a70b..4d8c0156e97 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java @@ -79,7 +79,7 @@ public class TestJMXConnectorServer { /** * This tests to validate the HMaster's ConnectorServer after unauthorised stopMaster call. */ - @Test(timeout = 180000) + @Test public void testHMConnectorServerWhenStopMaster() throws Exception { conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, JMXListener.class.getName() + "," + MyAccessController.class.getName()); @@ -117,7 +117,7 @@ public class TestJMXConnectorServer { * This tests to validate the RegionServer's ConnectorServer after unauthorised stopRegionServer * call. */ - @Test(timeout = 180000) + @Test public void testRSConnectorServerWhenStopRegionServer() throws Exception { conf.set(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, JMXListener.class.getName() + "," + MyAccessController.class.getName()); @@ -147,7 +147,7 @@ public class TestJMXConnectorServer { /** * This tests to validate the HMaster's ConnectorServer after unauthorised shutdown call. 
*/ - @Test(timeout = 180000) + @Test public void testHMConnectorServerWhenShutdownCluster() throws Exception { conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, JMXListener.class.getName() + "," + MyAccessController.class.getName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java index f1c42cee52c..c9db891023a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java @@ -288,7 +288,7 @@ public class TestNamespace { admin.deleteTable(desc.getTableName()); } - @Test(timeout = 60000) + @Test public void testNamespaceOperations() throws IOException { admin.createNamespace(NamespaceDescriptor.create(prefix + "ns1").build()); admin.createNamespace(NamespaceDescriptor.create(prefix + "ns2").build()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java index 15f04114076..c3a6f0c4b70 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java @@ -62,19 +62,19 @@ public class TestNodeHealthCheckChore { if (!fs.mkdirs(testDir)) throw new IOException("Failed mkdir " + testDir); } - @Test(timeout=60000) + @Test public void testHealthCheckerSuccess() throws Exception { String normalScript = "echo \"I am all fine\""; healthCheckerTest(normalScript, HealthCheckerExitStatus.SUCCESS); } - @Test(timeout=60000) + @Test public void testHealthCheckerFail() throws Exception { String errorScript = "echo ERROR" + eol + "echo \"Node not healthy\""; healthCheckerTest(errorScript, HealthCheckerExitStatus.FAILED); } - @Test(timeout=60000) + @Test public void testHealthCheckerTimeout() throws Exception { String timeOutScript = "sleep 10" + eol + "echo \"I am fine\""; healthCheckerTest(timeOutScript, HealthCheckerExitStatus.TIMED_OUT); @@ -99,7 +99,7 @@ public class TestNodeHealthCheckChore { this.healthScriptFile.delete(); } - @Test(timeout=60000) + @Test public void testRSHealthChore() throws Exception{ Stoppable stop = new StoppableImplementation(); Configuration conf = getConfForNodeHealthScript(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java index 222421961f5..ed37713d631 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java @@ -103,7 +103,7 @@ public class TestRegionRebalancing { * @throws IOException * @throws InterruptedException */ - @Test (timeout=300000) + @Test public void testRebalanceOnRegionServerNumberChange() throws IOException, InterruptedException { try(Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java index c418b5f2826..99b40af3280 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java @@ -124,7 +124,7 @@ public class 
TestStochasticBalancerJmxMetrics extends BalancerTestBase { /** * In Ensemble mode, there should be only one ensemble table */ - @Test (timeout=60000) + @Test public void testJmxMetrics_EnsembleMode() throws Exception { loadBalancer = new StochasticLoadBalancer(); @@ -153,7 +153,7 @@ public class TestStochasticBalancerJmxMetrics extends BalancerTestBase { /** * In per-table mode, each table has a set of metrics */ - @Test (timeout=60000) + @Test public void testJmxMetrics_PerTableMode() throws Exception { loadBalancer = new StochasticLoadBalancer(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java index e29e16884ff..f3e193e1f48 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java @@ -140,7 +140,7 @@ public class TestZooKeeperTableArchiveClient { /** * Test turning on/off archiving */ - @Test (timeout=300000) + @Test public void testArchivingEnableDisable() throws Exception { // 1. turn on hfile backups LOG.debug("----Starting archiving"); @@ -163,7 +163,7 @@ public class TestZooKeeperTableArchiveClient { archivingClient.getArchivingEnabled(TABLE_NAME)); } - @Test (timeout=300000) + @Test public void testArchivingOnSingleTable() throws Exception { createArchiveDirectory(); FileSystem fs = UTIL.getTestFileSystem(); @@ -211,7 +211,7 @@ public class TestZooKeeperTableArchiveClient { * Test archiving/cleaning across multiple tables, where some are retained, and others aren't * @throws Exception on failure */ - @Test (timeout=300000) + @Test public void testMultipleTables() throws Exception { createArchiveDirectory(); String otherTable = "otherTable"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java index 0e8a3e69460..c48d13004be 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java @@ -114,7 +114,7 @@ public class TestAdmin1 { } } - @Test (timeout=300000) + @Test public void testSplitFlushCompactUnknownTable() throws InterruptedException { final TableName unknowntable = TableName.valueOf(name.getMethodName()); Exception exception = null; @@ -142,7 +142,7 @@ public class TestAdmin1 { assertTrue(exception instanceof TableNotFoundException); } - @Test (timeout=300000) + @Test public void testDeleteEditUnknownColumnFamilyAndOrTable() throws IOException { // Test we get exception if we try to final TableName nonexistentTable = TableName.valueOf("nonexistent"); @@ -236,7 +236,7 @@ public class TestAdmin1 { } } - @Test (timeout=300000) + @Test public void testDisableAndEnableTable() throws IOException { final byte [] row = Bytes.toBytes("row"); final byte [] qualifier = Bytes.toBytes("qualifier"); @@ -301,7 +301,7 @@ public class TestAdmin1 { return state.getState(); } - @Test (timeout=300000) + @Test public void testDisableAndEnableTables() throws IOException { final byte [] row = Bytes.toBytes("row"); final byte [] qualifier = Bytes.toBytes("qualifier"); @@ -359,7 +359,7 @@ public class TestAdmin1 { assertEquals(TableState.State.ENABLED, getStateFromMeta(table2)); } - @Test (timeout=300000) + @Test public void testCreateTable() throws 
IOException { HTableDescriptor [] tables = admin.listTables(); int numTables = tables.length; @@ -373,12 +373,12 @@ public class TestAdmin1 { assertEquals(TableState.State.ENABLED, getStateFromMeta(tableName)); } - @Test (timeout=300000) + @Test public void testTruncateTable() throws IOException { testTruncateTable(TableName.valueOf(name.getMethodName()), false); } - @Test (timeout=300000) + @Test public void testTruncateTablePreservingSplits() throws IOException { testTruncateTable(TableName.valueOf(name.getMethodName()), true); } @@ -415,7 +415,7 @@ public class TestAdmin1 { } } - @Test (timeout=300000) + @Test public void testGetTableDescriptor() throws IOException { HColumnDescriptor fam1 = new HColumnDescriptor("fam1"); HColumnDescriptor fam2 = new HColumnDescriptor("fam2"); @@ -432,7 +432,7 @@ public class TestAdmin1 { table.close(); } - @Test (timeout=300000) + @Test public void testCompactionTimestamps() throws Exception { HColumnDescriptor fam1 = new HColumnDescriptor("fam1"); final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -486,7 +486,7 @@ public class TestAdmin1 { table.close(); } - @Test (timeout=300000) + @Test public void testHColumnValidName() { boolean exceptionThrown; try { @@ -502,7 +502,7 @@ public class TestAdmin1 { * @throws IOException * @throws InterruptedException */ - @Test (timeout=300000) + @Test public void testOnlineChangeTableSchema() throws IOException, InterruptedException { final TableName tableName = TableName.valueOf(name.getMethodName()); HTableDescriptor [] tables = admin.listTables(); @@ -618,7 +618,7 @@ public class TestAdmin1 { } } - @Test (timeout=300000) + @Test public void testCreateTableNumberOfRegions() throws IOException, InterruptedException { final TableName tableName = TableName.valueOf(name.getMethodName()); HTableDescriptor desc = new HTableDescriptor(tableName); @@ -668,7 +668,7 @@ public class TestAdmin1 { } } - @Test (timeout=300000) + @Test public void testCreateTableWithRegions() throws IOException, InterruptedException { final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -844,7 +844,7 @@ public class TestAdmin1 { } } - @Test (timeout=300000) + @Test public void testTableAvailableWithRandomSplitKeys() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); HTableDescriptor desc = new HTableDescriptor(tableName); @@ -859,7 +859,7 @@ public class TestAdmin1 { assertFalse("Table should be created with 1 row in META", tableAvailable); } - @Test (timeout=300000) + @Test public void testCreateTableWithOnlyEmptyStartRow() throws IOException { final byte[] tableName = Bytes.toBytes(name.getMethodName()); byte[][] splitKeys = new byte[1][]; @@ -873,7 +873,7 @@ public class TestAdmin1 { } } - @Test (timeout=300000) + @Test public void testCreateTableWithEmptyRowInTheSplitKeys() throws IOException{ final byte[] tableName = Bytes.toBytes(name.getMethodName()); byte[][] splitKeys = new byte[3][]; @@ -890,7 +890,7 @@ public class TestAdmin1 { } } - @Test (timeout=120000) + @Test public void testTableExist() throws IOException { final TableName table = TableName.valueOf(name.getMethodName()); boolean exist; @@ -906,7 +906,7 @@ public class TestAdmin1 { * @throws Exception * @throws IOException */ - @Test (timeout=400000) + @Test public void testForceSplit() throws Exception { byte[][] familyNames = new byte[][] { Bytes.toBytes("cf") }; int[] rowCounts = new int[] { 6000 }; @@ -925,7 +925,7 @@ public class TestAdmin1 { * * @throws IOException */ - @Test (timeout=300000) + 
@Test public void testEnableTableRetainAssignment() throws IOException { final TableName tableName = TableName.valueOf(name.getMethodName()); byte[][] splitKeys = { new byte[] { 1, 1, 1 }, new byte[] { 2, 2, 2 }, @@ -961,7 +961,7 @@ public class TestAdmin1 { * @throws Exception * @throws IOException */ - @Test (timeout=800000) + @Test public void testForceSplitMultiFamily() throws Exception { int numVersions = HColumnDescriptor.DEFAULT_VERSIONS; @@ -1234,12 +1234,12 @@ public class TestAdmin1 { assertTrue(gotException); } - @Test (expected=IllegalArgumentException.class, timeout=300000) + @Test (expected=IllegalArgumentException.class) public void testInvalidHColumnDescriptor() throws IOException { new HColumnDescriptor("/cfamily/name"); } - @Test (timeout=300000) + @Test public void testEnableDisableAddColumnDeleteColumn() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); TEST_UTIL.createTable(tableName, HConstants.CATALOG_FAMILY).close(); @@ -1264,7 +1264,7 @@ public class TestAdmin1 { this.admin.deleteTable(tableName); } - @Test (timeout=300000) + @Test public void testDeleteLastColumnFamily() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); TEST_UTIL.createTable(tableName, HConstants.CATALOG_FAMILY).close(); @@ -1297,7 +1297,7 @@ public class TestAdmin1 { * Test DFS replication for column families, where one CF has default replication(3) and the other * is set to 1. */ - @Test(timeout = 300000) + @Test public void testHFileReplication() throws Exception { final TableName tableName = TableName.valueOf(this.name.getMethodName()); String fn1 = "rep1"; @@ -1348,7 +1348,7 @@ public class TestAdmin1 { } } - @Test (timeout=300000) + @Test public void testMergeRegions() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); HColumnDescriptor cd = new HColumnDescriptor("d"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java index fee7f58a6c4..f1293f13516 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java @@ -118,7 +118,7 @@ public class TestAdmin2 { } } - @Test (timeout=300000) + @Test public void testCreateBadTables() throws IOException { String msg = null; try { @@ -175,7 +175,7 @@ public class TestAdmin2 { * Test for hadoop-1581 'HBASE: Unopenable tablename bug'. * @throws Exception */ - @Test (timeout=300000) + @Test public void testTableNameClash() throws Exception { final String name = this.name.getMethodName(); HTableDescriptor htd1 = new HTableDescriptor(TableName.valueOf(name + "SOMEUPPERCASE")); @@ -195,7 +195,7 @@ public class TestAdmin2 { * issue anymore * @throws Exception */ - @Test (timeout=300000) + @Test public void testCreateTableRPCTimeOut() throws Exception { final String name = this.name.getMethodName(); int oldTimeout = TEST_UTIL.getConfiguration(). @@ -219,7 +219,7 @@ public class TestAdmin2 { * Test read only tables * @throws Exception */ - @Test (timeout=300000) + @Test public void testReadOnlyTable() throws Exception { final TableName name = TableName.valueOf(this.name.getMethodName()); Table table = TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY); @@ -236,7 +236,7 @@ public class TestAdmin2 { * start with same. 
HBASE-771 * @throws IOException */ - @Test (timeout=300000) + @Test public void testTableNames() throws IOException { byte[][] illegalNames = new byte[][] { Bytes.toBytes("-bad"), @@ -265,7 +265,7 @@ public class TestAdmin2 { * For HADOOP-2579 * @throws IOException */ - @Test (expected=TableExistsException.class, timeout=300000) + @Test (expected=TableExistsException.class) public void testTableExistsExceptionWithATable() throws IOException { final TableName name = TableName.valueOf(this.name.getMethodName()); TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY).close(); @@ -276,7 +276,7 @@ public class TestAdmin2 { * Can't disable a table if the table isn't in enabled state * @throws IOException */ - @Test (expected=TableNotEnabledException.class, timeout=300000) + @Test (expected=TableNotEnabledException.class) public void testTableNotEnabledExceptionWithATable() throws IOException { final TableName name = TableName.valueOf(this.name.getMethodName()); TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY).close(); @@ -288,7 +288,7 @@ public class TestAdmin2 { * Can't enable a table if the table isn't in disabled state * @throws IOException */ - @Test (expected=TableNotDisabledException.class, timeout=300000) + @Test (expected=TableNotDisabledException.class) public void testTableNotDisabledExceptionWithATable() throws IOException { final TableName name = TableName.valueOf(this.name.getMethodName()); Table t = TEST_UTIL.createTable(name, HConstants.CATALOG_FAMILY); @@ -303,7 +303,7 @@ public class TestAdmin2 { * For HADOOP-2579 * @throws IOException */ - @Test (expected=TableNotFoundException.class, timeout=300000) + @Test (expected=TableNotFoundException.class) public void testTableNotFoundExceptionWithoutAnyTables() throws IOException { TableName tableName = TableName .valueOf("testTableNotFoundExceptionWithoutAnyTables"); @@ -311,7 +311,7 @@ public class TestAdmin2 { ht.get(new Get(Bytes.toBytes("e"))); } - @Test (timeout=300000) + @Test public void testShouldUnassignTheRegion() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); createTableWithDefaultConf(tableName); @@ -338,7 +338,7 @@ public class TestAdmin2 { isInList); } - @Test (timeout=300000) + @Test public void testCloseRegionIfInvalidRegionNameIsPassed() throws Exception { final String name = this.name.getMethodName(); byte[] tableName = Bytes.toBytes(name); @@ -364,7 +364,7 @@ public class TestAdmin2 { onlineRegions.contains(info)); } - @Test (timeout=300000) + @Test public void testCloseRegionThatFetchesTheHRIFromMeta() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); createTableWithDefaultConf(tableName); @@ -421,7 +421,7 @@ public class TestAdmin2 { * For HBASE-2556 * @throws IOException */ - @Test (timeout=300000) + @Test public void testGetTableRegions() throws IOException { final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -443,7 +443,7 @@ public class TestAdmin2 { expectedRegions, RegionInfos.size()); } - @Test (timeout=300000) + @Test public void testMoveToPreviouslyAssignedRS() throws IOException, InterruptedException { MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); HMaster master = cluster.getMaster(); @@ -458,7 +458,7 @@ public class TestAdmin2 { am.getRegionStates().getRegionServerOfRegion(hri)); } - @Test (timeout=300000) + @Test public void testWALRollWriting() throws Exception { setUpforLogRolling(); String className = this.getClass().getName(); @@ -557,7 +557,7 @@ public class TestAdmin2 { /** * Check 
that we have an exception if the cluster is not there. */ - @Test (timeout=300000) + @Test public void testCheckHBaseAvailableWithoutCluster() { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); @@ -578,7 +578,7 @@ public class TestAdmin2 { " HBase was not available"); } - @Test (timeout=300000) + @Test public void testDisableCatalogTable() throws Exception { try { this.admin.disableTable(TableName.META_TABLE_NAME); @@ -594,7 +594,7 @@ public class TestAdmin2 { TEST_UTIL.getHBaseAdmin().createTable(htd); } - @Test (timeout=300000) + @Test public void testIsEnabledOrDisabledOnUnknownTable() throws Exception { try { admin.isTableEnabled(TableName.valueOf(name.getMethodName())); @@ -609,7 +609,7 @@ public class TestAdmin2 { } } - @Test (timeout=300000) + @Test public void testGetRegion() throws Exception { // We use actual HBaseAdmin instance instead of going via Admin interface in // here because makes use of an internal HBA method (TODO: Fix.). @@ -630,7 +630,7 @@ public class TestAdmin2 { } } - @Test(timeout = 30000) + @Test public void testBalancer() throws Exception { boolean initialState = admin.isBalancerEnabled(); @@ -652,7 +652,7 @@ public class TestAdmin2 { assertEquals(initialState, admin.isBalancerEnabled()); } - @Test(timeout = 30000) + @Test public void testRegionNormalizer() throws Exception { boolean initialState = admin.isNormalizerEnabled(); @@ -674,7 +674,7 @@ public class TestAdmin2 { assertEquals(initialState, admin.isNormalizerEnabled()); } - @Test(timeout = 30000) + @Test public void testAbortProcedureFail() throws Exception { Random randomGenerator = new Random(); long procId = randomGenerator.nextLong(); @@ -683,19 +683,19 @@ public class TestAdmin2 { assertFalse(abortResult); } - @Test(timeout = 300000) + @Test public void testGetProcedures() throws Exception { String procList = admin.getProcedures(); assertTrue(procList.startsWith("[")); } - @Test(timeout = 300000) + @Test public void testGetLocks() throws Exception { String lockList = admin.getLocks(); assertTrue(lockList.startsWith("[")); } - @Test(timeout = 30000) + @Test public void testDecommissionRegionServers() throws Exception { List decommissionedRegionServers = admin.listDecommissionedRegionServers(); assertTrue(decommissionedRegionServers.isEmpty()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java index 9321ced158b..14a881b1dd4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java @@ -44,7 +44,7 @@ public class TestAsyncDecommissionAdminApi extends TestAsyncAdminBase { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestAsyncDecommissionAdminApi.class); - @Test(timeout = 30000) + @Test public void testAsyncDecommissionRegionServers() throws Exception { List decommissionedRegionServers = admin.listDecommissionedRegionServers().get(); assertTrue(decommissionedRegionServers.isEmpty()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java index ed692cb4e44..f74b09551b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java @@ -75,7 +75,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase { LOG.info("Done initializing cluster"); } - @Test(timeout = 60000) + @Test public void testCreateAndDelete() throws Exception { String testName = "testCreateAndDelete"; String nsName = prefix + "_" + testName; @@ -97,7 +97,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase { assertNull(zkNamespaceManager.get(nsName)); } - @Test(timeout = 60000) + @Test public void testDeleteReservedNS() throws Exception { boolean exceptionCaught = false; try { @@ -119,7 +119,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase { } } - @Test(timeout = 60000) + @Test public void testNamespaceOperations() throws Exception { admin.createNamespace(NamespaceDescriptor.create(prefix + "ns1").build()).join(); admin.createNamespace(NamespaceDescriptor.create(prefix + "ns2").build()).join(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java index 456434f91a7..48d9a932be0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java @@ -92,7 +92,7 @@ public class TestClientPushback { UTIL.shutdownMiniCluster(); } - @Test(timeout=60000) + @Test public void testClientTracksServerPushback() throws Exception{ Configuration conf = UTIL.getConfiguration(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java index f996de6f3da..3b807aa8384 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java @@ -85,7 +85,7 @@ public class TestEnableTable { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout = 300000) + @Test public void testEnableTableWithNoRegionServers() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); @@ -149,7 +149,7 @@ public class TestEnableTable { * @throws IOException * @throws InterruptedException */ - @Test(timeout=60000) + @Test public void testDeleteForSureClearsAllTableRowsFromMeta() throws IOException, InterruptedException { final TableName tableName = TableName.valueOf(name.getMethodName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java index 60c124ae9a7..82cf8023ba5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java @@ -269,7 +269,7 @@ public class TestFromClientSide3 { } // override the config settings at the CF level and ensure priority - @Test(timeout = 60000) + @Test public void testAdvancedConfigOverride() throws Exception { /* * Overall idea: (1) create 3 store files and issue a compaction. 
config's @@ -681,7 +681,7 @@ public class TestFromClientSide3 { assertTrue(con.hasCellBlockSupport()); } - @Test(timeout = 60000) + @Test public void testPutWithPreBatchMutate() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); testPreBatchMutate(tableName, () -> { @@ -696,7 +696,7 @@ public class TestFromClientSide3 { }); } - @Test(timeout = 60000) + @Test public void testRowMutationsWithPreBatchMutate() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); testPreBatchMutate(tableName, () -> { @@ -743,7 +743,7 @@ public class TestFromClientSide3 { TEST_UTIL.deleteTable(tableName); } - @Test(timeout = 30000) + @Test public void testLockLeakWithDelta() throws Exception, Throwable { final TableName tableName = TableName.valueOf(name.getMethodName()); HTableDescriptor desc = new HTableDescriptor(tableName); @@ -796,7 +796,7 @@ public class TestFromClientSide3 { assertEquals(0, readLockCount); } - @Test(timeout = 30000) + @Test public void testMultiRowMutations() throws Exception, Throwable { final TableName tableName = TableName.valueOf(name.getMethodName()); HTableDescriptor desc = new HTableDescriptor(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java index f2f01adf32c..7cab2d1ee77 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java @@ -178,7 +178,7 @@ public class TestMultiParallel { * @throws NoSuchFieldException * @throws SecurityException */ - @Test(timeout=300000) + @Test public void testActiveThreadsCount() throws Exception { UTIL.getConfiguration().setLong("hbase.htable.threads.coresize", slaves + 1); try (Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration())) { @@ -202,7 +202,7 @@ public class TestMultiParallel { } } - @Test(timeout=300000) + @Test public void testBatchWithGet() throws Exception { LOG.info("test=testBatchWithGet"); Table table = UTIL.getConnection().getTable(TEST_TABLE); @@ -271,7 +271,7 @@ public class TestMultiParallel { table.close(); } - @Test (timeout=300000) + @Test public void testFlushCommitsNoAbort() throws Exception { LOG.info("test=testFlushCommitsNoAbort"); doTestFlushCommits(false); @@ -283,7 +283,7 @@ public class TestMultiParallel { * * @throws Exception */ - @Test (timeout=360000) + @Test public void testFlushCommitsWithAbort() throws Exception { LOG.info("test=testFlushCommitsWithAbort"); doTestFlushCommits(true); @@ -354,7 +354,7 @@ public class TestMultiParallel { LOG.info("done"); } - @Test (timeout=300000) + @Test public void testBatchWithPut() throws Exception { LOG.info("test=testBatchWithPut"); Table table = CONNECTION.getTable(TEST_TABLE); @@ -387,7 +387,7 @@ public class TestMultiParallel { table.close(); } - @Test(timeout=300000) + @Test public void testBatchWithDelete() throws Exception { LOG.info("test=testBatchWithDelete"); Table table = UTIL.getConnection().getTable(TEST_TABLE); @@ -418,7 +418,7 @@ public class TestMultiParallel { table.close(); } - @Test(timeout=300000) + @Test public void testHTableDeleteWithList() throws Exception { LOG.info("test=testHTableDeleteWithList"); Table table = UTIL.getConnection().getTable(TEST_TABLE); @@ -448,7 +448,7 @@ public class TestMultiParallel { table.close(); } - @Test(timeout=300000) + @Test public void 
testBatchWithManyColsInOneRowGetAndPut() throws Exception { LOG.info("test=testBatchWithManyColsInOneRowGetAndPut"); Table table = UTIL.getConnection().getTable(TEST_TABLE); @@ -487,7 +487,7 @@ public class TestMultiParallel { table.close(); } - @Test(timeout=300000) + @Test public void testBatchWithIncrementAndAppend() throws Exception { LOG.info("test=testBatchWithIncrementAndAppend"); final byte[] QUAL1 = Bytes.toBytes("qual1"); @@ -522,7 +522,7 @@ public class TestMultiParallel { table.close(); } - @Test(timeout=300000) + @Test public void testNonceCollision() throws Exception { LOG.info("test=testNonceCollision"); final Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration()); @@ -622,7 +622,7 @@ public class TestMultiParallel { } } - @Test(timeout=300000) + @Test public void testBatchWithMixedActions() throws Exception { LOG.info("test=testBatchWithMixedActions"); Table table = UTIL.getConnection().getTable(TEST_TABLE); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java index 686a8a44a8e..046b6f8d14e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java @@ -280,7 +280,7 @@ public class TestReplicaWithCluster { HTU.shutdownMiniCluster(); } - @Test (timeout=30000) + @Test public void testCreateDeleteTable() throws IOException { // Create table then get the single region for our new table. HTableDescriptor hdt = HTU.createTableDescriptor("testCreateDeleteTable"); @@ -313,7 +313,7 @@ public class TestReplicaWithCluster { HTU.deleteTable(hdt.getTableName()); } - @Test (timeout=120000) + @Test public void testChangeTable() throws Exception { TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("testChangeTable")) .setRegionReplication(NB_SERVERS) @@ -373,7 +373,7 @@ public class TestReplicaWithCluster { } @SuppressWarnings("deprecation") - @Test (timeout=300000) + @Test public void testReplicaAndReplication() throws Exception { HTableDescriptor hdt = HTU.createTableDescriptor("testReplicaAndReplication"); hdt.setRegionReplication(NB_SERVERS); @@ -457,7 +457,7 @@ public class TestReplicaWithCluster { // the minicluster has negative impact of deleting all HConnections in JVM. } - @Test (timeout=30000) + @Test public void testBulkLoad() throws IOException { // Create table then get the single region for our new table. LOG.debug("Creating test table"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java index d7f4464e7c0..45da2c6bbb6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java @@ -102,7 +102,7 @@ public class TestScannerTimeout { * from failed. Before 2772, it reused the same scanner id. * @throws Exception */ - @Test(timeout=300000) + @Test public void test2772() throws Exception { LOG.info("START************ test2772"); HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLE_NAME); @@ -133,7 +133,7 @@ public class TestScannerTimeout { * from failed. Before 3686, it would skip rows in the scan. 
* @throws Exception */ - @Test(timeout=300000) + @Test public void test3686a() throws Exception { LOG.info("START ************ TEST3686A---1"); HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLE_NAME); @@ -179,7 +179,7 @@ public class TestScannerTimeout { * client. * @throws Exception */ - @Test(timeout=300000) + @Test public void test3686b() throws Exception { LOG.info("START ************ test3686b"); HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLE_NAME); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java index 7ce130c71ab..b46404f0e28 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java @@ -148,7 +148,7 @@ public class TestSnapshotFromClient { * Test snapshotting not allowed hbase:meta and -ROOT- * @throws Exception */ - @Test (timeout=300000) + @Test public void testMetaTablesSnapshot() throws Exception { Admin admin = UTIL.getAdmin(); byte[] snapshotName = Bytes.toBytes("metaSnapshot"); @@ -166,7 +166,7 @@ public class TestSnapshotFromClient { * * @throws Exception */ - @Test (timeout=300000) + @Test public void testSnapshotDeletionWithRegex() throws Exception { Admin admin = UTIL.getAdmin(); // make sure we don't fail on listing snapshots @@ -202,7 +202,7 @@ public class TestSnapshotFromClient { * Test snapshotting a table that is offline * @throws Exception */ - @Test (timeout=300000) + @Test public void testOfflineTableSnapshot() throws Exception { Admin admin = UTIL.getAdmin(); // make sure we don't fail on listing snapshots @@ -250,7 +250,7 @@ public class TestSnapshotFromClient { SnapshotTestingUtils.assertNoSnapshots(admin); } - @Test (timeout=300000) + @Test public void testSnapshotFailsOnNonExistantTable() throws Exception { Admin admin = UTIL.getAdmin(); // make sure we don't fail on listing snapshots @@ -279,7 +279,7 @@ public class TestSnapshotFromClient { } } - @Test (timeout=300000) + @Test public void testOfflineTableSnapshotWithEmptyRegions() throws Exception { // test with an empty table with one region @@ -323,7 +323,7 @@ public class TestSnapshotFromClient { SnapshotTestingUtils.assertNoSnapshots(admin); } - @Test(timeout = 300000) + @Test public void testListTableSnapshots() throws Exception { Admin admin = null; final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -369,7 +369,7 @@ public class TestSnapshotFromClient { } } - @Test(timeout = 300000) + @Test public void testListTableSnapshotsWithRegex() throws Exception { Admin admin = null; try { @@ -408,7 +408,7 @@ public class TestSnapshotFromClient { } } - @Test(timeout = 300000) + @Test public void testDeleteTableSnapshots() throws Exception { Admin admin = null; final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -443,7 +443,7 @@ public class TestSnapshotFromClient { } } - @Test(timeout = 300000) + @Test public void testDeleteTableSnapshotsWithRegex() throws Exception { Admin admin = null; Pattern tableNamePattern = Pattern.compile("test.*"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java index 00a5cec6636..ef52057ad79 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java @@ -188,7 +188,7 @@ public class TestSnapshotMetadata { /** * Verify that the describe for a cloned table matches the describe from the original. */ - @Test (timeout=300000) + @Test public void testDescribeMatchesAfterClone() throws Exception { // Clone the original table final String clonedTableNameAsString = "clone" + originalTableName; @@ -229,7 +229,7 @@ public class TestSnapshotMetadata { /** * Verify that the describe for a restored table matches the describe for one the original. */ - @Test (timeout=300000) + @Test public void testDescribeMatchesAfterRestore() throws Exception { runRestoreWithAdditionalMetadata(false); } @@ -238,7 +238,7 @@ public class TestSnapshotMetadata { * Verify that if metadata changed after a snapshot was taken, that the old metadata replaces the * new metadata during a restore */ - @Test (timeout=300000) + @Test public void testDescribeMatchesAfterMetadataChangeAndRestore() throws Exception { runRestoreWithAdditionalMetadata(true); } @@ -248,7 +248,7 @@ public class TestSnapshotMetadata { * the restored table's original metadata * @throws Exception */ - @Test (timeout=300000) + @Test public void testDescribeOnEmptyTableMatchesAfterMetadataChangeAndRestore() throws Exception { runRestoreWithAdditionalMetadata(true, false); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java index f67411b8e53..bc21f1a4a14 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java @@ -87,7 +87,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { TestReplicationBase.tearDownAfterClass(); } - @Test(timeout = 300000) + @Test public void disableNotFullReplication() throws Exception { HTableDescriptor table = new HTableDescriptor(admin2.getTableDescriptor(tableName)); HColumnDescriptor f = new HColumnDescriptor("notReplicatedFamily"); @@ -104,7 +104,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { } } - @Test(timeout = 300000) + @Test public void testEnableReplicationWhenSlaveClusterDoesntHaveTable() throws Exception { admin1.disableTableReplication(tableName); admin2.disableTable(tableName); @@ -114,7 +114,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { assertTrue(admin2.tableExists(tableName)); } - @Test(timeout = 300000) + @Test public void testEnableReplicationWhenReplicationNotEnabled() throws Exception { HTableDescriptor table = new HTableDescriptor(admin1.getTableDescriptor(tableName)); for (HColumnDescriptor fam : table.getColumnFamilies()) { @@ -135,7 +135,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { } } - @Test(timeout = 300000) + @Test public void testEnableReplicationWhenTableDescriptorIsNotSameInClusters() throws Exception { HTableDescriptor table = new HTableDescriptor(admin2.getTableDescriptor(tableName)); HColumnDescriptor f = new HColumnDescriptor("newFamily"); @@ -160,7 +160,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { } } - @Test(timeout = 300000) + @Test public void testDisableAndEnableReplication() throws Exception { admin1.disableTableReplication(tableName); 
HTableDescriptor table = admin1.getTableDescriptor(tableName); @@ -174,22 +174,22 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { } } - @Test(timeout = 300000, expected = TableNotFoundException.class) + @Test(expected = TableNotFoundException.class) public void testDisableReplicationForNonExistingTable() throws Exception { admin1.disableTableReplication(TableName.valueOf(name.getMethodName())); } - @Test(timeout = 300000, expected = TableNotFoundException.class) + @Test(expected = TableNotFoundException.class) public void testEnableReplicationForNonExistingTable() throws Exception { admin1.enableTableReplication(TableName.valueOf(name.getMethodName())); } - @Test(timeout = 300000, expected = IllegalArgumentException.class) + @Test(expected = IllegalArgumentException.class) public void testDisableReplicationWhenTableNameAsNull() throws Exception { admin1.disableTableReplication(null); } - @Test(timeout = 300000, expected = IllegalArgumentException.class) + @Test(expected = IllegalArgumentException.class) public void testEnableReplicationWhenTableNameAsNull() throws Exception { admin1.enableTableReplication(null); } @@ -198,7 +198,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { * Test enable table replication should create table only in user explicit specified table-cfs. * HBASE-14717 */ - @Test(timeout = 300000) + @Test public void testEnableReplicationForExplicitSetTableCfs() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); String peerId = "2"; @@ -239,7 +239,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { } } - @Test(timeout=300000) + @Test public void testReplicationPeerConfigUpdateCallback() throws Exception { String peerId = "1"; ReplicationPeerConfig rpc = new ReplicationPeerConfig(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java index 5393b4822d2..abe28888285 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java @@ -106,7 +106,7 @@ public class TestConstraint { * @throws Exception */ @SuppressWarnings("unchecked") - @Test(timeout = 60000) + @Test public void testConstraintFails() throws Exception { // create the table diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java index ce3c726361c..d0031ff65dd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java @@ -165,7 +165,7 @@ public class TestMasterCoprocessorExceptionWithAbort { UTIL.shutdownMiniCluster(); } - @Test(timeout=30000) + @Test public void testExceptionFromCoprocessorWhenCreatingTable() throws IOException { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java index 8c38208d669..f3e0d4c9f17 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java @@ -146,7 +146,7 @@ public class TestMasterCoprocessorExceptionWithRemove { UTIL.shutdownMiniCluster(); } - @Test(timeout=30000) + @Test public void testExceptionFromCoprocessorWhenCreatingTable() throws IOException { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java index fcaed63c98d..579b6d34e33 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java @@ -1261,7 +1261,7 @@ public class TestMasterObserver { UTIL.shutdownMiniCluster(); } - @Test (timeout=180000) + @Test public void testStarted() throws Exception { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); @@ -1280,7 +1280,7 @@ public class TestMasterObserver { cp.wasStartMasterCalled()); } - @Test (timeout=180000) + @Test public void testTableOperations() throws Exception { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -1409,7 +1409,7 @@ public class TestMasterObserver { } } - @Test (timeout=180000) + @Test public void testSnapshotOperations() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); MiniHBaseCluster cluster = UTIL.getHBaseCluster(); @@ -1470,7 +1470,7 @@ public class TestMasterObserver { } } - @Test (timeout=180000) + @Test public void testNamespaceOperations() throws Exception { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); String testNamespace = "observed_ns"; @@ -1503,7 +1503,7 @@ public class TestMasterObserver { } } - @Test (timeout=180000) + @Test public void testRegionTransitionOperations() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); MiniHBaseCluster cluster = UTIL.getHBaseCluster(); @@ -1592,7 +1592,7 @@ public class TestMasterObserver { } } - @Test (timeout=180000) + @Test public void testTableDescriptorsEnumeration() throws Exception { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); @@ -1609,7 +1609,7 @@ public class TestMasterObserver { cp.wasGetTableDescriptorsCalled()); } - @Test (timeout=180000) + @Test public void testTableNamesEnumeration() throws Exception { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); @@ -1624,7 +1624,7 @@ public class TestMasterObserver { cp.wasGetTableNamesCalled()); } - @Test (timeout=180000) + @Test public void testAbortProcedureOperation() throws Exception { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); @@ -1639,7 +1639,7 @@ public class TestMasterObserver { cp.wasAbortProcedureCalled()); } - @Test (timeout=180000) + @Test public void testGetProceduresOperation() throws Exception { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); @@ -1654,7 +1654,7 @@ public class TestMasterObserver { cp.wasGetProceduresCalled()); } - @Test (timeout=180000) + @Test public void testGetLocksOperation() throws Exception { MiniHBaseCluster cluster = UTIL.getHBaseCluster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java index 
6bfd42b7ba5..ef3fa10f0ee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java @@ -378,7 +378,7 @@ public class TestRegionObserverInterface { } - @Test(timeout = 300000) + @Test // HBase-3758 public void testHBase3758() throws IOException { final TableName tableName = TableName.valueOf(name.getMethodName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java index 3e1d41a8942..8309f471570 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java @@ -61,7 +61,7 @@ public class TestRegionServerCoprocessorExceptionWithAbort { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final TableName TABLE_NAME = TableName.valueOf("observed_table"); - @Test(timeout=60000) + @Test public void testExceptionDuringInitialization() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2); // Let's fail fast. @@ -88,7 +88,7 @@ public class TestRegionServerCoprocessorExceptionWithAbort { } } - @Test(timeout=60000) + @Test public void testExceptionFromCoprocessorDuringPut() throws Exception { // set configure to indicate which cp should be loaded Configuration conf = TEST_UTIL.getConfiguration(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java index f394e57ca89..2d564b67136 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java @@ -87,7 +87,7 @@ public class TestRegionServerCoprocessorExceptionWithRemove { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout=60000) + @Test public void testExceptionFromCoprocessorDuringPut() throws IOException, InterruptedException { // Set watches on the zookeeper nodes for all of the regionservers in the diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java index 60362b7c801..9d5537244c1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java @@ -44,7 +44,7 @@ public class TestTimeoutExceptionInjector { /** * Test that a manually triggered timer fires an exception. 
*/ - @Test(timeout = 60000) + @Test public void testTimerTrigger() { final long time = 10000000; // pick a value that is very far in the future ForeignExceptionListener listener = Mockito.mock(ForeignExceptionListener.class); @@ -71,7 +71,7 @@ public class TestTimeoutExceptionInjector { * Demonstrate TimeoutExceptionInjector semantics -- completion means no more exceptions passed to * error listener. */ - @Test(timeout = 60000) + @Test public void testStartAfterComplete() throws InterruptedException { final long time = 10; ForeignExceptionListener listener = Mockito.mock(ForeignExceptionListener.class); @@ -91,7 +91,7 @@ public class TestTimeoutExceptionInjector { * Demonstrate TimeoutExceptionInjector semantics -- triggering fires exception and completes * the timer. */ - @Test(timeout = 60000) + @Test public void testStartAfterTrigger() throws InterruptedException { final long time = 10; ForeignExceptionListener listener = Mockito.mock(ForeignExceptionListener.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java index 00f335e7507..1937d80b798 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java @@ -198,7 +198,7 @@ public class TestChangingEncoding { TEST_UTIL.waitUntilNoRegionsInTransition(TIMEOUT_MS); } - @Test(timeout=TIMEOUT_MS) + @Test public void testChangingEncoding() throws Exception { prepareTest("ChangingEncoding"); for (boolean onlineChange : new boolean[]{false, true}) { @@ -210,7 +210,7 @@ public class TestChangingEncoding { } } - @Test(timeout=TIMEOUT_MS) + @Test public void testChangingEncodingWithCompaction() throws Exception { prepareTest("ChangingEncodingWithCompaction"); for (boolean onlineChange : new boolean[]{false, true}) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java index 269fa45a931..b607ca7ccf5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java @@ -73,7 +73,7 @@ public class TestLoadAndSwitchEncodeOnDisk extends } @Override - @Test(timeout=TIMEOUT_MS) + @Test public void loadTest() throws Exception { Admin admin = TEST_UTIL.getAdmin(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java index dd301b6ffc5..856b2504f4b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java @@ -326,7 +326,7 @@ public class TestCacheConfig { * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy * LruBlockCache as L1 with a BucketCache for L2. */ - @Test (timeout=10000) + @Test public void testBucketCacheConfigL1L2Setup() { this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap"); // Make lru size is smaller than bcSize for sure. 
Need this to be true so when eviction diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index 31407a35712..be35c74140f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -744,12 +744,12 @@ public class TestHFileBlockIndex { valueRead); } - @Test(timeout=10000) + @Test public void testIntermediateLevelIndicesWithLargeKeys() throws IOException { testIntermediateLevelIndicesWithLargeKeys(16); } - @Test(timeout=10000) + @Test public void testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries() throws IOException { // because of the large rowKeys, we will end up with a 50-level block index without sanity check testIntermediateLevelIndicesWithLargeKeys(2); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java index 134bb62a1f7..23fe9054141 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java @@ -128,7 +128,7 @@ public class TestHFileEncryption { return b.getOnDiskSizeWithHeader(); } - @Test(timeout=20000) + @Test public void testDataBlockEncryption() throws IOException { final int blocks = 10; final int[] blockSizes = new int[blocks]; @@ -164,7 +164,7 @@ public class TestHFileEncryption { } } - @Test(timeout=20000) + @Test public void testHFileEncryptionMetadata() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); CacheConfig cacheConf = new CacheConfig(conf); @@ -202,7 +202,7 @@ public class TestHFileEncryption { } } - @Test(timeout=6000000) + @Test public void testHFileEncryption() throws Exception { // Create 1000 random test KVs RedundantKVGenerator generator = new RedundantKVGenerator(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java index de55afa200c..b512d2f84a1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java @@ -77,7 +77,7 @@ public class TestPrefetch { assertTrue(cc.shouldPrefetchOnOpen()); } - @Test(timeout=60000) + @Test public void testPrefetch() throws Exception { Path storeFile = writeStoreFile(); readStoreFile(storeFile); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java index faf259fcd47..a694fcba164 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java @@ -111,7 +111,7 @@ public class TestBucketWriterThread { * @throws IOException * @throws InterruptedException */ - @Test (timeout=30000) + @Test public void testNonErrorCase() throws IOException, InterruptedException { bc.cacheBlock(this.plainKey, this.plainCacheable); doDrainOfOneEntry(this.bc, this.wt, this.q); @@ -137,7 +137,7 @@ public class TestBucketWriterThread { * @throws InterruptedException */ 
@SuppressWarnings("unchecked") - @Test (timeout=30000) + @Test public void testIOE() throws IOException, InterruptedException { this.bc.cacheBlock(this.plainKey, plainCacheable); RAMQueueEntry rqe = q.remove(); @@ -156,7 +156,7 @@ public class TestBucketWriterThread { * @throws IOException * @throws InterruptedException */ - @Test (timeout=30000) + @Test public void testCacheFullException() throws IOException, InterruptedException { this.bc.cacheBlock(this.plainKey, plainCacheable); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcServer.java index d0ef0555d6b..b45dd5b2863 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcServer.java @@ -77,7 +77,7 @@ public class TestNettyRpcServer { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout = 180000) + @Test public void testNettyRpcServer() throws Exception { final Table table = TEST_UTIL.createTable(TABLE, FAMILY); try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java index f3d53c152ad..1d70c9783ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java @@ -171,7 +171,7 @@ public abstract class AbstractTestDLS { ZKUtil.deleteNodeRecursively(TEST_UTIL.getZooKeeperWatcher(), "/hbase"); } - @Test(timeout = 300000) + @Test public void testRecoveredEdits() throws Exception { conf.setLong("hbase.regionserver.hlog.blocksize", 30 * 1024); // create more than one wal startCluster(NUM_RS); @@ -250,7 +250,7 @@ public abstract class AbstractTestDLS { } } - @Test(timeout = 300000) + @Test public void testMasterStartsUpWithLogSplittingWork() throws Exception { conf.setInt(ServerManager.WAIT_ON_REGIONSERVERS_MINTOSTART, NUM_RS - 1); startCluster(NUM_RS); @@ -310,7 +310,7 @@ public abstract class AbstractTestDLS { * @throws Exception */ // Was marked flaky before Distributed Log Replay cleanup. 
- @Test(timeout = 300000) + @Test public void testWorkerAbort() throws Exception { LOG.info("testWorkerAbort"); startCluster(3); @@ -367,7 +367,7 @@ public abstract class AbstractTestDLS { } } - @Test(timeout = 300000) + @Test public void testThreeRSAbort() throws Exception { LOG.info("testThreeRSAbort"); int numRegionsToCreate = 40; @@ -402,7 +402,7 @@ public abstract class AbstractTestDLS { } } - @Test(timeout = 30000) + @Test public void testDelayedDeleteOnFailure() throws Exception { LOG.info("testDelayedDeleteOnFailure"); startCluster(1); @@ -470,7 +470,7 @@ public abstract class AbstractTestDLS { } } - @Test(timeout = 300000) + @Test public void testReadWriteSeqIdFiles() throws Exception { LOG.info("testReadWriteSeqIdFiles"); startCluster(2); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java index 2ebab10821d..e8f739b4e74 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java @@ -174,7 +174,7 @@ public class TestAssignmentListener { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout=60000) + @Test public void testServerListener() throws IOException, InterruptedException { ServerManager serverManager = TEST_UTIL.getHBaseCluster().getMaster().getServerManager(); @@ -216,7 +216,7 @@ public class TestAssignmentListener { } } - @Test(timeout=60000) + @Test public void testAssignmentListener() throws IOException, InterruptedException { AssignmentManager am = TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager(); Admin admin = TEST_UTIL.getAdmin(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java index 5567aba7042..fcd16380433 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java @@ -90,7 +90,7 @@ public class TestCatalogJanitorInMemoryStates { /** * Test clearing a split parent from memory. 
*/ - @Test(timeout = 180000) + @Test public void testInMemoryParentCleanup() throws IOException, InterruptedException { final AssignmentManager am = TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager(); final ServerManager sm = TEST_UTIL.getHBaseCluster().getMaster().getServerManager(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java index d5e657cc44f..4e852f8cd56 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java @@ -103,7 +103,7 @@ public class TestDeadServer { assertFalse(ds.cleanPreviousInstance(deadServerHostComingAlive)); } - @Test(timeout = 15000) + @Test public void testCrashProcedureReplay() { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); final ProcedureExecutor pExecutor = master.getMasterProcedureExecutor(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterBalanceThrottling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterBalanceThrottling.java index 3bc60eb3b43..80f0fb65574 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterBalanceThrottling.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterBalanceThrottling.java @@ -64,7 +64,7 @@ public class TestMasterBalanceThrottling { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout = 60000) + @Test public void testThrottlingByBalanceInterval() throws Exception { // Use default config and start a cluster of two regionservers. TEST_UTIL.startMiniCluster(2); @@ -88,7 +88,7 @@ public class TestMasterBalanceThrottling { TEST_UTIL.deleteTable(tableName); } - @Test(timeout = 60000) + @Test public void testThrottlingByMaxRitPercent() throws Exception { // Set max balancing time to 500 ms and max percent of regions in transition to 0.05 TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_BALANCER_MAX_BALANCING, 500); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailoverBalancerPersistence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailoverBalancerPersistence.java index 43e5481a69c..8678684f1a5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailoverBalancerPersistence.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailoverBalancerPersistence.java @@ -47,7 +47,7 @@ public class TestMasterFailoverBalancerPersistence { * * @throws Exception */ - @Test(timeout = 240000) + @Test public void testMasterFailoverBalancerPersistence() throws Exception { final int NUM_MASTERS = 3; final int NUM_RS = 1; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java index 119039f719f..91955f802fd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java @@ -85,7 +85,7 @@ public class TestMasterMetrics { } } - @Test(timeout = 300000) + @Test public void testClusterRequests() throws Exception { // sending fake request to master to see how metric value has changed diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java index a4737e65e9e..ccc6104d0c6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java @@ -58,7 +58,7 @@ public class TestMasterMetricsWrapper { TEST_UTIL.shutdownMiniCluster(); } - @Test (timeout = 30000) + @Test public void testInfo() { HMaster master = TEST_UTIL.getHBaseCluster().getMaster(); MetricsMasterWrapperImpl info = new MetricsMasterWrapperImpl(master); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java index 47672b440e1..4c51026b06a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java @@ -57,7 +57,7 @@ public class TestMasterShutdown { * Verifies that all masters are properly shutdown. * @throws Exception */ - @Test (timeout=120000) + @Test public void testMasterShutdown() throws Exception { final int NUM_MASTERS = 3; final int NUM_RS = 3; @@ -103,7 +103,7 @@ public class TestMasterShutdown { htu.shutdownMiniCluster(); } - @Test(timeout = 60000) + @Test public void testMasterShutdownBeforeStartingAnyRegionServer() throws Exception { final int NUM_MASTERS = 1; final int NUM_RS = 0; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java index 9b23d49d051..77c5c58a77d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java @@ -198,7 +198,8 @@ public class TestMasterTransitions { * in. * @see HBASE-2428 */ - @Ignore @Test (timeout=300000) public void testRegionCloseWhenNoMetaHBase2428() + @Ignore @Test + public void testRegionCloseWhenNoMetaHBase2428() throws Exception { /* LOG.info("Running testRegionCloseWhenNoMetaHBase2428"); @@ -253,7 +254,8 @@ public class TestMasterTransitions { * If confusion between old and new, purportedly meta never comes back. Test * that meta gets redeployed. */ - @Ignore @Test (timeout=300000) public void testAddingServerBeforeOldIsDead2413() + @Ignore @Test + public void testAddingServerBeforeOldIsDead2413() throws IOException { /* LOG.info("Running testAddingServerBeforeOldIsDead2413"); @@ -380,7 +382,8 @@ public class TestMasterTransitions { * done. * @see HBASE-2482 */ - @Ignore @Test (timeout=300000) public void testKillRSWithOpeningRegion2482() + @Ignore @Test + public void testKillRSWithOpeningRegion2482() throws Exception { /* LOG.info("Running testKillRSWithOpeningRegion2482"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java index 7e730ae53a7..7faed1c2b17 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java @@ -78,7 +78,7 @@ public class TestMetaShutdownHandler { * the region server so that it is still up during the master SSH. * We will check that the master SSH is still successfully done. 
*/ - @Test (timeout=180000) + @Test public void testExpireMetaRegionServer() throws Exception { MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); HMaster master = cluster.getMaster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java index 2ea05dfbc7c..56976b36ae6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java @@ -66,7 +66,7 @@ public class TestRestartCluster { UTIL.shutdownMiniCluster(); } - @Test (timeout=300000) + @Test public void testClusterRestart() throws Exception { UTIL.startMiniCluster(3); while (!UTIL.getMiniHBaseCluster().getMaster().isInitialized()) { @@ -112,7 +112,7 @@ public class TestRestartCluster { /** * This tests retaining assignments on a cluster restart */ - @Test (timeout=300000) + @Test public void testRetainAssignmentOnRestart() throws Exception { UTIL.startMiniCluster(2); while (!UTIL.getMiniHBaseCluster().getMaster().isInitialized()) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java index e98f3c9bb47..8ecb49d9f93 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java @@ -64,7 +64,7 @@ public class TestRollingRestart { @Rule public TestName name = new TestName(); - @Test (timeout=500000) + @Test public void testBasicRollingRestart() throws Exception { // Start a cluster with 2 masters and 4 regionservers diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java index 20bd6cd2ead..c3021f3d1f7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java @@ -215,7 +215,7 @@ public class TestSplitLogManager { * Test whether the splitlog correctly creates a task in zookeeper * @throws Exception */ - @Test (timeout=180000) + @Test public void testTaskCreation() throws Exception { LOG.info("TestTaskCreation - test the creation of a task in zk"); @@ -230,7 +230,7 @@ public class TestSplitLogManager { assertTrue(slt.isUnassigned(master.getServerName())); } - @Test (timeout=180000) + @Test public void testOrphanTaskAcquisition() throws Exception { LOG.info("TestOrphanTaskAcquisition"); @@ -254,7 +254,7 @@ public class TestSplitLogManager { waitForCounter(tot_mgr_rescan, 0, 1, to + to/2); } - @Test (timeout=180000) + @Test public void testUnassignedOrphan() throws Exception { LOG.info("TestUnassignedOrphan - an unassigned task is resubmitted at" + " startup"); @@ -283,7 +283,7 @@ public class TestSplitLogManager { assertTrue(ZKUtil.checkExists(zkw, tasknode) > version); } - @Test (timeout=180000) + @Test public void testMultipleResubmits() throws Exception { LOG.info("TestMultipleResbmits - no indefinite resubmissions"); conf.setInt("hbase.splitlog.max.resubmit", 2); @@ -315,7 +315,7 @@ public class TestSplitLogManager { assertEquals(2L, tot_mgr_resubmit.sum() - tot_mgr_resubmit_force.sum()); } - @Test (timeout=180000) + @Test public void testRescanCleanup() throws Exception { LOG.info("TestRescanCleanup - 
ensure RESCAN nodes are cleaned up"); @@ -344,7 +344,7 @@ public class TestSplitLogManager { waitForCounter(tot_mgr_rescan_deleted, 0, 1, to/2); } - @Test (timeout=180000) + @Test public void testTaskDone() throws Exception { LOG.info("TestTaskDone - cleanup task node once in DONE state"); @@ -363,7 +363,7 @@ public class TestSplitLogManager { assertTrue(ZKUtil.checkExists(zkw, tasknode) == -1); } - @Test (timeout=180000) + @Test public void testTaskErr() throws Exception { LOG.info("TestTaskErr - cleanup task node once in ERR state"); @@ -386,7 +386,7 @@ public class TestSplitLogManager { conf.setInt("hbase.splitlog.max.resubmit", ZKSplitLogManagerCoordination.DEFAULT_MAX_RESUBMIT); } - @Test (timeout=180000) + @Test public void testTaskResigned() throws Exception { LOG.info("TestTaskResigned - resubmit task node once in RESIGNED state"); assertEquals(0, tot_mgr_resubmit.sum()); @@ -412,7 +412,7 @@ public class TestSplitLogManager { assertTrue(slt.isUnassigned(master.getServerName())); } - @Test (timeout=180000) + @Test public void testUnassignedTimeout() throws Exception { LOG.info("TestUnassignedTimeout - iff all tasks are unassigned then" + " resubmit"); @@ -448,7 +448,7 @@ public class TestSplitLogManager { waitForCounter(tot_mgr_resubmit_unassigned, 0, 1, 2 * to + to/2); } - @Test (timeout=180000) + @Test public void testDeadWorker() throws Exception { LOG.info("testDeadWorker"); @@ -476,7 +476,7 @@ public class TestSplitLogManager { return; } - @Test (timeout=180000) + @Test public void testWorkerCrash() throws Exception { slm = new SplitLogManager(master, conf); TaskBatch batch = new TaskBatch(); @@ -500,7 +500,7 @@ public class TestSplitLogManager { Assert.assertEquals(1, tot_mgr_resubmit.sum()); } - @Test (timeout=180000) + @Test public void testEmptyLogDir() throws Exception { LOG.info("testEmptyLogDir"); slm = new SplitLogManager(master, conf); @@ -512,7 +512,7 @@ public class TestSplitLogManager { assertFalse(fs.exists(emptyLogDirPath)); } - @Test (timeout = 60000) + @Test public void testLogFilesAreArchived() throws Exception { LOG.info("testLogFilesAreArchived"); slm = new SplitLogManager(master, conf); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java index fa054b46b05..aa544aa049a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java @@ -59,7 +59,7 @@ public class TestTableStateManager { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout = 60000) + @Test public void testUpgradeFromZk() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); TEST_UTIL.startMiniCluster(2, 1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java index c6939637f18..c4055fe2a41 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java @@ -82,22 +82,22 @@ public class TestAssignmentOnRSCrash { UTIL.shutdownMiniCluster(); } - @Test(timeout=30000) + @Test public void testKillRsWithUserRegionWithData() throws Exception { testCrashRsWithUserRegion(true, true); } - @Test(timeout=30000) + @Test 
public void testKillRsWithUserRegionWithoutData() throws Exception { testCrashRsWithUserRegion(true, false); } - @Test(timeout=30000) + @Test public void testStopRsWithUserRegionWithData() throws Exception { testCrashRsWithUserRegion(false, true); } - @Test(timeout=30000) + @Test public void testStopRsWithUserRegionWithoutData() throws Exception { testCrashRsWithUserRegion(false, false); } @@ -133,12 +133,12 @@ public class TestAssignmentOnRSCrash { assertTrue("expected RSs to be killed", nkilled > 0); } - @Test(timeout=60000) + @Test public void testKillRsWithMetaRegion() throws Exception { testCrashRsWithMetaRegion(true); } - @Test(timeout=60000) + @Test public void testStopRsWithMetaRegion() throws Exception { testCrashRsWithMetaRegion(false); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java index 4d0214f4887..f5e46343209 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java @@ -137,7 +137,7 @@ public class TestRogueRSAssignment { admin.setBalancerRunning(true, false); } - @Test(timeout = 120000) + @Test public void testReportRSWithWrongRegion() throws Exception { final TableName tableName = TableName.valueOf(this.name.getMethodName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java index 32b7539e199..83c63da1ea8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java @@ -137,7 +137,7 @@ public class TestSplitTableRegionProcedure { } } - @Test(timeout=60000) + @Test public void testSplitTableRegion() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java index a437fe9501d..8a0365f50a4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java @@ -149,7 +149,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase { * * @throws Exception */ - @Test (timeout=180000) + @Test public void testBulkAssignment() throws Exception { List tmp = getListOfServerNames(randomServers(5, 0)); List hris = randomRegions(20); @@ -190,7 +190,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase { * assignment info. 
* @throws Exception */ - @Test (timeout=180000) + @Test public void testRetainAssignment() throws Exception { // Test simple case where all same servers are there List servers = randomServers(10, 10); @@ -226,7 +226,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase { assertRetainedAssignment(existing, listOfServerNames, assignment); } - @Test (timeout=30000) + @Test public void testRandomAssignment() throws Exception { for (int i = 1; i != 5; ++i) { LOG.info("run testRandomAssignment() with idle servers:" + i); @@ -272,7 +272,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase { } } - @Test (timeout=180000) + @Test public void testRegionAvailability() throws Exception { // Create a cluster with a few servers, assign them to specific racks // then assign some regions. The tests should check whether moving a @@ -351,7 +351,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase { assertTrue(!cluster.wouldLowerAvailability(hri1, servers[6])); } - @Test (timeout=180000) + @Test public void testRegionAvailabilityWithRegionMoves() throws Exception { List list0 = new ArrayList<>(); List list1 = new ArrayList<>(); @@ -466,7 +466,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase { } } - @Test (timeout=180000) + @Test public void testClusterServersWithSameHostPort() { // tests whether the BaseLoadBalancer.Cluster can be constructed with servers // sharing same host and port @@ -506,7 +506,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase { } } - @Test (timeout=180000) + @Test public void testClusterRegionLocations() { // tests whether region locations are handled correctly in Cluster List servers = getListOfServerNames(randomServers(10, 10)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java index 2f5e2cc1b81..0d8529bc462 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java @@ -131,7 +131,7 @@ public class TestDefaultLoadBalancer extends BalancerTestBase { * * @throws Exception */ - @Test (timeout=60000) + @Test public void testBalanceClusterOverall() throws Exception { Map>> clusterLoad = new TreeMap<>(); for (int[] mockCluster : clusterStateMocks) { @@ -169,7 +169,7 @@ public class TestDefaultLoadBalancer extends BalancerTestBase { * level balance while the bytable strategy cannot * @throws Exception */ - @Test (timeout=60000) + @Test public void testImpactOfBalanceClusterOverall() throws Exception { Map>> clusterLoad = new TreeMap<>(); Map> clusterServers = mockUniformClusterServers(mockUniformCluster); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java index 4e48c56ff43..32480ea1bb0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java @@ -103,7 +103,7 @@ public class TestHFileCleaner { + status.getAccessTime(); } - @Test(timeout = 60 *1000) + @Test public void testHFileCleaning() throws Exception { final EnvironmentEdge originalEdge = EnvironmentEdgeManager.getDelegate(); String prefix = "someHFileThatWouldBeAUUID"; @@ -340,7 +340,7 @@ public class 
TestHFileCleaner { Assert.assertEquals(SMALL_FILE_NUM, cleaner.getNumOfDeletedSmallFiles()); } - @Test(timeout = 60 * 1000) + @Test public void testOnConfigurationChange() throws Exception { // constants final int ORIGINAL_THROTTLE_POINT = 512 * 1024; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java index 79d97209792..9c577fe6435 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java @@ -201,7 +201,7 @@ public class TestLogsCleaner { } } - @Test(timeout=5000) + @Test public void testZnodeCversionChange() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); ReplicationLogCleaner cleaner = new ReplicationLogCleaner(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index 0fc3806b991..f0779af80e3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -204,7 +204,7 @@ public class TestReplicationHFileCleaner { * Test for HBASE-14621. This test will not assert directly anything. Without the fix the test * will end up in a infinite loop, so it will timeout. */ - @Test(timeout = 15000) + @Test public void testForDifferntHFileRefsZnodeVersion() throws Exception { // 1. Create a file Path file = new Path(root, "testForDifferntHFileRefsZnodeVersion"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java index 9f2ad924c8d..94efcc7ed41 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java @@ -168,7 +168,7 @@ public class TestSnapshotFromMaster { *
If asking about a snapshot has hasn't occurred, you should get an error.
* */ - @Test(timeout = 300000) + @Test public void testIsDoneContract() throws Exception { IsSnapshotDoneRequest.Builder builder = IsSnapshotDoneRequest.newBuilder(); @@ -222,7 +222,7 @@ public class TestSnapshotFromMaster { assertTrue("Completed, on-disk snapshot not found", response.getDone()); } - @Test(timeout = 300000) + @Test public void testGetCompletedSnapshots() throws Exception { // first check when there are no snapshots GetCompletedSnapshotsRequest request = GetCompletedSnapshotsRequest.newBuilder().build(); @@ -253,7 +253,7 @@ public class TestSnapshotFromMaster { assertEquals("Returned snapshots don't match created snapshots", expected, snapshots); } - @Test(timeout = 300000) + @Test public void testDeleteSnapshot() throws Exception { String snapshotName = "completed"; @@ -280,7 +280,7 @@ public class TestSnapshotFromMaster { * should be retained, while those that are not in a snapshot should be deleted. * @throws Exception on failure */ - @Test(timeout = 300000) + @Test public void testSnapshotHFileArchiving() throws Exception { Admin admin = UTIL.getAdmin(); // make sure we don't fail on listing snapshots diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java index 5e1c9cefdb0..85b00d0d8f7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java @@ -408,25 +408,25 @@ public class TestLockProcedure { ProcedureTestingUtility.assertProcNotFailed(procExec, procId); } - @Test(timeout = 20000) + @Test public void testRemoteTableLockRecovery() throws Exception { LockRequest lock = getTableExclusiveLock(tableName1, testMethodName); testRemoteLockRecovery(lock); } - @Test(timeout = 20000) + @Test public void testRemoteNamespaceLockRecovery() throws Exception { LockRequest lock = getNamespaceLock(namespace, testMethodName); testRemoteLockRecovery(lock); } - @Test(timeout = 20000) + @Test public void testRemoteRegionLockRecovery() throws Exception { LockRequest lock = getRegionLock(tableRegions1, testMethodName); testRemoteLockRecovery(lock); } - @Test (timeout = 20000) + @Test public void testLocalMasterLockRecovery() throws Exception { ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true); CountDownLatch latch = new CountDownLatch(1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java index 14b8dc4f4a3..6f7f69eb9a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java @@ -92,7 +92,7 @@ public class TestSimpleRegionNormalizerOnCluster { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout = 90000) + @Test @SuppressWarnings("deprecation") public void testRegionNormalizationSplitOnCluster() throws Exception { testRegionNormalizationSplitOnCluster(false); @@ -179,7 +179,7 @@ public class TestSimpleRegionNormalizerOnCluster { admin.deleteTable(TABLENAME); } - @Test(timeout = 60000) + @Test @SuppressWarnings("deprecation") public void testRegionNormalizationMergeOnCluster() throws Exception { final TableName tableName = 
TableName.valueOf(name.getMethodName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java index bb405a5c186..bda2c8a3af2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java @@ -100,7 +100,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase { return htd; } - @Test(timeout=60000) + @Test public void testCloneSnapshot() throws Exception { final ProcedureExecutor procExec = getMasterProcedureExecutor(); final TableName clonedTableName = TableName.valueOf("testCloneSnapshot2"); @@ -117,7 +117,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase { clonedTableName); } - @Test(timeout=60000) + @Test public void testCloneSnapshotToSameTable() throws Exception { // take the snapshot SnapshotProtos.SnapshotDescription snapshotDesc = getSnapshot(); @@ -135,7 +135,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase { ProcedureTestingUtility.getExceptionCause(result) instanceof TableExistsException); } - @Test(timeout=60000) + @Test public void testRecoveryAndDoubleExecution() throws Exception { final ProcedureExecutor procExec = getMasterProcedureExecutor(); final TableName clonedTableName = TableName.valueOf("testRecoveryAndDoubleExecution"); @@ -158,7 +158,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase { clonedTableName); } - @Test(timeout = 60000) + @Test public void testRollbackAndDoubleExecution() throws Exception { final ProcedureExecutor procExec = getMasterProcedureExecutor(); final TableName clonedTableName = TableName.valueOf("testRollbackAndDoubleExecution"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java index c420a53a9ab..7a5a9fa671d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java @@ -84,7 +84,7 @@ public class TestCreateNamespaceProcedure { ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(getMasterProcedureExecutor(), false); } - @Test(timeout = 60000) + @Test public void testCreateNamespace() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testCreateNamespace").build(); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -98,7 +98,7 @@ public class TestCreateNamespaceProcedure { validateNamespaceCreated(nsd); } - @Test(timeout=60000) + @Test public void testCreateSameNamespaceTwice() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testCreateSameNamespaceTwice").build(); @@ -124,7 +124,7 @@ public class TestCreateNamespaceProcedure { ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceExistException); } - @Test(timeout=60000) + @Test public void testCreateSystemNamespace() throws Exception { final NamespaceDescriptor nsd = UTIL.getAdmin().getNamespaceDescriptor(NamespaceDescriptor.SYSTEM_NAMESPACE.getName()); @@ -141,7 +141,7 @@ public class TestCreateNamespaceProcedure { 
ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceExistException); } - @Test(timeout=60000) + @Test public void testCreateNamespaceWithInvalidRegionCount() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testCreateNamespaceWithInvalidRegionCount").build(); @@ -161,7 +161,7 @@ public class TestCreateNamespaceProcedure { assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException); } - @Test(timeout=60000) + @Test public void testCreateNamespaceWithInvalidTableCount() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testCreateNamespaceWithInvalidTableCount").build(); @@ -181,7 +181,7 @@ public class TestCreateNamespaceProcedure { assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException); } - @Test(timeout = 60000) + @Test public void testRecoveryAndDoubleExecution() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testRecoveryAndDoubleExecution").build(); @@ -202,7 +202,7 @@ public class TestCreateNamespaceProcedure { validateNamespaceCreated(nsd); } - @Test(timeout = 60000) + @Test public void testRollbackAndDoubleExecution() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testRollbackAndDoubleExecution").build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java index 10dca9d99e2..ef859a3024f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java @@ -94,7 +94,7 @@ public class TestDeleteNamespaceProcedure { } } - @Test(timeout = 60000) + @Test public void testDeleteNamespace() throws Exception { final String namespaceName = "testDeleteNamespace"; final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -110,7 +110,7 @@ public class TestDeleteNamespaceProcedure { validateNamespaceNotExist(namespaceName); } - @Test(timeout=60000) + @Test public void testDeleteNonExistNamespace() throws Exception { final String namespaceName = "testDeleteNonExistNamespace"; final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -129,7 +129,7 @@ public class TestDeleteNamespaceProcedure { ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceNotFoundException); } - @Test(timeout=60000) + @Test public void testDeleteSystemNamespace() throws Exception { final String namespaceName = NamespaceDescriptor.SYSTEM_NAMESPACE.getName(); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -144,7 +144,7 @@ public class TestDeleteNamespaceProcedure { assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException); } - @Test(timeout=60000) + @Test public void testDeleteNonEmptyNamespace() throws Exception { final String namespaceName = "testDeleteNonExistNamespace"; final TableName tableName = TableName.valueOf("testDeleteNonExistNamespace:" + name.getMethodName()); @@ -164,7 +164,7 @@ public class TestDeleteNamespaceProcedure { assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException); } - @Test(timeout = 60000) + @Test public void testRecoveryAndDoubleExecution() throws Exception { final String namespaceName = "testRecoveryAndDoubleExecution"; final ProcedureExecutor 
procExec = getMasterProcedureExecutor(); @@ -186,7 +186,7 @@ public class TestDeleteNamespaceProcedure { validateNamespaceNotExist(namespaceName); } - @Test(timeout = 60000) + @Test public void testRollbackAndDoubleExecution() throws Exception { final String namespaceName = "testRollbackAndDoubleExecution"; final ProcedureExecutor procExec = getMasterProcedureExecutor(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java index bb41ef05b3a..86ddf92c432 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java @@ -48,7 +48,7 @@ public class TestDeleteTableProcedure extends TestTableDDLProcedureBase { private static final Logger LOG = LoggerFactory.getLogger(TestDeleteTableProcedure.class); @Rule public TestName name = new TestName(); - @Test(timeout=60000, expected=TableNotFoundException.class) + @Test(expected=TableNotFoundException.class) public void testDeleteNotExistentTable() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -59,7 +59,7 @@ public class TestDeleteTableProcedure extends TestTableDDLProcedureBase { latch.await(); } - @Test(timeout=60000, expected=TableNotDisabledException.class) + @Test(expected=TableNotDisabledException.class) public void testDeleteNotDisabledTable() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -72,7 +72,7 @@ public class TestDeleteTableProcedure extends TestTableDDLProcedureBase { latch.await(); } - @Test(timeout=60000) + @Test public void testDeleteDeletedTable() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -103,14 +103,14 @@ public class TestDeleteTableProcedure extends TestTableDDLProcedureBase { assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotFoundException); } - @Test(timeout=60000) + @Test public void testSimpleDelete() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final byte[][] splitKeys = null; testSimpleDelete(tableName, splitKeys); } - @Test(timeout=60000) + @Test public void testSimpleDeleteWithSplits() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final byte[][] splitKeys = new byte[][] { @@ -132,7 +132,7 @@ public class TestDeleteTableProcedure extends TestTableDDLProcedureBase { MasterProcedureTestingUtility.validateTableDeletion(getMaster(), tableName); } - @Test(timeout=60000) + @Test public void testRecoveryAndDoubleExecution() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java index da8878972c1..437cda20984 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java @@ -48,7 +48,7 @@ public class TestDisableTableProcedure extends TestTableDDLProcedureBase { @Rule public TestName name = new TestName(); - @Test(timeout 
= 60000) + @Test public void testDisableTable() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -64,7 +64,7 @@ public class TestDisableTableProcedure extends TestTableDDLProcedureBase { MasterProcedureTestingUtility.validateTableIsDisabled(getMaster(), tableName); } - @Test(timeout = 60000) + @Test public void testDisableTableMultipleTimes() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -112,7 +112,7 @@ public class TestDisableTableProcedure extends TestTableDDLProcedureBase { MasterProcedureTestingUtility.validateTableIsDisabled(getMaster(), tableName); } - @Test(timeout=60000) + @Test public void testRecoveryAndDoubleExecution() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java index 5aeb665a6ec..4211187086f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java @@ -47,7 +47,7 @@ public class TestEnableTableProcedure extends TestTableDDLProcedureBase { private static final Logger LOG = LoggerFactory.getLogger(TestEnableTableProcedure.class); @Rule public TestName name = new TestName(); - @Test(timeout = 60000) + @Test public void testEnableTable() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -64,7 +64,7 @@ public class TestEnableTableProcedure extends TestTableDDLProcedureBase { MasterProcedureTestingUtility.validateTableIsEnabled(getMaster(), tableName); } - @Test(timeout=60000, expected=TableNotDisabledException.class) + @Test(expected=TableNotDisabledException.class) public void testEnableNonDisabledTable() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -97,7 +97,7 @@ public class TestEnableTableProcedure extends TestTableDDLProcedureBase { Assert.fail("Enable should throw exception through latch."); } - @Test(timeout = 60000) + @Test public void testRecoveryAndDoubleExecution() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -120,7 +120,7 @@ public class TestEnableTableProcedure extends TestTableDDLProcedureBase { MasterProcedureTestingUtility.validateTableIsEnabled(getMaster(), tableName); } - @Test(timeout = 60000) + @Test public void testRollbackAndDoubleExecution() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java index 69285f19ca6..48602dc412f 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestFastFailOnProcedureNotRegistered.java @@ -36,7 +36,7 @@ public class TestFastFailOnProcedureNotRegistered extends TestTableDDLProcedureB public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestFastFailOnProcedureNotRegistered.class); - @Test(expected=DoNotRetryIOException.class, timeout = 3000) + @Test(expected=DoNotRetryIOException.class) public void testFastFailOnProcedureNotRegistered() throws IOException { Admin admin = UTIL.getAdmin(); Map props = new HashMap(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java index 83e38439898..96bdbde83b7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java @@ -90,7 +90,7 @@ public class TestMasterProcedureEvents { } } - @Test(timeout = 30000) + @Test public void testMasterInitializedEvent() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); @@ -108,7 +108,7 @@ public class TestMasterProcedureEvents { new CreateTableProcedure(procExec.getEnvironment(), htd, new HRegionInfo[] { hri })); } - @Test(timeout = 30000) + @Test public void testServerCrashProcedureEvent() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); HMaster master = UTIL.getMiniHBaseCluster().getMaster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java index 2c0db61d537..3cc5a862b58 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java @@ -71,7 +71,7 @@ public class TestMasterProcedureSchedulerConcurrency { * Verify that "write" operations for a single table are serialized, * but different tables can be executed in parallel. 
*/ - @Test(timeout=60000) + @Test public void testConcurrentWriteOps() throws Exception { final TestTableProcSet procSet = new TestTableProcSet(queue); @@ -157,7 +157,7 @@ public class TestMasterProcedureSchedulerConcurrency { } } - @Test(timeout=60000) + @Test public void testMasterProcedureSchedulerPerformanceEvaluation() throws Exception { // Make sure the tool does not get stuck MasterProcedureSchedulerPerformanceEvaluation.main(new String[] { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java index a58cc69be37..34e7921f276 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java @@ -88,7 +88,7 @@ public class TestModifyNamespaceProcedure { } - @Test(timeout = 60000) + @Test public void testModifyNamespace() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testModifyNamespace").build(); final String nsKey1 = "hbase.namespace.quota.maxregions"; @@ -124,7 +124,7 @@ public class TestModifyNamespaceProcedure { assertEquals(nsValue2, currentNsDescriptor.getConfigurationValue(nsKey2)); } - @Test(timeout=60000) + @Test public void testModifyNonExistNamespace() throws Exception { final String namespaceName = "testModifyNonExistNamespace"; final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -152,7 +152,7 @@ public class TestModifyNamespaceProcedure { ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceNotFoundException); } - @Test(timeout=60000) + @Test public void testModifyNamespaceWithInvalidRegionCount() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testModifyNamespaceWithInvalidRegionCount").build(); @@ -175,7 +175,7 @@ public class TestModifyNamespaceProcedure { assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException); } - @Test(timeout=60000) + @Test public void testModifyNamespaceWithInvalidTableCount() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testModifyNamespaceWithInvalidTableCount").build(); @@ -198,7 +198,7 @@ public class TestModifyNamespaceProcedure { assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException); } - @Test(timeout = 60000) + @Test public void testRecoveryAndDoubleExecution() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testRecoveryAndDoubleExecution").build(); @@ -227,7 +227,7 @@ public class TestModifyNamespaceProcedure { assertEquals(nsValue, currentNsDescriptor.getConfigurationValue(nsKey)); } - @Test(timeout = 60000) + @Test public void testRollbackAndDoubleExecution() throws Exception { final NamespaceDescriptor nsd = NamespaceDescriptor.create("testRollbackAndDoubleExecution").build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java index 930956fdc50..c519835d75e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java @@ -48,7 +48,7 @@ public class TestModifyTableProcedure extends 
TestTableDDLProcedureBase { @Rule public TestName name = new TestName(); - @Test(timeout=60000) + @Test public void testModifyTable() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -86,7 +86,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase { assertEquals(newMemStoreFlushSize, currentHtd.getMemStoreFlushSize()); } - @Test(timeout = 60000) + @Test public void testModifyTableAddCF() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -126,7 +126,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase { assertEquals(3, currentHtd.getFamiliesKeys().size()); } - @Test(timeout = 60000) + @Test public void testModifyTableDeleteCF() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final String cf1 = "cf1"; @@ -185,7 +185,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase { assertTrue(currentHtd.hasFamily(Bytes.toBytes(cf1))); } - @Test(timeout=60000) + @Test public void testRecoveryAndDoubleExecutionOffline() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final String cf2 = "cf2"; @@ -225,7 +225,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase { tableName, regions, false, "cf1", cf2); } - @Test(timeout = 60000) + @Test public void testRecoveryAndDoubleExecutionOnline() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final String cf2 = "cf2"; @@ -264,7 +264,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase { tableName, regions, "cf1", cf2); } - @Test(timeout = 60000) + @Test public void testRollbackAndDoubleExecutionOnline() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final String familyName = "cf2"; @@ -293,7 +293,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase { tableName, regions, "cf1"); } - @Test(timeout = 60000) + @Test public void testRollbackAndDoubleExecutionOffline() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final String familyName = "cf2"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java index 9a1c2d2ba79..c003379c4cc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java @@ -94,7 +94,7 @@ public class TestProcedureAdmin { } } - @Test(timeout=60000) + @Test public void testAbortProcedureSuccess() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -119,7 +119,7 @@ public class TestProcedureAdmin { tableName); } - @Test(timeout=60000) + @Test public void testAbortProcedureFailure() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -151,7 +151,7 @@ public class TestProcedureAdmin { UTIL.getHBaseCluster().getMaster(), tableName); } - @Test(timeout=60000) + @Test public void testAbortProcedureInterruptedNotAllowed() throws 
Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -179,7 +179,7 @@ public class TestProcedureAdmin { UTIL.getHBaseCluster().getMaster(), tableName); } - @Test(timeout=60000) + @Test public void testAbortNonExistProcedure() throws Exception { final ProcedureExecutor procExec = getMasterProcedureExecutor(); Random randomGenerator = new Random(); @@ -193,7 +193,7 @@ public class TestProcedureAdmin { assertFalse(abortResult); } - @Test(timeout=60000) + @Test public void testGetProcedure() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final ProcedureExecutor procExec = getMasterProcedureExecutor(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java index 499531589cd..e9e5ebecb87 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java @@ -148,7 +148,7 @@ public class TestRestoreSnapshotProcedure extends TestTableDDLProcedureBase { return htd; } - @Test(timeout=600000) + @Test public void testRestoreSnapshot() throws Exception { final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -160,7 +160,7 @@ public class TestRestoreSnapshotProcedure extends TestTableDDLProcedureBase { validateSnapshotRestore(); } - @Test(timeout=60000) + @Test public void testRestoreSnapshotToDifferentTable() throws Exception { final ProcedureExecutor procExec = getMasterProcedureExecutor(); final TableName restoredTableName = TableName.valueOf(name.getMethodName()); @@ -175,7 +175,7 @@ public class TestRestoreSnapshotProcedure extends TestTableDDLProcedureBase { ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotFoundException); } - @Test(timeout=60000) + @Test public void testRestoreSnapshotToEnabledTable() throws Exception { final ProcedureExecutor procExec = getMasterProcedureExecutor(); @@ -195,7 +195,7 @@ public class TestRestoreSnapshotProcedure extends TestTableDDLProcedureBase { } } - @Test(timeout=60000) + @Test public void testRecoveryAndDoubleExecution() throws Exception { final ProcedureExecutor procExec = getMasterProcedureExecutor(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java index 274b4e542e0..44c55103b6b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java @@ -94,7 +94,7 @@ public class TestSafemodeBringsDownMaster { public void tearDown() throws Exception { } - @Test(timeout=60000) + @Test public void testSafemodeBringsDownMaster() throws Exception { final TableName tableName = TableName.valueOf("testSafemodeBringsDownMaster"); final byte[][] splitKeys = new byte[][] { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java index f8899e7850d..58d5d46d237 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java @@ -87,17 +87,17 @@ public class TestServerCrashProcedure { } - @Test(timeout=60000) + @Test public void testCrashTargetRs() throws Exception { testRecoveryAndDoubleExecution(false, false); } - @Test(timeout=60000) + @Test public void testRecoveryAndDoubleExecutionOnRsWithMeta() throws Exception { testRecoveryAndDoubleExecution(true, true); } - @Test(timeout=60000) + @Test public void testRecoveryAndDoubleExecutionOnRsWithoutMeta() throws Exception { testRecoveryAndDoubleExecution(false, true); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java index ac6a3c545ed..99699ffb6dd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java @@ -104,7 +104,7 @@ public class TestWALProcedureStoreOnHDFS { } } - @Test(timeout=60000, expected=RuntimeException.class) + @Test(expected=RuntimeException.class) public void testWalAbortOnLowReplication() throws Exception { setupDFS(); @@ -123,7 +123,7 @@ public class TestWALProcedureStoreOnHDFS { assertFalse(store.isRunning()); } - @Test(timeout=60000) + @Test public void testWalAbortOnLowReplicationWithQueuedWriters() throws Exception { setupDFS(); @@ -167,7 +167,7 @@ public class TestWALProcedureStoreOnHDFS { reCount.get() < thread.length); } - @Test(timeout=60000) + @Test public void testWalRollOnLowReplication() throws Exception { UTIL.getConfiguration().setInt("dfs.namenode.replication.min", 1); setupDFS(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java index b8499d5b466..3eff88a57bb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java @@ -92,7 +92,7 @@ public class TestSnapshotFileCache { fs.delete(snapshotDir, true); } - @Test(timeout = 10000000) + @Test public void testLoadAndDelete() throws IOException { // don't refresh the cache unless we tell it to long period = Long.MAX_VALUE; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java index 8ef8c7aa67a..7c67cd8e79c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java @@ -278,7 +278,7 @@ public class TestMobCompactor { LOG.info("alter status finished"); } - @Test(timeout = 300000) + @Test public void testMinorCompaction() throws Exception { resetConf(); int mergeSize = 5000; @@ -428,7 +428,7 @@ public class TestMobCompactor { new String[] { "20150907", "20151128", "20151205", "20160103" }, false); } - @Test(timeout = 300000) + @Test public void testCompactionWithHFileLink() throws IOException, InterruptedException { resetConf(); String tableNameAsString = "testCompactionWithHFileLink"; @@ -517,7 
+517,7 @@ public class TestMobCompactor { assertRefFileNameEqual(family1); } - @Test(timeout = 300000) + @Test public void testMajorCompactionFromAdmin() throws Exception { resetConf(); int mergeSize = 5000; @@ -592,7 +592,7 @@ public class TestMobCompactor { table.close(); } - @Test(timeout = 300000) + @Test public void testScannerOnBulkLoadRefHFiles() throws Exception { resetConf(); setUp("testScannerOnBulkLoadRefHFiles"); @@ -652,7 +652,7 @@ public class TestMobCompactor { * is compacted with some other normal hfiles. This is to make sure the mvcc is included * after compaction for mob enabled store files. */ - @Test(timeout = 300000) + @Test public void testGetAfterCompaction() throws Exception { resetConf(); conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java index a1fccbc9af4..ecaa7519434 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java @@ -88,7 +88,7 @@ public class TestProcedure { * With a single member, verify ordered execution. The Coordinator side is run in a separate * thread so we can only trigger from members and wait for particular state latches. */ - @Test(timeout = 60000) + @Test public void testSingleMember() throws Exception { // The member List members = new ArrayList<>(); @@ -133,7 +133,7 @@ public class TestProcedure { verify(procspy, never()).receive(any()); } - @Test(timeout = 60000) + @Test public void testMultipleMember() throws Exception { // 2 members List members = new ArrayList<>(); @@ -186,7 +186,7 @@ public class TestProcedure { verify(procspy, never()).receive(any()); } - @Test(timeout = 60000) + @Test public void testErrorPropagation() throws Exception { List members = new ArrayList<>(); members.add("member"); @@ -212,7 +212,7 @@ public class TestProcedure { verify(procspy).sendGlobalBarrierComplete(); } - @Test(timeout = 60000) + @Test public void testBarrieredErrorPropagation() throws Exception { List members = new ArrayList<>(); members.add("member"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java index 077ed194923..6bc35d68a9d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java @@ -123,7 +123,7 @@ public class TestProcedureCoordinator { /** * Check handling a connection failure correctly if we get it during the acquiring phase */ - @Test(timeout = 60000) + @Test public void testUnreachableControllerDuringPrepare() throws Exception { coordinator = buildNewCoordinator(); // setup the proc @@ -154,7 +154,7 @@ public class TestProcedureCoordinator { /** * Check handling a connection failure correctly if we get it during the barrier phase */ - @Test(timeout = 60000) + @Test public void testUnreachableControllerDuringCommit() throws Exception { coordinator = buildNewCoordinator(); @@ -184,17 +184,17 @@ public class TestProcedureCoordinator { anyListOf(String.class)); } - @Test(timeout = 60000) + @Test public void testNoCohort() throws Exception { runSimpleProcedure(); } - @Test(timeout = 60000) + @Test public void testSingleCohortOrchestration() throws Exception { 
runSimpleProcedure("one"); } - @Test(timeout = 60000) + @Test public void testMultipleCohortOrchestration() throws Exception { runSimpleProcedure("one", "two", "three", "four"); } @@ -210,7 +210,7 @@ public class TestProcedureCoordinator { /** * Test that if nodes join the barrier early we still correctly handle the progress */ - @Test(timeout = 60000) + @Test public void testEarlyJoiningBarrier() throws Exception { final String[] cohort = new String[] { "one", "two", "three", "four" }; coordinator = buildNewCoordinator(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java index 20406fe9548..b95ddf20aad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java @@ -129,7 +129,7 @@ public class TestProcedureMember { /** * Test the normal sub procedure execution case. */ - @Test(timeout = 500) + @Test public void testSimpleRun() throws Exception { member = buildCohortMember(); EmptySubprocedure subproc = new EmptySubprocedure(member, mockListener); @@ -160,7 +160,7 @@ public class TestProcedureMember { * Make sure we call cleanup etc, when we have an exception during * {@link Subprocedure#acquireBarrier()}. */ - @Test(timeout = 60000) + @Test public void testMemberPrepareException() throws Exception { buildCohortMemberPair(); @@ -195,7 +195,7 @@ public class TestProcedureMember { /** * Make sure we call cleanup etc, when we have an exception during prepare. */ - @Test(timeout = 60000) + @Test public void testSendMemberAcquiredCommsFailure() throws Exception { buildCohortMemberPair(); @@ -234,7 +234,7 @@ public class TestProcedureMember { * is checked. Thus, the {@link Subprocedure#acquireBarrier()} should succeed but later get rolled back * via {@link Subprocedure#cleanup}. */ - @Test(timeout = 60000) + @Test public void testCoordinatorAbort() throws Exception { buildCohortMemberPair(); @@ -279,7 +279,7 @@ public class TestProcedureMember { * member. Members are then responsible for reading its TX log. This implementation actually * rolls back, and thus breaks the normal TX guarantees. */ - @Test(timeout = 60000) + @Test public void testMemberCommitException() throws Exception { buildCohortMemberPair(); @@ -320,7 +320,7 @@ public class TestProcedureMember { * member. Members are then responsible for reading its TX log. This implementation actually * rolls back, and thus breaks the normal TX guarantees. 
*/ - @Test(timeout = 60000) + @Test public void testMemberCommitCommsFailure() throws Exception { buildCohortMemberPair(); final TimeoutException oate = new TimeoutException("bogus timeout",1,2,0); @@ -358,7 +358,7 @@ public class TestProcedureMember { * Fail correctly on getting an external error while waiting for the prepared latch * @throws Exception on failure */ - @Test(timeout = 60000) + @Test public void testPropagateConnectionErrorBackToManager() throws Exception { // setup the operation member = buildCohortMember(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java index fef77944952..143f30db487 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java @@ -83,7 +83,7 @@ public class TestZKProcedureControllers { * Smaller test to just test the actuation on the cohort member * @throws Exception on failure */ - @Test(timeout = 60000) + @Test public void testSimpleZKCohortMemberController() throws Exception { ZKWatcher watcher = UTIL.getZooKeeperWatcher(); final String operationName = "instanceTest"; @@ -146,7 +146,7 @@ public class TestZKProcedureControllers { assertEquals("Didn't delete commit node", -1, ZKUtil.checkExists(watcher, commit)); } - @Test(timeout = 60000) + @Test public void testZKCoordinatorControllerWithNoCohort() throws Exception { final String operationName = "no cohort controller test"; final byte[] data = new byte[] { 1, 2, 3 }; @@ -155,7 +155,7 @@ public class TestZKProcedureControllers { runMockCommitWithOrchestratedControllers(startCohortFirst, operationName, data); } - @Test(timeout = 60000) + @Test public void testZKCoordinatorControllerWithSingleMemberCohort() throws Exception { final String operationName = "single member controller test"; final byte[] data = new byte[] { 1, 2, 3 }; @@ -164,7 +164,7 @@ public class TestZKProcedureControllers { runMockCommitWithOrchestratedControllers(startCohortFirst, operationName, data, "cohort"); } - @Test(timeout = 60000) + @Test public void testZKCoordinatorControllerMultipleCohort() throws Exception { final String operationName = "multi member controller test"; final byte[] data = new byte[] { 1, 2, 3 }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaState.java index c16478b447a..b0d619e7d05 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaState.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaState.java @@ -49,7 +49,7 @@ public class TestQuotaState { @Rule public TestName name = new TestName(); - @Test(timeout=60000) + @Test public void testQuotaStateBypass() { QuotaState quotaInfo = new QuotaState(); assertTrue(quotaInfo.isBypass()); @@ -60,7 +60,7 @@ public class TestQuotaState { assertNoopLimiter(userQuotaState.getTableLimiter(UNKNOWN_TABLE_NAME)); } - @Test(timeout=60000) + @Test public void testSimpleQuotaStateOperation() { final TableName tableName = TableName.valueOf(name.getMethodName()); final int NUM_GLOBAL_THROTTLE = 3; @@ -81,7 +81,7 @@ public class TestQuotaState { assertThrottleException(quotaInfo.getTableLimiter(tableName), NUM_TABLE_THROTTLE); } - @Test(timeout=60000) + @Test public void testQuotaStateUpdateBypassThrottle() { final long 
LAST_UPDATE = 10; @@ -100,7 +100,7 @@ public class TestQuotaState { assertNoopLimiter(quotaInfo.getTableLimiter(UNKNOWN_TABLE_NAME)); } - @Test(timeout=60000) + @Test public void testQuotaStateUpdateGlobalThrottle() { final int NUM_GLOBAL_THROTTLE_1 = 3; final int NUM_GLOBAL_THROTTLE_2 = 11; @@ -146,7 +146,7 @@ public class TestQuotaState { assertNoopLimiter(quotaInfo.getGlobalLimiter()); } - @Test(timeout=60000) + @Test public void testQuotaStateUpdateTableThrottle() { final TableName tableNameA = TableName.valueOf(name.getMethodName() + "A"); final TableName tableNameB = TableName.valueOf(name.getMethodName() + "B"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java index 1c3eae4d5d7..a7b8d9db243 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java @@ -117,7 +117,7 @@ public class TestQuotaThrottle { } } - @Test(timeout=60000) + @Test public void testUserGlobalThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String userName = User.getCurrent().getShortName(); @@ -141,7 +141,7 @@ public class TestQuotaThrottle { assertEquals(60, doGets(60, tables)); } - @Test(timeout=60000) + @Test public void testUserGlobalReadAndWriteThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String userName = User.getCurrent().getShortName(); @@ -173,7 +173,7 @@ public class TestQuotaThrottle { assertEquals(60, doGets(60, tables)); } - @Test(timeout=60000) + @Test public void testUserTableThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String userName = User.getCurrent().getShortName(); @@ -198,7 +198,7 @@ public class TestQuotaThrottle { assertEquals(60, doGets(60, tables)); } - @Test(timeout=60000) + @Test public void testUserTableReadAndWriteThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String userName = User.getCurrent().getShortName(); @@ -239,7 +239,7 @@ public class TestQuotaThrottle { assertEquals(60, doGets(60, tables)); } - @Test(timeout=60000) + @Test public void testUserNamespaceThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String userName = User.getCurrent().getShortName(); @@ -264,7 +264,7 @@ public class TestQuotaThrottle { assertEquals(60, doGets(60, tables)); } - @Test(timeout=60000) + @Test public void testUserNamespaceReadAndWriteThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String userName = User.getCurrent().getShortName(); @@ -297,7 +297,7 @@ public class TestQuotaThrottle { assertEquals(60, doGets(60, tables)); } - @Test(timeout=60000) + @Test public void testTableGlobalThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); @@ -321,7 +321,7 @@ public class TestQuotaThrottle { assertEquals(80, doGets(80, tables[0], tables[1])); } - @Test(timeout=60000) + @Test public void testTableGlobalReadAndWriteThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); @@ -358,7 +358,7 @@ public class TestQuotaThrottle { assertEquals(80, doGets(80, tables[0], tables[1])); } - @Test(timeout=60000) + @Test public void testNamespaceGlobalThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String NAMESPACE = "default"; @@ -380,7 +380,7 @@ public class TestQuotaThrottle { assertEquals(40, doPuts(40, tables[0])); } - 
@Test(timeout=60000) + @Test public void testNamespaceGlobalReadAndWriteThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String NAMESPACE = "default"; @@ -411,7 +411,7 @@ public class TestQuotaThrottle { assertEquals(40, doPuts(40, tables[0])); } - @Test(timeout=60000) + @Test public void testUserAndTableThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String userName = User.getCurrent().getShortName(); @@ -460,7 +460,7 @@ public class TestQuotaThrottle { assertEquals(40, doGets(40, tables[0])); } - @Test(timeout=60000) + @Test public void testUserGlobalBypassThrottle() throws Exception { final Admin admin = TEST_UTIL.getAdmin(); final String userName = User.getCurrent().getShortName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java index f49fa40ed7b..ae71b314fb0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java @@ -239,7 +239,7 @@ public class TestSpaceQuotas { } } - @Test(timeout=120000) + @Test public void testNoBulkLoadsWithNoWrites() throws Exception { Put p = new Put(Bytes.toBytes("to_reject")); p.addColumn( @@ -259,7 +259,7 @@ public class TestSpaceQuotas { } } - @Test(timeout=120000) + @Test public void testAtomicBulkLoadUnderQuota() throws Exception { // Need to verify that if the batch of hfiles cannot be loaded, none are loaded. TableName tn = helper.createTableWithRegions(10); @@ -330,7 +330,7 @@ public class TestSpaceQuotas { } } - @Test(timeout=120000) + @Test public void testTableQuotaOverridesNamespaceQuota() throws Exception { final SpaceViolationPolicy policy = SpaceViolationPolicy.NO_INSERTS; final TableName tn = helper.createTableWithRegions(10); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java index ca746c3a44c..d58fb275301 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java @@ -153,7 +153,7 @@ public class TestCompactSplitThread { } } - @Test(timeout = 60000) + @Test public void testFlushWithTableCompactionDisabled() throws Exception { HTableDescriptor htd = new HTableDescriptor(tableName); htd.setCompactionEnabled(false); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java index e0f87a608ab..599db542ff4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java @@ -69,22 +69,22 @@ public class TestCompactionState { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout=600000) + @Test public void testMajorCompaction() throws IOException, InterruptedException { compaction(name.getMethodName(), 8, CompactionState.MAJOR, false); } - @Test(timeout=600000) + @Test public void testMinorCompaction() throws IOException, InterruptedException { compaction(name.getMethodName(), 15, CompactionState.MINOR, false); } - @Test(timeout=600000) + @Test public void testMajorCompactionOnFamily() throws IOException, 
InterruptedException { compaction(name.getMethodName(), 8, CompactionState.MAJOR, true); } - @Test(timeout=600000) + @Test public void testMinorCompactionOnFamily() throws IOException, InterruptedException { compaction(name.getMethodName(), 15, CompactionState.MINOR, true); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java index a7e959f20aa..b423b45a550 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java @@ -184,7 +184,7 @@ public class TestFSErrorsExposed { * removes the data from HDFS underneath it, and ensures that * errors are bubbled to the client. */ - @Test(timeout=5 * 60 * 1000) + @Test public void testFullSystemBubblesFSErrors() throws Exception { // We won't have an error if the datanode is not there if we use short circuit // it's a known 'feature'. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java index e27a9862784..7cb3e63e644 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java @@ -108,7 +108,7 @@ public class TestFailedAppendAndSync { * First I need to set up some mocks for Server and RegionServerServices. I also need to * set up a dodgy WAL that will throw an exception when we go to append to it. */ - @Test (timeout=300000) + @Test public void testLockupAroundBadAssignSync() throws IOException { final AtomicLong rolls = new AtomicLong(0); // Dodgy WAL. Will throw exceptions when flags set. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java index fd448023b4e..22ffeb01335 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java @@ -68,7 +68,7 @@ public class TestHRegionOnCluster { @Rule public TestName name = new TestName(); - @Test (timeout=300000) + @Test public void testDataCorrectnessReplayingRecoveredEdits() throws Exception { final int NUM_MASTERS = 1; final int NUM_RS = 3; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index 31eea48b5b1..6d2f735fd3c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java @@ -254,7 +254,7 @@ public class TestHRegionReplayEvents { * Tests a case where we replay only a flush start marker, then the region is closed. 
This region * should not block indefinitely */ - @Test (timeout = 60000) + @Test public void testOnlyReplayingFlushStartDoesNotHoldUpRegionClose() throws IOException { // load some data to primary and flush int start = 0; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java index c2bd83ac4b9..ea40200da82 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java @@ -1465,7 +1465,7 @@ public class TestHStore { * @throws IOException * @throws InterruptedException */ - @Test (timeout=30000) + @Test public void testRunDoubleMemStoreCompactors() throws IOException, InterruptedException { int flushSize = 500; Configuration conf = HBaseConfiguration.create(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java index 7c6c6fe786e..70b8433ad0a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java @@ -158,7 +158,7 @@ public class TestMobStoreScanner { testGetFromArchive(true); } - @Test(timeout=60000) + @Test public void testGetMassive() throws Exception { setUp(defaultThreshold, TableName.valueOf(name.getMethodName())); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java index 8266a88bab5..353cc28be8e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java @@ -127,7 +127,7 @@ public class TestPerColumnFamilyFlush { Arrays.equals(r.getFamilyMap(family).get(qf), val)); } - @Test(timeout = 180000) + @Test public void testSelectiveFlushWhenEnabled() throws IOException { // Set up the configuration, use new one to not conflict with minicluster in other tests Configuration conf = new HBaseTestingUtility().getConfiguration(); @@ -269,7 +269,7 @@ public class TestPerColumnFamilyFlush { HBaseTestingUtility.closeRegionAndWAL(region); } - @Test(timeout = 180000) + @Test public void testSelectiveFlushWhenNotEnabled() throws IOException { // Set up the configuration, use new one to not conflict with minicluster in other tests Configuration conf = new HBaseTestingUtility().getConfiguration(); @@ -423,7 +423,7 @@ public class TestPerColumnFamilyFlush { } // Test Log Replay with Distributed log split on. - @Test(timeout = 180000) + @Test public void testLogReplayWithDistributedLogSplit() throws Exception { doTestLogReplay(); } @@ -442,7 +442,7 @@ public class TestPerColumnFamilyFlush { * test ensures that we do a full-flush in that scenario. * @throws IOException */ - @Test(timeout = 180000) + @Test public void testFlushingWhenLogRolling() throws Exception { TableName tableName = TableName.valueOf("testFlushingWhenLogRolling"); Configuration conf = TEST_UTIL.getConfiguration(); @@ -559,7 +559,7 @@ public class TestPerColumnFamilyFlush { // Under the same write load, small stores should have less store files when // percolumnfamilyflush enabled. 
- @Test(timeout = 180000) + @Test public void testCompareStoreFileCount() throws Exception { long memstoreFlushSize = 1024L * 1024; Configuration conf = TEST_UTIL.getConfiguration(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java index f5bb735fde9..cdf48017e19 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java @@ -77,7 +77,7 @@ public class TestRecoveredEdits { * made it in. * @throws IOException */ - @Test (timeout=180000) + @Test public void testReplayWorksThoughLotsOfFlushing() throws IOException { for(MemoryCompactionPolicy policy : MemoryCompactionPolicy.values()) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java index 05d80e5de9e..7190d840a7f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java @@ -82,7 +82,7 @@ public class TestRegionOpen { return HTU.getHBaseCluster().getLiveRegionServerThreads().get(0).getRegionServer(); } - @Test(timeout = 60000) + @Test public void testPriorityRegionIsOpenedWithSeparateThreadPool() throws Exception { final TableName tableName = TableName.valueOf(TestRegionOpen.class.getSimpleName()); ThreadPoolExecutor exec = getRS().getExecutorService() @@ -100,7 +100,7 @@ public class TestRegionOpen { assertEquals(completed + 1, exec.getCompletedTaskCount()); } - @Test(timeout = 60000) + @Test public void testNonExistentRegionReplica() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final byte[] FAMILYNAME = Bytes.toBytes("fam"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java index f9502b70998..cd9e1a560cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java @@ -108,7 +108,7 @@ public class TestRegionReplicaFailover { * Tests the case where a newly created table with region replicas and no data, the secondary * region replicas are available to read immediately. */ - @Test(timeout = 60000) + @Test public void testSecondaryRegionWithEmptyRegion() throws IOException { // Create a new table with region replication, don't put any data. Test that the secondary // region replica is available to read. @@ -127,7 +127,7 @@ public class TestRegionReplicaFailover { * (enable/disable table, etc) makes the region replicas readable. 
* @throws IOException */ - @Test(timeout = 60000) + @Test public void testSecondaryRegionWithNonEmptyRegion() throws IOException { // Create a new table with region replication and load some data // than disable and enable the table again and verify the data from secondary @@ -146,7 +146,7 @@ public class TestRegionReplicaFailover { /** * Tests the case where killing a primary region with unflushed data recovers */ - @Test (timeout = 120000) + @Test public void testPrimaryRegionKill() throws Exception { try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration()); Table table = connection.getTable(htd.getTableName())) { @@ -209,7 +209,7 @@ public class TestRegionReplicaFailover { * Tests the case where killing a secondary region with unflushed data recovers, and the replica * becomes available to read again shortly. */ - @Test (timeout = 120000) + @Test public void testSecondaryRegionKill() throws Exception { try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration()); Table table = connection.getTable(htd.getTableName())) { @@ -250,7 +250,7 @@ public class TestRegionReplicaFailover { * new writes while one of the secondaries is killed. Verification is done for both of the * secondary replicas. */ - @Test (timeout = 120000) + @Test public void testSecondaryRegionKillWhilePrimaryIsAcceptingWrites() throws Exception { try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration()); Table table = connection.getTable(htd.getTableName()); @@ -322,7 +322,7 @@ public class TestRegionReplicaFailover { * Tests the case where we are creating a table with a lot of regions and replicas. Opening region * replicas should not block handlers on RS indefinitely. */ - @Test (timeout = 120000) + @Test public void testLotsOfRegionReplicas() throws IOException { int numRegions = NB_SERVERS * 20; int regionReplication = 10; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java index c9a2260b056..263b0dd1eb0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java @@ -122,7 +122,7 @@ public class TestRegionReplicas { return HTU.getMiniHBaseCluster().getRegionServer(0); } - @Test(timeout = 60000) + @Test public void testOpenRegionReplica() throws Exception { openRegion(HTU, getRS(), hriSecondary); try { @@ -138,7 +138,7 @@ public class TestRegionReplicas { } /** Tests that the meta location is saved for secondary regions */ - @Test(timeout = 60000) + @Test public void testRegionReplicaUpdatesMetaLocation() throws Exception { openRegion(HTU, getRS(), hriSecondary); Table meta = null; @@ -152,7 +152,7 @@ public class TestRegionReplicas { } } - @Test(timeout = 60000) + @Test public void testRegionReplicaGets() throws Exception { try { //load some data to primary @@ -176,7 +176,7 @@ public class TestRegionReplicas { } } - @Test(timeout = 60000) + @Test public void testGetOnTargetRegionReplica() throws Exception { try { //load some data to primary @@ -233,7 +233,7 @@ public class TestRegionReplicas { before(); } - @Test(timeout = 300000) + @Test public void testRefresStoreFiles() throws Exception { // enable store file refreshing final int refreshPeriod = 2000; // 2 sec @@ -310,7 +310,7 @@ public class TestRegionReplicas { } } - @Test(timeout = 300000) + @Test public 
void testFlushAndCompactionsInPrimary() throws Exception { long runtime = 30 * 1000; @@ -431,7 +431,7 @@ public class TestRegionReplicas { } } - @Test(timeout = 300000) + @Test public void testVerifySecondaryAbilityToReadWithOnFiles() throws Exception { // disable the store file refresh chore (we do this by hand) HTU.getConfiguration().setInt(StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD, 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java index 4e9972f6f98..56c335b46e3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java @@ -112,7 +112,7 @@ public class TestRegionReplicasAreDistributed { return HTU.getMiniHBaseCluster().getRegionServer(2); } - @Test(timeout = 60000) + @Test public void testRegionReplicasCreatedAreDistributed() throws Exception { try { checkAndAssertRegionDistribution(false); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java index c3ae3face45..2a604b40981 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java @@ -107,7 +107,7 @@ public class TestRegionReplicasWithModifyTable { return HTU.getMiniHBaseCluster().getRegionServer(2); } - @Test(timeout = 60000) + @Test public void testRegionReplicasUsingEnableTable() throws Exception { TableName tableName = null; try { @@ -128,7 +128,7 @@ public class TestRegionReplicasWithModifyTable { HTU.getAdmin().deleteTable(tableName); } - @Test(timeout = 60000) + @Test public void testRegionReplicasUsingEnableTableForMultipleRegions() throws Exception { TableName tableName = null; try { @@ -144,7 +144,7 @@ public class TestRegionReplicasWithModifyTable { } } - @Test(timeout = 60000) + @Test public void testRegionReplicasByEnableTableWhenReplicaCountIsIncreased() throws Exception { TableName tableName = null; try { @@ -160,7 +160,7 @@ public class TestRegionReplicasWithModifyTable { } } - @Test(timeout = 60000) + @Test public void testRegionReplicasByEnableTableWhenReplicaCountIsDecreased() throws Exception { TableName tableName = null; try { @@ -176,7 +176,7 @@ public class TestRegionReplicasWithModifyTable { } } - @Test(timeout = 60000) + @Test public void testRegionReplicasByEnableTableWhenReplicaCountIsDecreasedWithMultipleRegions() throws Exception { TableName tableName = null; @@ -193,7 +193,7 @@ public class TestRegionReplicasWithModifyTable { } } - @Test(timeout = 60000) + @Test public void testRegionReplicasByEnableTableWhenReplicaCountIsIncreasedWithmultipleRegions() throws Exception { TableName tableName = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java index d2ff7a30de5..1b51ae4e372 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java 
@@ -71,7 +71,7 @@ public class TestRegionServerHostname { TEST_UTIL.shutdownMiniCluster(); } - @Test (timeout=30000) + @Test public void testInvalidRegionServerHostnameAbortsServer() throws Exception { String invalidHostname = "hostAddr.invalid"; TEST_UTIL.getConfiguration().set(HRegionServer.RS_HOSTNAME_KEY, invalidHostname); @@ -86,7 +86,7 @@ public class TestRegionServerHostname { assertNull("Failed to validate against invalid hostname", hrs); } - @Test(timeout=120000) + @Test public void testRegionServerHostname() throws Exception { Enumeration netInterfaceList = NetworkInterface.getNetworkInterfaces(); while (netInterfaceList.hasMoreElements()) { @@ -122,7 +122,7 @@ public class TestRegionServerHostname { } } - @Test(timeout=30000) + @Test public void testConflictRegionServerHostnameConfigurationsAbortServer() throws Exception { Enumeration netInterfaceList = NetworkInterface.getNetworkInterfaces(); while (netInterfaceList.hasMoreElements()) { @@ -159,7 +159,7 @@ public class TestRegionServerHostname { } } - @Test(timeout=30000) + @Test public void testRegionServerHostnameReportedToMaster() throws Exception { TEST_UTIL.getConfiguration().setBoolean(HRegionServer.RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java index ad839bb97ca..af2861fe65d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java @@ -214,13 +214,13 @@ public class TestRegionServerNoMaster { } - @Test(timeout = 60000) + @Test public void testCloseByRegionServer() throws Exception { closeRegionNoZK(); openRegion(HTU, getRS(), hri); } - @Test(timeout = 60000) + @Test public void testMultipleCloseFromMaster() throws Exception { for (int i = 0; i < 10; i++) { AdminProtos.CloseRegionRequest crr = @@ -242,7 +242,7 @@ public class TestRegionServerNoMaster { /** * Test that if we do a close while opening it stops the opening. 
*/ - @Test(timeout = 60000) + @Test public void testCancelOpeningWithoutZK() throws Exception { // We close closeRegionNoZK(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java index aaee8cb7b7b..9345f7c94f3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java @@ -114,7 +114,7 @@ public class TestScannerRetriableFailure { } } - @Test(timeout=180000) + @Test public void testFaultyScanner() throws Exception { TableName tableName = TEST_TABLE.getTableName(); Table table = UTIL.createTable(tableName, FAMILY_NAME); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java index 5e3fa39bc3c..b3e9e8ceb8c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java @@ -247,7 +247,7 @@ public class TestSplitLogWorker { }; - @Test(timeout=60000) + @Test public void testAcquireTaskAtStartup() throws Exception { LOG.info("testAcquireTaskAtStartup"); SplitLogCounters.resetCounters(); @@ -283,7 +283,7 @@ public class TestSplitLogWorker { } } - @Test(timeout=60000) + @Test public void testRaceForTask() throws Exception { LOG.info("testRaceForTask"); SplitLogCounters.resetCounters(); @@ -317,7 +317,7 @@ public class TestSplitLogWorker { } } - @Test(timeout=60000) + @Test public void testPreemptTask() throws Exception { LOG.info("testPreemptTask"); SplitLogCounters.resetCounters(); @@ -350,7 +350,7 @@ public class TestSplitLogWorker { } } - @Test(timeout=60000) + @Test public void testMultipleTasks() throws Exception { LOG.info("testMultipleTasks"); SplitLogCounters.resetCounters(); @@ -394,7 +394,7 @@ public class TestSplitLogWorker { } } - @Test(timeout=60000) + @Test public void testRescan() throws Exception { LOG.info("testRescan"); SplitLogCounters.resetCounters(); @@ -446,7 +446,7 @@ public class TestSplitLogWorker { assertEquals(2, num); } - @Test(timeout=60000) + @Test public void testAcquireMultiTasks() throws Exception { LOG.info("testAcquireMultiTasks"); SplitLogCounters.resetCounters(); @@ -481,7 +481,7 @@ public class TestSplitLogWorker { * RS * @throws Exception */ - @Test(timeout=60000) + @Test public void testAcquireMultiTasksByAvgTasksPerRS() throws Exception { LOG.info("testAcquireMultiTasks"); SplitLogCounters.resetCounters(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java index 8b20b20bdf2..3f73d3712bc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java @@ -142,7 +142,7 @@ public class TestWalAndCompactingMemStoreFlush { conf.setDouble(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, 0.5); } - @Test(timeout = 180000) + @Test public void testSelectiveFlushWithEager() throws IOException { // Set up the configuration 
conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 300 * 1024); @@ -376,7 +376,7 @@ public class TestWalAndCompactingMemStoreFlush { /*------------------------------------------------------------------------------*/ /* Check the same as above but for index-compaction type of compacting memstore */ - @Test(timeout = 180000) + @Test public void testSelectiveFlushWithIndexCompaction() throws IOException { /*------------------------------------------------------------------------------*/ /* SETUP */ @@ -632,7 +632,7 @@ public class TestWalAndCompactingMemStoreFlush { HBaseTestingUtility.closeRegionAndWAL(region); } - @Test(timeout = 180000) + @Test public void testSelectiveFlushAndWALinDataCompaction() throws IOException { // Set up the configuration conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 300 * 1024); @@ -764,7 +764,7 @@ public class TestWalAndCompactingMemStoreFlush { HBaseTestingUtility.closeRegionAndWAL(region); } - @Test(timeout = 180000) + @Test public void testSelectiveFlushWithBasicAndMerge() throws IOException { // Set up the configuration conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 300 * 1024); @@ -907,7 +907,7 @@ public class TestWalAndCompactingMemStoreFlush { } // should end in 300 seconds (5 minutes) - @Test(timeout = 300000) + @Test public void testStressFlushAndWALinIndexCompaction() throws IOException { // Set up the configuration conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 600 * 1024); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java index 84a62169490..9322c5e5df8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java @@ -85,7 +85,7 @@ public abstract class AbstractTestLogRollPeriod { /** * Tests that the LogRoller perform the roll with some data in the log */ - @Test(timeout=60000) + @Test public void testWithEdits() throws Exception { final TableName tableName = TableName.valueOf("TestLogRollPeriodWithEdits"); final String family = "cf"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncLogRolling.java index aecb27384ca..992e6a52410 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncLogRolling.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncLogRolling.java @@ -52,7 +52,7 @@ public class TestAsyncLogRolling extends AbstractTestLogRolling { AbstractTestLogRolling.setUpBeforeClass(); } - @Test(timeout = 180000) + @Test public void testLogRollOnDatanodeDeath() throws IOException, InterruptedException { dfsCluster.startDataNodes(TEST_UTIL.getConfiguration(), 3, true, null, null); tableName = getName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java index 9c789ba90e7..f376cf1efa0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java @@ -127,7 +127,7 @@ public class TestFSHLog extends AbstractTestFSWAL { /** * Test case for 
https://issues.apache.org/jira/browse/HBASE-16721 */ - @Test (timeout = 30000) + @Test public void testUnflushedSeqIdTracking() throws IOException, InterruptedException { final String name = this.name.getMethodName(); final byte[] b = Bytes.toBytes("b"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java index 3476aafd3c5..e2b480919ee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java @@ -185,7 +185,7 @@ public class TestLogRollAbort { * comes back online after the master declared it dead and started to split. * Want log rolling after a master split to fail. See HBASE-2312. */ - @Test (timeout=300000) + @Test public void testLogRollAfterSplitStart() throws IOException { LOG.info("Verify wal roll after split starts will fail."); String logName = ServerName.valueOf("testLogRollAfterSplitStart", diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java index cd0b2bf90fe..75913d2dbf5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java @@ -149,7 +149,7 @@ public class TestMasterReplication { * replicated. It also tests that the puts and deletes are not replicated back * to the originating cluster. */ - @Test(timeout = 300000) + @Test public void testCyclicReplication1() throws Exception { LOG.info("testSimplePutDelete"); int numClusters = 2; @@ -179,7 +179,7 @@ public class TestMasterReplication { * {@link BaseReplicationEndpoint#canReplicateToSameCluster()} returns false, so the * ReplicationSource should terminate, and no further logs should get enqueued */ - @Test(timeout = 300000) + @Test public void testLoopedReplication() throws Exception { LOG.info("testLoopedReplication"); startMiniClusters(1); @@ -216,7 +216,7 @@ public class TestMasterReplication { * It tests the replication scenario involving 0 -> 1 -> 0. It does it by bulk loading a set of * HFiles to a table in each cluster, checking if it's replicated. */ - @Test(timeout = 300000) + @Test public void testHFileCyclicReplication() throws Exception { LOG.info("testHFileCyclicReplication"); int numClusters = 2; @@ -272,7 +272,7 @@ public class TestMasterReplication { * originating from itself and also the edits that it received using replication from a different * cluster. The scenario is explained in HBASE-9158 */ - @Test(timeout = 300000) + @Test public void testCyclicReplication2() throws Exception { LOG.info("testCyclicReplication2"); int numClusters = 3; @@ -324,7 +324,7 @@ public class TestMasterReplication { * It tests the multi slave hfile replication scenario involving 0 -> 1, 2. It does it by bulk * loading a set of HFiles to a table in master cluster, checking if it's replicated in its peers. */ - @Test(timeout = 300000) + @Test public void testHFileMultiSlaveReplication() throws Exception { LOG.info("testHFileMultiSlaveReplication"); int numClusters = 3; @@ -382,7 +382,7 @@ public class TestMasterReplication { * families. It does it by bulk loading a set of HFiles belonging to both the CFs of table and set * only one CF data to replicate. 
*/ - @Test(timeout = 300000) + @Test public void testHFileReplicationForConfiguredTableCfs() throws Exception { LOG.info("testHFileReplicationForConfiguredTableCfs"); int numClusters = 2; @@ -436,7 +436,7 @@ public class TestMasterReplication { /** * Tests cyclic replication scenario of 0 -> 1 -> 2 -> 1. */ - @Test(timeout = 300000) + @Test public void testCyclicReplication3() throws Exception { LOG.info("testCyclicReplication2"); int numClusters = 3; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java index 7b2e73fa651..df2b97c26be 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java @@ -126,7 +126,7 @@ public class TestMultiSlaveReplication { table.addFamily(fam); } - @Test(timeout=300000) + @Test public void testMultiSlaveReplication() throws Exception { LOG.info("testCyclicReplication"); MiniHBaseCluster master = utility1.startMiniCluster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java index 1dd0c19f960..cb5c12570b7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java @@ -371,7 +371,7 @@ public class TestPerTableCFReplication { assertTrue(tabCFsMap.get(tableName3).contains("cf3")); } - @Test(timeout=300000) + @Test public void testPerTableCFReplication() throws Exception { LOG.info("testPerTableCFReplication"); ReplicationAdmin replicationAdmin = new ReplicationAdmin(conf1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java index 96e0edefe8a..5492cf67d5c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java @@ -92,7 +92,7 @@ public class TestReplicationChangingPeerRegionservers extends TestReplicationBas } } - @Test(timeout = 300000) + @Test public void testChangingNumberOfPeerRegionServers() throws IOException, InterruptedException { LOG.info("testSimplePutDelete"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java index 5bdcd459fdd..2aa8e8361e7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java @@ -52,7 +52,7 @@ public class TestReplicationDisableInactivePeer extends TestReplicationBase { * * @throws Exception */ - @Test(timeout = 600000) + @Test public void testDisableInactivePeer() throws Exception { // enabling and shutdown the peer diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java index 6de4a4269b4..ffa03a2d506 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java @@ -96,7 +96,7 @@ public class TestReplicationDroppedTables extends TestReplicationBase { } } - @Test(timeout = 600000) + @Test public void testEditsStuckBehindDroppedTable() throws Exception { // Sanity check // Make sure by default edits for dropped tables stall the replication queue, even when the @@ -104,14 +104,14 @@ public class TestReplicationDroppedTables extends TestReplicationBase { testEditsBehindDroppedTable(false, "test_dropped"); } - @Test(timeout = 600000) + @Test public void testEditsDroppedWithDroppedTable() throws Exception { // Make sure by default edits for dropped tables are themselves dropped when the // table(s) in question have been deleted on both ends. testEditsBehindDroppedTable(true, "test_dropped"); } - @Test(timeout = 600000) + @Test public void testEditsDroppedWithDroppedTableNS() throws Exception { // also try with a namespace Connection connection1 = ConnectionFactory.createConnection(conf1); @@ -192,7 +192,7 @@ public class TestReplicationDroppedTables extends TestReplicationBase { conf1.setBoolean(HConstants.REPLICATION_DROP_ON_DELETED_TABLE_KEY, false); } - @Test(timeout = 600000) + @Test public void testEditsBehindDroppedTableTiming() throws Exception { conf1.setBoolean(HConstants.REPLICATION_DROP_ON_DELETED_TABLE_KEY, true); conf1.setInt(HConstants.REPLICATION_SOURCE_MAXTHREADS_KEY, 1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java index 25cc3c43a10..173287cf2fb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java @@ -126,7 +126,7 @@ public class TestReplicationEndpoint extends TestReplicationBase { }); } - @Test (timeout=120000) + @Test public void testCustomReplicationEndpoint() throws Exception { // test installing a custom replication endpoint other than the default one. 
admin.addPeer("testCustomReplicationEndpoint", @@ -165,7 +165,7 @@ public class TestReplicationEndpoint extends TestReplicationBase { admin.removePeer("testCustomReplicationEndpoint"); } - @Test (timeout=120000) + @Test public void testReplicationEndpointReturnsFalseOnReplicate() throws Exception { Assert.assertEquals(0, ReplicationEndpointForTest.replicateCount.get()); Assert.assertTrue(!ReplicationEndpointReturningFalse.replicated.get()); @@ -201,7 +201,7 @@ public class TestReplicationEndpoint extends TestReplicationBase { admin.removePeer("testReplicationEndpointReturnsFalseOnReplicate"); } - @Test (timeout=120000) + @Test public void testInterClusterReplication() throws Exception { final String id = "testInterClusterReplication"; @@ -247,7 +247,7 @@ public class TestReplicationEndpoint extends TestReplicationBase { utility1.deleteTableData(tableName); } - @Test (timeout=120000) + @Test public void testWALEntryFilterFromReplicationEndpoint() throws Exception { ReplicationPeerConfig rpc = new ReplicationPeerConfig().setClusterKey(ZKConfig.getZooKeeperClusterKey(conf1)) .setReplicationEndpointImpl(ReplicationEndpointWithWALEntryFilter.class.getName()); @@ -276,7 +276,7 @@ public class TestReplicationEndpoint extends TestReplicationBase { admin.removePeer("testWALEntryFilterFromReplicationEndpoint"); } - @Test (timeout=120000, expected=IOException.class) + @Test (expected=IOException.class) public void testWALEntryFilterAddValidation() throws Exception { ReplicationPeerConfig rpc = new ReplicationPeerConfig().setClusterKey(ZKConfig.getZooKeeperClusterKey(conf1)) .setReplicationEndpointImpl(ReplicationEndpointWithWALEntryFilter.class.getName()); @@ -286,7 +286,7 @@ public class TestReplicationEndpoint extends TestReplicationBase { admin.addPeer("testWALEntryFilterAddValidation", rpc); } - @Test (timeout=120000, expected=IOException.class) + @Test (expected=IOException.class) public void testWALEntryFilterUpdateValidation() throws Exception { ReplicationPeerConfig rpc = new ReplicationPeerConfig().setClusterKey(ZKConfig.getZooKeeperClusterKey(conf1)) .setReplicationEndpointImpl(ReplicationEndpointWithWALEntryFilter.class.getName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillMasterRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillMasterRS.java index c92c4d4219f..4b8e634940e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillMasterRS.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillMasterRS.java @@ -38,7 +38,7 @@ public class TestReplicationKillMasterRS extends TestReplicationKillRS { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestReplicationKillMasterRS.class); - @Test(timeout=300000) + @Test public void killOneMasterRS() throws Exception { loadTableAndKillRS(utility1); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java index 15f667bc8dd..a6a5738c277 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java @@ -38,7 +38,7 @@ public class TestReplicationKillSlaveRS extends TestReplicationKillRS { public static final HBaseClassTestRule CLASS_RULE = 
HBaseClassTestRule.forClass(TestReplicationKillSlaveRS.class); - @Test(timeout=300000) + @Test public void killOneSlaveRS() throws Exception { loadTableAndKillRS(utility2); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java index 0bd0260bf74..7f2a0f1cf73 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java @@ -57,7 +57,7 @@ public class TestReplicationStatus extends TestReplicationBase { * test : status.getLoad(server).getReplicationLoadSink() * * @throws Exception */ - @Test(timeout = 300000) + @Test public void testReplicationStatus() throws Exception { LOG.info("testReplicationStatus"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java index 1672390a24e..ec7f11899ce 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java @@ -105,7 +105,7 @@ public class TestReplicationSyncUpTool extends TestReplicationBase { * check's gone Also check the puts and deletes are not replicated back to * the originating cluster. */ - @Test(timeout = 300000) + @Test public void testSyncUpTool() throws Exception { /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java index 69500a1730e..41737907ca7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java @@ -143,7 +143,7 @@ public class TestReplicationTrackerZKImpl { assertEquals(0, rt.getListOfRegionServers().size()); } - @Test(timeout = 30000) + @Test public void testRegionServerRemovedEvent() throws Exception { ZKUtil.createAndWatch(zkw, ZNodePaths.joinZNode(zkw.znodePaths.rsZNode, "hostname2.example.org:1234"), @@ -159,7 +159,7 @@ public class TestReplicationTrackerZKImpl { assertEquals("hostname2.example.org:1234", rsRemovedData); } - @Test(timeout = 30000) + @Test public void testPeerRemovedEvent() throws Exception { rp.registerPeer("5", new ReplicationPeerConfig().setClusterKey(utility.getClusterKey())); rt.registerListener(new DummyReplicationListener()); @@ -171,7 +171,7 @@ public class TestReplicationTrackerZKImpl { assertEquals("5", peerRemovedData); } - @Test(timeout = 30000) + @Test public void testPeerListChangedEvent() throws Exception { // add a peer rp.registerPeer("5", new ReplicationPeerConfig().setClusterKey(utility.getClusterKey())); @@ -194,7 +194,7 @@ public class TestReplicationTrackerZKImpl { rp.unregisterPeer("5"); } - @Test(timeout = 30000) + @Test public void testPeerNameControl() throws Exception { int exists = 0; int hyphen = 0; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java index 0a477375b41..8c5299e9027 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java @@ -171,7 +171,7 @@ public class TestReplicationWithTags { utility1.shutdownMiniCluster(); } - @Test(timeout = 300000) + @Test public void testReplicationWithCellTags() throws Exception { LOG.info("testSimplePutDelete"); Put put = new Put(ROW); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java index 80eed960d5c..61a1fbfc569 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java @@ -157,7 +157,7 @@ public class TestRegionReplicaReplicationEndpoint { admin.close(); } - @Test (timeout=240000) + @Test public void testRegionReplicaReplicationPeerIsCreatedForModifyTable() throws Exception { // modify a table by adding region replicas. Check whether the replication peer is created // and replication started. @@ -280,22 +280,22 @@ public class TestRegionReplicaReplicationEndpoint { } } - @Test(timeout = 240000) + @Test public void testRegionReplicaReplicationWith2Replicas() throws Exception { testRegionReplicaReplication(2); } - @Test(timeout = 240000) + @Test public void testRegionReplicaReplicationWith3Replicas() throws Exception { testRegionReplicaReplication(3); } - @Test(timeout = 240000) + @Test public void testRegionReplicaReplicationWith10Replicas() throws Exception { testRegionReplicaReplication(10); } - @Test (timeout = 240000) + @Test public void testRegionReplicaWithoutMemstoreReplication() throws Exception { int regionReplication = 3; final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -327,7 +327,7 @@ public class TestRegionReplicaReplicationEndpoint { } } - @Test (timeout = 240000) + @Test public void testRegionReplicaReplicationForFlushAndCompaction() throws Exception { // Tests a table with region replication 3. Writes some data, and causes flushes and // compactions. Verifies that the data is readable from the replicas. 
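
The test classes touched here already declare a class-level HBaseClassTestRule (see the CLASS_RULE fields in the TestReplicationKillMasterRS and TestReplicationKillSlaveRS hunks above), which is presumably what now bounds test runtime instead of per-method timeouts. A minimal sketch of that pattern, assuming the usual JUnit @ClassRule wiring and the MediumTests category; the class name is hypothetical and not part of this patch:

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(MediumTests.class)
public class ExampleReplicationTest { // hypothetical class, for illustration only

  // One class-wide rule replaces the per-method @Test(timeout=...) settings.
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(ExampleReplicationTest.class);

  @Test // no timeout element; the class rule governs how long the test may run
  public void testReplicationRoundTrip() throws Exception {
    // test body omitted
  }
}
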
Note that this @@ -362,12 +362,12 @@ public class TestRegionReplicaReplicationEndpoint { } } - @Test (timeout = 240000) + @Test public void testRegionReplicaReplicationIgnoresDisabledTables() throws Exception { testRegionReplicaReplicationIgnoresDisabledTables(false); } - @Test (timeout = 240000) + @Test public void testRegionReplicaReplicationIgnoresDroppedTables() throws Exception { testRegionReplicaReplicationIgnoresDisabledTables(true); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java index 15066dd2642..ab67d94bee9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java @@ -174,7 +174,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster { } } - @Test (timeout = 240000) + @Test public void testReplayCallable() throws Exception { // tests replaying the edits to a secondary region replica using the Callable directly openRegion(HTU, rs0, hriSecondary); @@ -214,7 +214,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster { } } - @Test (timeout = 240000) + @Test public void testReplayCallableWithRegionMove() throws Exception { // tests replaying the edits to a secondary region replica using the Callable directly while // the region is moved to another location.It tests handling of RME. @@ -249,7 +249,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster { connection.close(); } - @Test (timeout = 240000) + @Test public void testRegionReplicaReplicationEndpointReplicate() throws Exception { // tests replaying the edits to a secondary region replica using the RRRE.replicate() openRegion(HTU, rs0, hriSecondary); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java index 5f7be5c0487..c4d529e23c1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java @@ -41,7 +41,7 @@ public class TestReplicationThrottler { /** * unit test for throttling */ - @Test(timeout=10000) + @Test public void testThrottling() { LOG.info("testThrottling"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java index e62c76e1183..e6f07f1dc59 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java @@ -99,7 +99,7 @@ public class TestAccessControlFilter extends SecureTestUtil { TEST_UTIL.shutdownMiniCluster(); } - @Test (timeout=180000) + @Test public void testQualifierAccess() throws Exception { final Table table = createTable(TEST_UTIL, TABLE, new byte[][] { FAMILY }); try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index ba781d6ca17..100c0cc02a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -340,7 +340,7 @@ public class TestAccessController extends SecureTestUtil { TEST_TABLE.getNamespaceAsString()).size()); } - @Test (timeout=180000) + @Test public void testUnauthorizedShutdown() throws Exception { AccessTestAction action = new AccessTestAction() { @Override public Object run() throws Exception { @@ -353,7 +353,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testUnauthorizedStopMaster() throws Exception { AccessTestAction action = new AccessTestAction() { @Override public Object run() throws Exception { @@ -367,7 +367,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testSecurityCapabilities() throws Exception { List capabilities = TEST_UTIL.getConnection().getAdmin() .getSecurityCapabilities(); @@ -377,7 +377,7 @@ public class TestAccessController extends SecureTestUtil { capabilities.contains(SecurityCapability.CELL_AUTHORIZATION)); } - @Test (timeout=180000) + @Test public void testTableCreate() throws Exception { AccessTestAction createTable = new AccessTestAction() { @Override @@ -397,7 +397,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testTableModify() throws Exception { AccessTestAction modifyTable = new AccessTestAction() { @Override @@ -416,7 +416,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(modifyTable, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testTableDelete() throws Exception { AccessTestAction deleteTable = new AccessTestAction() { @Override @@ -432,7 +432,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(deleteTable, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testTableTruncate() throws Exception { AccessTestAction truncateTable = new AccessTestAction() { @Override @@ -449,7 +449,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(truncateTable, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testTableDisable() throws Exception { AccessTestAction disableTable = new AccessTestAction() { @Override @@ -478,7 +478,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_CREATE, USER_GROUP_ADMIN, USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testTableEnable() throws Exception { AccessTestAction enableTable = new AccessTestAction() { @Override @@ -592,7 +592,7 @@ public class TestAccessController extends SecureTestUtil { getProceduresAction, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testGetLocks() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -607,7 +607,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_READ, USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + 
@Test public void testMove() throws Exception { List regions; try (RegionLocator locator = systemUserConnection.getRegionLocator(TEST_TABLE)) { @@ -630,7 +630,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testAssign() throws Exception { List regions; try (RegionLocator locator = systemUserConnection.getRegionLocator(TEST_TABLE)) { @@ -651,7 +651,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testUnassign() throws Exception { List regions; try (RegionLocator locator = systemUserConnection.getRegionLocator(TEST_TABLE)) { @@ -672,7 +672,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testRegionOffline() throws Exception { List regions; try (RegionLocator locator = systemUserConnection.getRegionLocator(TEST_TABLE)) { @@ -693,7 +693,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testSetSplitOrMergeEnabled() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -709,7 +709,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testBalance() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -724,7 +724,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testBalanceSwitch() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -739,7 +739,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testShutdown() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -754,7 +754,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testStopMaster() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -775,7 +775,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(action, USER_NONE, USER_RO, USER_GROUP_ADMIN, USER_GROUP_READ, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testSplitWithSplitRow() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); createTestTable(tableName); @@ -795,7 +795,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testFlush() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -811,7 +811,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(action, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testCompact() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -839,7 +839,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testRead() throws Exception { // get action AccessTestAction getAction = new AccessTestAction() { @@ -879,7 +879,7 @@ public class 
TestAccessController extends SecureTestUtil { verifyRead(scanAction); } - @Test (timeout=180000) + @Test // test put, delete, increment public void testWrite() throws Exception { // put action @@ -928,7 +928,7 @@ public class TestAccessController extends SecureTestUtil { verifyWrite(incrementAction); } - @Test (timeout=180000) + @Test public void testReadWrite() throws Exception { // action for checkAndDelete AccessTestAction checkAndDeleteAction = new AccessTestAction() { @@ -963,7 +963,7 @@ public class TestAccessController extends SecureTestUtil { verifyReadWrite(checkAndPut); } - @Test (timeout=180000) + @Test public void testBulkLoad() throws Exception { try { FileSystem fs = TEST_UTIL.getTestFileSystem(); @@ -1080,7 +1080,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testAppend() throws Exception { AccessTestAction appendAction = new AccessTestAction() { @@ -1107,7 +1107,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_ADMIN); } - @Test (timeout=180000) + @Test public void testGrantRevoke() throws Exception { AccessTestAction grantAction = new AccessTestAction() { @Override @@ -1189,7 +1189,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testPostGrantRevoke() throws Exception { final TableName tableName = TableName.valueOf("TempTable"); @@ -1443,7 +1443,7 @@ public class TestAccessController extends SecureTestUtil { return perms.contains(userPermission); } - @Test (timeout=180000) + @Test public void testPostGrantRevokeAtQualifierLevel() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final byte[] family1 = Bytes.toBytes("f1"); @@ -1546,7 +1546,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testPermissionList() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); final byte[] family1 = Bytes.toBytes("f1"); @@ -1676,7 +1676,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testGlobalPermissionList() throws Exception { List perms; Table acl = systemUserConnection.getTable(AccessControlLists.ACL_TABLE_NAME); @@ -1710,7 +1710,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(action, USER_CREATE, USER_RW, USER_NONE, USER_RO); } - @Test (timeout=180000) + @Test public void testCheckPermissions() throws Exception { // -------------------------------------- // test global permissions @@ -1877,7 +1877,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testStopRegionServer() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -1892,7 +1892,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testRollWALWriterRequest() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -1907,7 +1907,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testOpenRegion() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -1922,7 +1922,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void 
testCloseRegion() throws Exception { AccessTestAction action = new AccessTestAction() { @Override @@ -1937,7 +1937,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_READ, USER_GROUP_WRITE); } - @Test (timeout=180000) + @Test public void testSnapshot() throws Exception { Admin admin = TEST_UTIL.getAdmin(); final HTableDescriptor htd = admin.getTableDescriptor(TEST_TABLE); @@ -1996,7 +1996,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_READ, USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testSnapshotWithOwner() throws Exception { Admin admin = TEST_UTIL.getAdmin(); final HTableDescriptor htd = admin.getTableDescriptor(TEST_TABLE); @@ -2052,7 +2052,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testGlobalAuthorizationForNewRegisteredRS() throws Exception { LOG.debug("Test for global authorization for a new registered RegionServer."); MiniHBaseCluster hbaseCluster = TEST_UTIL.getHBaseCluster(); @@ -2115,7 +2115,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testTableDescriptorsEnumeration() throws Exception { User TABLE_ADMIN = User.createUserForTesting(conf, "UserA", new String[0]); @@ -2159,7 +2159,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testTableNameEnumeration() throws Exception { AccessTestAction listTablesAction = new AccessTestAction() { @Override @@ -2181,7 +2181,7 @@ public class TestAccessController extends SecureTestUtil { verifyIfEmptyList(listTablesAction, USER_NONE); } - @Test (timeout=180000) + @Test public void testTableDeletion() throws Exception { User TABLE_ADMIN = User.createUserForTesting(conf, "TestUser", new String[0]); final TableName tableName = TableName.valueOf(name.getMethodName()); @@ -2220,7 +2220,7 @@ public class TestAccessController extends SecureTestUtil { createTable(TEST_UTIL, htd, new byte[][] { Bytes.toBytes("s") }); } - @Test (timeout=180000) + @Test public void testNamespaceUserGrant() throws Exception { AccessTestAction getAction = new AccessTestAction() { @Override @@ -2244,7 +2244,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(getAction, USER_NONE); } - @Test (timeout=180000) + @Test public void testAccessControlClientGrantRevoke() throws Exception { // Create user for testing, who has no READ privileges by default. User testGrantRevoke = User.createUserForTesting(conf, "testGrantRevoke", new String[0]); @@ -2283,7 +2283,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(getAction, testGrantRevoke); } - @Test (timeout=180000) + @Test public void testAccessControlClientGlobalGrantRevoke() throws Exception { // Create user for testing, who has no READ privileges by default. 
User testGlobalGrantRevoke = User.createUserForTesting(conf, @@ -2329,7 +2329,7 @@ public class TestAccessController extends SecureTestUtil { } - @Test(timeout = 180000) + @Test public void testAccessControlClientMultiGrantRevoke() throws Exception { User testGrantRevoke = User.createUserForTesting(conf, "testGrantRevoke", new String[0]); @@ -2452,7 +2452,7 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(putAction, testGrantRevoke); } - @Test (timeout=180000) + @Test public void testAccessControlClientGrantRevokeOnNamespace() throws Exception { // Create user for testing, who has no READ privileges by default. User testNS = User.createUserForTesting(conf, "testNS", new String[0]); @@ -2542,7 +2542,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testCoprocessorExec() throws Exception { // Set up our ping endpoint service on all regions of our test table for (JVMClusterUtil.RegionServerThread thread: @@ -2593,7 +2593,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testSetQuota() throws Exception { AccessTestAction setUserQuotaAction = new AccessTestAction() { @Override @@ -2660,7 +2660,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_READ, USER_GROUP_WRITE, USER_GROUP_CREATE); } - @Test (timeout=180000) + @Test public void testGetNamespacePermission() throws Exception { String namespace = "testGetNamespacePermission"; NamespaceDescriptor desc = NamespaceDescriptor.create(namespace).build(); @@ -2703,7 +2703,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testTruncatePerms() throws Exception { try { List existingPerms = AccessControlClient.getUserPermissions( @@ -2736,7 +2736,7 @@ public class TestAccessController extends SecureTestUtil { }; } - @Test (timeout=180000) + @Test public void testAccessControlClientUserPerms() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); createTestTable(tableName); @@ -2755,7 +2755,7 @@ public class TestAccessController extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testAccessControllerUserPermsRegexHandling() throws Exception { User testRegexHandler = User.createUserForTesting(conf, "testRegexHandling", new String[0]); @@ -2812,7 +2812,7 @@ public class TestAccessController extends SecureTestUtil { USER_GROUP_ADMIN); } - @Test (timeout=180000) + @Test public void testPrepareAndCleanBulkLoad() throws Exception { AccessTestAction prepareBulkLoadAction = new AccessTestAction() { @Override @@ -2832,7 +2832,7 @@ public class TestAccessController extends SecureTestUtil { verifyAnyCreate(cleanupBulkLoadAction); } - @Test (timeout=180000) + @Test public void testReplicateLogEntries() throws Exception { AccessTestAction replicateLogEntriesAction = new AccessTestAction() { @Override diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java index 17de97bc135..f023aed1930 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java @@ -177,7 +177,7 @@ public class TestAccessController2 extends SecureTestUtil { assertEquals(0, AccessControlLists.getNamespacePermissions(conf, 
namespace).size()); } - @Test (timeout=180000) + @Test public void testCreateWithCorrectOwner() throws Exception { // Create a test user final User testUser = User.createUserForTesting(TEST_UTIL.getConfiguration(), "TestUser", @@ -214,7 +214,7 @@ public class TestAccessController2 extends SecureTestUtil { assertTrue(perms.get(0).implies(Permission.Action.ADMIN)); } - @Test (timeout=180000) + @Test public void testCreateTableWithGroupPermissions() throws Exception { grantGlobal(TEST_UTIL, TESTGROUP_1_NAME, Action.CREATE); try { @@ -239,7 +239,7 @@ public class TestAccessController2 extends SecureTestUtil { } } - @Test (timeout=180000) + @Test public void testACLTableAccess() throws Exception { final Configuration conf = TEST_UTIL.getConfiguration(); @@ -348,7 +348,7 @@ public class TestAccessController2 extends SecureTestUtil { /* * Test table scan operation at table, column family and column qualifier level. */ - @Test(timeout = 300000) + @Test public void testPostGrantAndRevokeScanAction() throws Exception { AccessTestAction scanTableActionForGroupWithTableLevelAccess = new AccessTestAction() { @Override @@ -486,7 +486,7 @@ public class TestAccessController2 extends SecureTestUtil { public static class MyAccessController extends AccessController { } - @Test (timeout=180000) + @Test public void testCoprocessorLoading() throws Exception { MasterCoprocessorHost cpHost = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterCoprocessorHost(); @@ -500,7 +500,7 @@ public class TestAccessController2 extends SecureTestUtil { ACCESS_CONTROLLER, Coprocessor.PRIORITY_HIGHEST, 1, conf); } - @Test (timeout=180000) + @Test public void testACLZNodeDeletion() throws Exception { String baseAclZNode = "/hbase/acl/"; String ns = "testACLZNodeDeletionNamespace"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java index 54cf79da6e0..ec741fb473b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java @@ -148,7 +148,7 @@ public class TestCellACLs extends SecureTestUtil { Threads.sleep(1000); } - @Test (timeout=120000) + @Test public void testCellPermissions() throws Exception { // store two sets of values, one store with a cell level ACL, and one without verifyAllowed(new AccessTestAction() { @@ -375,7 +375,7 @@ public class TestCellACLs extends SecureTestUtil { * Insure we are not granting access in the absence of any cells found * when scanning for covered cells. 
*/ - @Test (timeout=120000) + @Test public void testCoveringCheck() throws Exception { // Grant read access to USER_OTHER grantOnTable(TEST_UTIL, USER_OTHER.getShortName(), TEST_TABLE.getTableName(), TEST_FAMILY, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java index 7a5232a7106..ce8e2ebaad7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java @@ -1153,7 +1153,7 @@ public class TestVisibilityLabelsWithDeletes extends VisibilityLabelsWithDeletes } } - @Test(timeout = 180000) + @Test public void testDeleteColumnWithLatestTimeStampWhenNoVersionMatches() throws Exception { setAuths(); final TableName tableName = TableName.valueOf(testName.getMethodName()); @@ -1874,7 +1874,7 @@ public class TestVisibilityLabelsWithDeletes extends VisibilityLabelsWithDeletes } } - @Test(timeout = 180000) + @Test public void testSpecificDeletesFollowedByDeleteFamily() throws Exception { setAuths(); final TableName tableName = TableName.valueOf(testName.getMethodName()); @@ -1961,7 +1961,7 @@ public class TestVisibilityLabelsWithDeletes extends VisibilityLabelsWithDeletes } } - @Test(timeout = 180000) + @Test public void testSpecificDeletesFollowedByDeleteFamily1() throws Exception { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java index 833590887a2..8b393bc3ae8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java @@ -122,7 +122,7 @@ public class TestWithDisabledAuthorization { TEST_UTIL.shutdownMiniCluster(); } - @Test (timeout=180000) + @Test public void testManageUserAuths() throws Throwable { // Even though authorization is disabled, we should be able to manage user auths @@ -200,7 +200,7 @@ public class TestWithDisabledAuthorization { assertEquals(0, authsList.size()); } - @Test (timeout=180000) + @Test public void testPassiveVisibility() throws Exception { // No values should be filtered regardless of authorization if we are passive try (Table t = createTableAndWriteDataWithLabels( diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java index d84297fc286..de58fe5e08a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java @@ -106,7 +106,7 @@ public class TestRegionSnapshotTask { * not be moved around if a snapshot operation is in progress. 
* See HBASE-18398 */ - @Test(timeout = 30000) + @Test public void testAddRegionWithCompactions() throws Exception { final TableName tableName = TableName.valueOf("test_table"); Table table = setupTable(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java index 9899b7bbe1c..2f6c66fa558 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java @@ -67,7 +67,7 @@ public class TestSnapshotClientRetries { TEST_UTIL.shutdownMiniCluster(); } - @Test(timeout = 60000, expected=SnapshotExistsException.class) + @Test(expected=SnapshotExistsException.class) public void testSnapshotAlreadyExist() throws Exception { final String snapshotName = "testSnapshotAlreadyExist"; TEST_UTIL.createTable(TEST_TABLE.getTableName(), "f"); @@ -75,7 +75,7 @@ public class TestSnapshotClientRetries { snapshotAndAssertOneRetry(snapshotName, TEST_TABLE.getTableName()); } - @Test(timeout = 60000, expected=SnapshotDoesNotExistException.class) + @Test(expected=SnapshotDoesNotExistException.class) public void testCloneNonExistentSnapshot() throws Exception { final String snapshotName = "testCloneNonExistentSnapshot"; cloneAndAssertOneRetry(snapshotName, TEST_TABLE.getTableName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java index e744394460b..9da82a4936d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java @@ -116,7 +116,7 @@ public class TestLoadIncrementalHFiles { util.shutdownMiniCluster(); } - @Test(timeout = 120000) + @Test public void testSimpleLoadWithMap() throws Exception { runTest("testSimpleLoadWithMap", BloomType.NONE, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, @@ -127,14 +127,14 @@ public class TestLoadIncrementalHFiles { /** * Test case that creates some regions and loads HFiles that fit snugly inside those regions */ - @Test(timeout = 120000) + @Test public void testSimpleLoad() throws Exception { runTest("testSimpleLoad", BloomType.NONE, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }); } - @Test(timeout = 120000) + @Test public void testSimpleLoadWithFileCopy() throws Exception { String testName = tn.getMethodName(); final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName); @@ -147,7 +147,7 @@ public class TestLoadIncrementalHFiles { /** * Test case that creates some regions and loads HFiles that cross the boundaries of those regions */ - @Test(timeout = 120000) + @Test public void testRegionCrossingLoad() throws Exception { runTest("testRegionCrossingLoad", BloomType.NONE, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, @@ -157,7 +157,7 @@ public class TestLoadIncrementalHFiles { /** * Test loading into a column family that has a ROW bloom filter. 
*/ - @Test(timeout = 60000) + @Test public void testRegionCrossingRowBloom() throws Exception { runTest("testRegionCrossingLoadRowBloom", BloomType.ROW, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, @@ -167,7 +167,7 @@ public class TestLoadIncrementalHFiles { /** * Test loading into a column family that has a ROWCOL bloom filter. */ - @Test(timeout = 120000) + @Test public void testRegionCrossingRowColBloom() throws Exception { runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, @@ -178,7 +178,7 @@ public class TestLoadIncrementalHFiles { * Test case that creates some regions and loads HFiles that have different region boundaries than * the table pre-split. */ - @Test(timeout = 120000) + @Test public void testSimpleHFileSplit() throws Exception { runTest("testHFileSplit", BloomType.NONE, new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"), @@ -191,7 +191,7 @@ public class TestLoadIncrementalHFiles { * Test case that creates some regions and loads HFiles that cross the boundaries and have * different region boundaries than the table pre-split. */ - @Test(timeout = 60000) + @Test public void testRegionCrossingHFileSplit() throws Exception { testRegionCrossingHFileSplit(BloomType.NONE); } @@ -200,7 +200,7 @@ public class TestLoadIncrementalHFiles { * Test case that creates some regions and loads HFiles that cross the boundaries have a ROW bloom * filter and a different region boundaries than the table pre-split. */ - @Test(timeout = 120000) + @Test public void testRegionCrossingHFileSplitRowBloom() throws Exception { testRegionCrossingHFileSplit(BloomType.ROW); } @@ -209,7 +209,7 @@ public class TestLoadIncrementalHFiles { * Test case that creates some regions and loads HFiles that cross the boundaries have a ROWCOL * bloom filter and a different region boundaries than the table pre-split. */ - @Test(timeout = 120000) + @Test public void testRegionCrossingHFileSplitRowColBloom() throws Exception { testRegionCrossingHFileSplit(BloomType.ROWCOL); } @@ -412,7 +412,7 @@ public class TestLoadIncrementalHFiles { * "hbase.client.rpc.codec" = KeyValueCodecWithTags so that the client can get tags in the * responses. */ - @Test(timeout = 60000) + @Test public void testTagsSurviveBulkLoadSplit() throws Exception { Path dir = util.getDataTestDirOnTestFS(tn.getMethodName()); FileSystem fs = util.getTestFileSystem(); @@ -451,7 +451,7 @@ public class TestLoadIncrementalHFiles { /** * Test loading into a column family that does not exist. 
*/ - @Test(timeout = 60000) + @Test public void testNonexistentColumnFamilyLoad() throws Exception { String testName = tn.getMethodName(); byte[][][] hFileRanges = @@ -480,12 +480,12 @@ public class TestLoadIncrementalHFiles { } } - @Test(timeout = 120000) + @Test public void testNonHfileFolderWithUnmatchedFamilyName() throws Exception { testNonHfileFolder("testNonHfileFolderWithUnmatchedFamilyName", true); } - @Test(timeout = 120000) + @Test public void testNonHfileFolder() throws Exception { testNonHfileFolder("testNonHfileFolder", false); } @@ -549,7 +549,7 @@ public class TestLoadIncrementalHFiles { } } - @Test(timeout = 120000) + @Test public void testSplitStoreFile() throws IOException { Path dir = util.getDataTestDirOnTestFS("testSplitHFile"); FileSystem fs = util.getTestFileSystem(); @@ -634,7 +634,7 @@ public class TestLoadIncrementalHFiles { map.put(last, value - 1); } - @Test(timeout = 120000) + @Test public void testInferBoundaries() { TreeMap map = new TreeMap<>(Bytes.BYTES_COMPARATOR); @@ -696,7 +696,7 @@ public class TestLoadIncrementalHFiles { } } - @Test(timeout = 60000) + @Test public void testLoadTooMayHFiles() throws Exception { Path dir = util.getDataTestDirOnTestFS("testLoadTooMayHFiles"); FileSystem fs = util.getTestFileSystem(); @@ -730,7 +730,7 @@ public class TestLoadIncrementalHFiles { loader.run(args); } - @Test(timeout = 120000) + @Test public void testTableWithCFNameStartWithUnderScore() throws Exception { Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore"); FileSystem fs = util.getTestFileSystem(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java index b9c2d18e183..7e051b38084 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java @@ -277,7 +277,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { * Test that shows that exception thrown from the RS side will result in an exception on the * LIHFile client. */ - @Test(expected = IOException.class, timeout = 120000) + @Test(expected = IOException.class) public void testBulkLoadPhaseFailure() throws Exception { final TableName table = TableName.valueOf(name.getMethodName()); final AtomicInteger attmptedCalls = new AtomicInteger(); @@ -391,7 +391,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { * atomic bulk load call. We cannot use presplitting to test this path, so we actually inject a * split just before the atomic region load. */ - @Test(timeout = 120000) + @Test public void testSplitWhileBulkLoadPhase() throws Exception { final TableName table = TableName.valueOf(name.getMethodName()); try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { @@ -437,7 +437,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { * This test splits a table and attempts to bulk load. The bulk import files should be split * before atomically importing. */ - @Test(timeout = 120000) + @Test public void testGroupOrSplitPresplit() throws Exception { final TableName table = TableName.valueOf(name.getMethodName()); try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { @@ -477,7 +477,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { * This test creates a table with many small regions. 
The bulk load files would be splitted * multiple times before all of them can be loaded successfully. */ - @Test(timeout = 120000) + @Test public void testSplitTmpFileCleanUp() throws Exception { final TableName table = TableName.valueOf(name.getMethodName()); byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000010"), @@ -512,7 +512,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { /** * This simulates an remote exception which should cause LIHF to exit with an exception. */ - @Test(expected = IOException.class, timeout = 120000) + @Test(expected = IOException.class) public void testGroupOrSplitFailure() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { @@ -546,7 +546,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { fail("doBulkLoad should have thrown an exception"); } - @Test(timeout = 120000) + @Test public void testGroupOrSplitWhenRegionHoleExistsInMeta() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000100") }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFilesSplitRecovery.java index 20e76beb6bd..7fe79a9805e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFilesSplitRecovery.java @@ -65,7 +65,7 @@ public class TestSecureLoadIncrementalHFilesSplitRecovery } // Disabling this test as it does not work in secure mode - @Test(timeout = 180000) + @Test @Override public void testBulkLoadPhaseFailure() { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java index 9639af0f19c..e8bbf7b14ad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java @@ -206,7 +206,7 @@ public class TestBoundedPriorityBlockingQueue { assertNull(null, queue.poll()); } - @Test(timeout=10000) + @Test public void testPollInExecutor() throws InterruptedException { final TestObject testObj = new TestObject(0, 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java index 3c526589ad8..85ab16d6851 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java @@ -100,7 +100,7 @@ public class TestHBaseFsckMOB extends BaseTestHBaseFsck { /** * This creates a table and then corrupts a mob file. Hbck should quarantine the file. 
*/ - @Test(timeout=180000) + @Test public void testQuarantineCorruptMobFile() throws Exception { TableName table = TableName.valueOf(name.getMethodName()); try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java index 51f518a746e..8b0b34841e0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java @@ -115,7 +115,7 @@ public class TestIdReadWriteLock { } - @Test(timeout = 60000) + @Test public void testMultipleClients() throws Exception { ExecutorService exec = Executors.newFixedThreadPool(NUM_THREADS); try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java index d318eec2086..c150196e2ca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java @@ -48,7 +48,7 @@ public class TestMiniClusterLoadParallel } @Override - @Test(timeout=TIMEOUT_MS) + @Test public void loadTest() throws Exception { prepareForLoadTest(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java index 9bcad721915..a7f1409e3da 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java @@ -131,7 +131,7 @@ public class TestMiniClusterLoadSequential { return writer; } - @Test(timeout=TIMEOUT_MS) + @Test public void loadTest() throws Exception { prepareForLoadTest(); runLoadTestOnExistingTable(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java index e92b027f561..fb9ee842192 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java @@ -176,7 +176,7 @@ public class TestRegionMover { /** * Test that loading the same region set doesn't cause timeout loop during meta load. */ - @Test(timeout = 30000) + @Test public void testRepeatedLoad() throws Exception { MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); HRegionServer regionServer = cluster.getRegionServer(0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java index c3916011bed..66d2c943307 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java @@ -368,7 +368,7 @@ public class TestWALFactory { * [FSNamesystem.nextGenerationStampForBlock]) * 3. 
HDFS-142 (on restart, maintain pendingCreates) */ - @Test (timeout=300000) + @Test public void testAppendClose() throws Exception { TableName tableName = TableName.valueOf(currentTest.getMethodName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java index 011c9ed4c7d..f5800dff16c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java @@ -223,7 +223,7 @@ public class TestWALSplit { * @throws IOException * @throws InterruptedException */ - @Test (timeout=300000) + @Test public void testLogCannotBeWrittenOnceParsed() throws IOException, InterruptedException { final AtomicLong counter = new AtomicLong(0); AtomicBoolean stop = new AtomicBoolean(false); @@ -378,7 +378,7 @@ public class TestWALSplit { /** * {@see https://issues.apache.org/jira/browse/HBASE-3020} */ - @Test (timeout=300000) + @Test public void testRecoveredEditsPathForMeta() throws IOException { byte[] encoded = RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedNameAsBytes(); Path tdir = FSUtils.getTableDir(HBASEDIR, TableName.META_TABLE_NAME); @@ -400,7 +400,7 @@ public class TestWALSplit { * Test old recovered edits file doesn't break WALSplitter. * This is useful in upgrading old instances. */ - @Test (timeout=300000) + @Test public void testOldRecoveredEditsFileSidelined() throws IOException { byte [] encoded = RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedNameAsBytes(); Path tdir = FSUtils.getTableDir(HBASEDIR, TableName.META_TABLE_NAME); @@ -429,7 +429,7 @@ public class TestWALSplit { fs.initialize(fs.getUri(), conf); } - @Test (timeout=300000) + @Test public void testSplitPreservesEdits() throws IOException{ final String REGION = "region__1"; REGIONS.clear(); @@ -445,7 +445,7 @@ public class TestWALSplit { assertTrue("edits differ after split", logsAreEqual(originalLog, splitLog[0])); } - @Test (timeout=300000) + @Test public void testSplitRemovesRegionEventsEdits() throws IOException{ final String REGION = "region__1"; REGIONS.clear(); @@ -464,7 +464,7 @@ public class TestWALSplit { } - @Test (timeout=300000) + @Test public void testSplitLeavesCompactionEventsEdits() throws IOException{ RegionInfo hri = RegionInfoBuilder.newBuilder(TABLE_NAME).build(); REGIONS.clear(); @@ -518,12 +518,12 @@ public class TestWALSplit { return result; } - @Test (timeout=300000) + @Test public void testEmptyLogFiles() throws IOException { testEmptyLogFiles(true); } - @Test (timeout=300000) + @Test public void testEmptyOpenLogFiles() throws IOException { testEmptyLogFiles(false); } @@ -538,14 +538,14 @@ public class TestWALSplit { splitAndCount(NUM_WRITERS, NUM_WRITERS * ENTRIES); // skip 2 empty } - @Test (timeout=300000) + @Test public void testOpenZeroLengthReportedFileButWithDataGetsSplit() throws IOException { // generate logs but leave wal.dat.5 open. 
generateWALs(5); splitAndCount(NUM_WRITERS, NUM_WRITERS * ENTRIES); } - @Test (timeout=300000) + @Test public void testTralingGarbageCorruptionFileSkipErrorsPasses() throws IOException { conf.setBoolean(HBASE_SKIP_ERRORS, true); generateWALs(Integer.MAX_VALUE); @@ -554,7 +554,7 @@ public class TestWALSplit { splitAndCount(NUM_WRITERS, NUM_WRITERS * ENTRIES); } - @Test (timeout=300000) + @Test public void testFirstLineCorruptionLogFileSkipErrorsPasses() throws IOException { conf.setBoolean(HBASE_SKIP_ERRORS, true); generateWALs(Integer.MAX_VALUE); @@ -563,7 +563,7 @@ public class TestWALSplit { splitAndCount(NUM_WRITERS - 1, (NUM_WRITERS - 1) * ENTRIES); //1 corrupt } - @Test (timeout=300000) + @Test public void testMiddleGarbageCorruptionSkipErrorsReadsHalfOfFile() throws IOException { conf.setBoolean(HBASE_SKIP_ERRORS, true); generateWALs(Integer.MAX_VALUE); @@ -579,7 +579,7 @@ public class TestWALSplit { REGIONS.size() * (goodEntries + firstHalfEntries) <= allRegionsCount); } - @Test(timeout = 300000) + @Test public void testCorruptedFileGetsArchivedIfSkipErrors() throws IOException { conf.setBoolean(HBASE_SKIP_ERRORS, true); List failureTypes = Arrays @@ -645,14 +645,14 @@ public class TestWALSplit { } } - @Test (timeout=300000, expected = IOException.class) + @Test (expected = IOException.class) public void testTrailingGarbageCorruptionLogFileSkipErrorsFalseThrows() throws IOException { conf.setBoolean(HBASE_SKIP_ERRORS, false); splitCorruptWALs(FaultyProtobufLogReader.FailureType.BEGINNING); } - @Test (timeout=300000) + @Test public void testCorruptedLogFilesSkipErrorsFalseDoesNotTouchLogs() throws IOException { conf.setBoolean(HBASE_SKIP_ERRORS, false); @@ -697,19 +697,19 @@ public class TestWALSplit { } - @Test (timeout=300000) + @Test public void testEOFisIgnored() throws IOException { int entryCount = 10; ignoreCorruption(Corruptions.TRUNCATE, entryCount, entryCount-1); } - @Test (timeout=300000) + @Test public void testCorruptWALTrailer() throws IOException { int entryCount = 10; ignoreCorruption(Corruptions.TRUNCATE_TRAILER, entryCount, entryCount); } - @Test (timeout=300000) + @Test public void testLogsGetArchivedAfterSplit() throws IOException { conf.setBoolean(HBASE_SKIP_ERRORS, false); generateWALs(-1); @@ -719,13 +719,13 @@ public class TestWALSplit { assertEquals("wrong number of files in the archive log", NUM_WRITERS, archivedLogs.length); } - @Test (timeout=300000) + @Test public void testSplit() throws IOException { generateWALs(-1); splitAndCount(NUM_WRITERS, NUM_WRITERS * ENTRIES); } - @Test (timeout=300000) + @Test public void testLogDirectoryShouldBeDeletedAfterSuccessfulSplit() throws IOException { generateWALs(-1); @@ -743,7 +743,7 @@ public class TestWALSplit { } } - @Test(timeout=300000, expected = IOException.class) + @Test(expected = IOException.class) public void testSplitWillFailIfWritingToRegionFails() throws Exception { //leave 5th log open so we could append the "trap" Writer writer = generateWALs(4); @@ -770,7 +770,7 @@ public class TestWALSplit { } } - @Test (timeout=300000) + @Test public void testSplitDeletedRegion() throws IOException { REGIONS.clear(); String region = "region_that_splits"; @@ -785,7 +785,7 @@ public class TestWALSplit { assertFalse(fs.exists(regiondir)); } - @Test (timeout=300000) + @Test public void testIOEOnOutputThread() throws Exception { conf.setBoolean(HBASE_SKIP_ERRORS, false); @@ -867,7 +867,7 @@ public class TestWALSplit { } // Test for HBASE-3412 - @Test (timeout=300000) + @Test public void testMovedWALDuringRecovery() 
throws Exception { // This partial mock will throw LEE for every file simulating // files that were moved @@ -879,7 +879,7 @@ public class TestWALSplit { retryOverHdfsProblem(spiedFs); } - @Test (timeout=300000) + @Test public void testRetryOpenDuringRecovery() throws Exception { FileSystem spiedFs = Mockito.spy(fs); // The "Cannot obtain block length", "Could not obtain the last block", @@ -908,7 +908,7 @@ public class TestWALSplit { retryOverHdfsProblem(spiedFs); } - @Test (timeout=300000) + @Test public void testTerminationAskedByReporter() throws IOException, CorruptedLogFileException { generateWALs(1, 10, -1); FileStatus logfile = fs.listStatus(WALDIR)[0]; @@ -952,7 +952,7 @@ public class TestWALSplit { * Test log split process with fake data and lots of edits to trigger threading * issues. */ - @Test (timeout=300000) + @Test public void testThreading() throws Exception { doTestThreading(20000, 128*1024*1024, 0); } @@ -961,7 +961,7 @@ public class TestWALSplit { * Test blocking behavior of the log split process if writers are writing slower * than the reader is reading. */ - @Test (timeout=300000) + @Test public void testThreadingSlowWriterSmallBuffer() throws Exception { doTestThreading(200, 1024, 50); } @@ -1069,7 +1069,7 @@ public class TestWALSplit { } // Does leaving the writer open in testSplitDeletedRegion matter enough for two tests? - @Test (timeout=300000) + @Test public void testSplitLogFileDeletedRegionDir() throws IOException { LOG.info("testSplitLogFileDeletedRegionDir"); final String REGION = "region__1"; @@ -1086,7 +1086,7 @@ public class TestWALSplit { assertFalse(fs.exists(regiondir)); } - @Test (timeout=300000) + @Test public void testSplitLogFileEmpty() throws IOException { LOG.info("testSplitLogFileEmpty"); // we won't create the hlog dir until getWAL got called, so @@ -1102,14 +1102,14 @@ public class TestWALSplit { assertEquals(0, countWAL(fs.listStatus(OLDLOGDIR)[0].getPath())); } - @Test (timeout=300000) + @Test public void testSplitLogFileMultipleRegions() throws IOException { LOG.info("testSplitLogFileMultipleRegions"); generateWALs(1, 10, -1); splitAndCount(1, 10); } - @Test (timeout=300000) + @Test public void testSplitLogFileFirstLineCorruptionLog() throws IOException { conf.setBoolean(HBASE_SKIP_ERRORS, true); @@ -1129,7 +1129,7 @@ public class TestWALSplit { /** * {@see https://issues.apache.org/jira/browse/HBASE-4862} */ - @Test (timeout=300000) + @Test public void testConcurrentSplitLogAndReplayRecoverEdit() throws IOException { LOG.info("testConcurrentSplitLogAndReplayRecoverEdit"); // Generate wals for our destination region diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java index eed09c72717..3ca584af392 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java @@ -42,7 +42,7 @@ public class TestWALSplitBoundedLogWriterCreation extends TestWALSplit{ * The logic of this test has conflict with the limit writers split logic, skip this test */ @Override - @Test(timeout=300000) + @Test @Ignore public void testThreadingSlowWriterSmallBuffer() throws Exception { super.testThreadingSlowWriterSmallBuffer(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java index 0580cda434f..993cf7ffbd4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java @@ -127,7 +127,7 @@ public class TestZooKeeperACL { * then check the subset of world-readable nodes in the three tests after * that. */ - @Test (timeout=30000) + @Test public void testHBaseRootZNodeACL() throws Exception { if (!secureZKAvailable) { return; @@ -146,7 +146,7 @@ public class TestZooKeeperACL { * should be created with 2 ACLs: one specifies that the hbase user has * full access to the node; the other, that it is world-readable. */ - @Test (timeout=30000) + @Test public void testHBaseRootRegionServerZNodeACL() throws Exception { if (!secureZKAvailable) { return; @@ -183,7 +183,7 @@ public class TestZooKeeperACL { * created with 2 ACLs: one specifies that the hbase user has full access * to the node; the other, that it is world-readable. */ - @Test (timeout=30000) + @Test public void testHBaseMasterServerZNodeACL() throws Exception { if (!secureZKAvailable) { return; @@ -219,7 +219,7 @@ public class TestZooKeeperACL { * created with 2 ACLs: one specifies that the hbase user has full access * to the node; the other, that it is world-readable. */ - @Test (timeout=30000) + @Test public void testHBaseIDZNodeACL() throws Exception { if (!secureZKAvailable) { return; @@ -325,7 +325,7 @@ public class TestZooKeeperACL { } } - @Test(timeout = 10000) + @Test public void testAdminDrainAllowedOnSecureZK() throws Exception { if (!secureZKAvailable) { return; diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java index 7e01029fac1..c0d34b9ceea 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java @@ -76,7 +76,7 @@ public class TestCallQueue { } - @Test(timeout = 60000) + @Test public void testPutTake() throws Exception { ThriftMetrics metrics = createMetrics(); CallQueue callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics); @@ -89,7 +89,7 @@ public class TestCallQueue { verifyMetrics(metrics, "timeInQueue_num_ops", elementsRemoved); } - @Test(timeout = 60000) + @Test public void testOfferPoll() throws Exception { ThriftMetrics metrics = createMetrics(); CallQueue callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics); diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java index 8a9f9032794..bd156bc4eb2 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java @@ -133,7 +133,7 @@ public class TestThriftHttpServer { @Rule public ExpectedException exception = ExpectedException.none(); - @Test(timeout=600000) + @Test public void testRunThriftServerWithHeaderBufferLength() throws Exception { // Test thrift server with HTTP header length less than 64k @@ -149,7 +149,7 @@ public class TestThriftHttpServer { runThriftServer(1024 * 64); } - @Test(timeout=600000) + @Test public void testRunThriftServer() throws Exception { runThriftServer(0); } diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java index 26546fd9c53..f7585db18e3 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java @@ -153,7 +153,7 @@ public class TestThriftServerCmdLine { cmdLineThread.start(); } - @Test(timeout=600000) + @Test public void testRunThriftServer() throws Exception { List args = new ArrayList<>(); if (implType != null) { diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java index 99379532ed1..caa0beb116b 100644 --- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java +++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java @@ -33,7 +33,7 @@ public class TestInstancePending { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestInstancePending.class); - @Test(timeout = 1000) + @Test public void test() throws Exception { final InstancePending pending = new InstancePending<>(); final AtomicReference getResultRef = new AtomicReference<>(); diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java index 7be8b2f7159..55e7aa8e5d1 100644 --- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java +++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java @@ -81,7 +81,7 @@ public class TestZKMulti { TEST_UTIL.shutdownMiniZKCluster(); } - @Test (timeout=60000) + @Test public void testSimpleMulti() throws Exception { // null multi ZKUtil.multiOrSequential(zkw, null, false); @@ -110,7 +110,7 @@ public class TestZKMulti { assertTrue(ZKUtil.checkExists(zkw, path) == -1); } - @Test (timeout=60000) + @Test public void testComplexMulti() throws Exception { String path1 = ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, "testComplexMulti1"); String path2 = ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, "testComplexMulti2"); @@ -152,7 +152,7 @@ public class TestZKMulti { assertTrue(Bytes.equals(ZKUtil.getData(zkw, path6), Bytes.toBytes(path6))); } - @Test (timeout=60000) + @Test public void testSingleFailure() throws Exception { // try to delete a node that doesn't exist boolean caughtNoNode = false; @@ -190,7 +190,7 @@ public class TestZKMulti { assertTrue(caughtNodeExists); } - @Test (timeout=60000) + @Test public void testSingleFailureInMulti() throws Exception { // try a multi where all but one operation succeeds String pathA = ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, "testSingleFailureInMultiA"); @@ -213,7 +213,7 @@ public class TestZKMulti { assertTrue(ZKUtil.checkExists(zkw, pathC) == -1); } - @Test (timeout=60000) + @Test public void testMultiFailure() throws Exception { String pathX = ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, "testMultiFailureX"); String pathY = ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, "testMultiFailureY"); @@ -267,7 +267,7 @@ public class TestZKMulti { assertTrue(ZKUtil.checkExists(zkw, pathV) == -1); } - @Test (timeout=60000) + @Test public void testRunSequentialOnMultiFailure() throws Exception { String path1 = 
ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, "runSequential1"); String path2 = ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, "runSequential2"); @@ -300,7 +300,7 @@ public class TestZKMulti { * Verifies that for the given root node, it should delete all the child nodes * recursively using multi-update api. */ - @Test (timeout=60000) + @Test public void testdeleteChildrenRecursivelyMulti() throws Exception { String parentZNode = "/testRootMulti"; createZNodeTree(parentZNode); @@ -318,7 +318,7 @@ public class TestZKMulti { * Verifies that for the given root node, it should delete all the nodes recursively using * multi-update api. */ - @Test(timeout = 60000) + @Test public void testDeleteNodeRecursivelyMulti() throws Exception { String parentZNode = "/testdeleteNodeRecursivelyMulti"; createZNodeTree(parentZNode); @@ -327,7 +327,7 @@ public class TestZKMulti { assertTrue("Parent znode should be deleted.", ZKUtil.checkExists(zkw, parentZNode) == -1); } - @Test(timeout = 60000) + @Test public void testDeleteNodeRecursivelyMultiOrSequential() throws Exception { String parentZNode1 = "/testdeleteNode1"; String parentZNode2 = "/testdeleteNode2"; @@ -343,7 +343,7 @@ public class TestZKMulti { assertTrue("Parent znode 3 should be deleted.", ZKUtil.checkExists(zkw, parentZNode3) == -1); } - @Test(timeout = 60000) + @Test public void testDeleteChildrenRecursivelyMultiOrSequential() throws Exception { String parentZNode1 = "/testdeleteChildren1"; String parentZNode2 = "/testdeleteChildren2";
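Taken together, these hunks consistently strip per-method timeout values from @Test annotations while keeping other attributes such as expected. The surrounding context suggests the time budget is now supplied at the class level: the TestInstancePending hunk above already carries an HBaseClassTestRule constant built with HBaseClassTestRule.forClass(...). The sketch below illustrates that pattern only; the class name, test category, and import packages are assumptions for illustration and are not taken from this patch.

import org.apache.hadoop.hbase.HBaseClassTestRule;            // package as assumed, not confirmed by this patch
import org.apache.hadoop.hbase.testclassification.SmallTests; // category chosen for illustration
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

// Hypothetical test class showing the shape these diffs move toward:
// no timeout attribute on individual @Test methods; a single class-level
// rule (as seen in the TestInstancePending context above) governs how long
// the tests may run, based on the declared test category.
@Category(SmallTests.class)
public class TestExample {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestExample.class);

  @Test // formerly @Test(timeout = ...); the class rule now bounds runtime
  public void testSomething() throws Exception {
    // test body unchanged by this kind of edit
  }
}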