HBASE-23780 Edit of test classifications (#1109)
These classifications come from running at various fork counts. A test may complete quickly at a low fork count, but if it accesses disk, it will run much slower when the fork count is high. This edit accommodates some of that phenomenon.

Signed-off-by: Bharath Vissapragada <bharathv@apache.org>
Signed-off-by: Viraj Jasani <vjasani@apache.org>
Signed-off-by: Jan Hentschel <janh@apache.org>
Parent: f94dbebffa
Commit: 12f4e0977c
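For background on what is being reclassified: every test class touched below pairs a group category (ClientTests, MiscTests, MasterTests, ...) with a size category (SmallTests, MediumTests, LargeTests) in its @Category annotation and registers an HBaseClassTestRule, which ties a per-class timeout budget to that size. The sketch below uses a hypothetical TestExampleClassification class (not part of this commit) to show the pattern; reclassifying a test, as done throughout this change, means swapping the size class in the import and in the annotation.

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

// Hypothetical class, shown only to illustrate the classification pattern edited in this commit.
@Category({ ClientTests.class, SmallTests.class })
public class TestExampleClassification {

  // The class rule picks a timeout appropriate to the declared size category, which is why a
  // test that slows down when many surefire forks share the disk may need a larger category.
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestExampleClassification.class);

  @Test
  public void testSomething() {
    // test body elided
  }
}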
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;

@@ -68,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMaster
/**
* Confirm that we will set the priority in {@link HBaseRpcController} for several admin operations.
*/
-@Category({ ClientTests.class, MediumTests.class })
+@Category({ ClientTests.class, SmallTests.class })
public class TestAsyncAdminRpcPriority {

@ClassRule

@@ -65,7 +65,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.exceptions.RegionOpeningException;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.Assert;

@@ -77,7 +77,7 @@ import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({ClientTests.class, MediumTests.class})
+@Category({ClientTests.class, LargeTests.class})
public class TestAsyncProcess {

@ClassRule

@@ -28,14 +28,14 @@ import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.FailInitia
import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.SampleStopper;
import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.SleepingChore;
import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.SlowChore;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category(SmallTests.class)
+@Category(MediumTests.class)
public class TestChoreService {

@ClassRule

@@ -22,20 +22,19 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
-
import java.util.Map;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({ MiscTests.class, SmallTests.class })
+@Category({ MiscTests.class, MediumTests.class })
public class TestCopyOnWriteMaps {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -21,12 +21,11 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
-
import java.util.Random;
import java.util.TreeMap;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.AvlUtil.AvlIterableList;
import org.apache.hadoop.hbase.util.AvlUtil.AvlKeyComparator;
import org.apache.hadoop.hbase.util.AvlUtil.AvlLinkedNode;

@@ -38,7 +37,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({MiscTests.class, SmallTests.class})
+@Category({MiscTests.class, MediumTests.class})
public class TestAvlUtil {

@ClassRule

@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;

@@ -47,8 +46,8 @@ import java.util.stream.Collectors;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.io.WritableUtils;
import org.junit.AfterClass;
import org.junit.Before;

@@ -58,7 +57,7 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

-@Category({MiscTests.class, SmallTests.class})
+@Category({MiscTests.class, MediumTests.class})
@RunWith(Parameterized.class)
public class TestByteBufferUtils {

@@ -33,14 +33,14 @@ import java.util.List;
import java.util.Random;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.io.WritableUtils;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;

-@Category({MiscTests.class, SmallTests.class})
+@Category({MiscTests.class, MediumTests.class})
public class TestBytes extends TestCase {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
-
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

@@ -30,8 +29,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;

@@ -42,7 +41,7 @@ import org.slf4j.LoggerFactory;
/**
* Test {@link CommonFSUtils}.
*/
-@Category({MiscTests.class, MediumTests.class})
+@Category({MiscTests.class, SmallTests.class})
public class TestCommonFSUtils {

@ClassRule

@@ -19,14 +19,14 @@ package org.apache.hadoop.hbase.util;

import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({MiscTests.class, MediumTests.class})
+@Category({MiscTests.class, SmallTests.class})
public class TestCounter {

@ClassRule

@@ -24,15 +24,14 @@ import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
-
import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({MiscTests.class, MediumTests.class})
+@Category({MiscTests.class, SmallTests.class})
public class TestEnvironmentEdgeManager {

@ClassRule

@@ -28,8 +28,8 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.ssl.SSLFactory;

@@ -46,7 +46,7 @@ import org.slf4j.LoggerFactory;
* HTTPS using the created certficates and calls an echo servlet using the
* corresponding HTTPS URL.
*/
-@Category({MiscTests.class, SmallTests.class})
+@Category({MiscTests.class, MediumTests.class})
public class TestSSLHttpServer extends HttpServerFunctionalTest {

@ClassRule

@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapred.RowCounter.RowCounterMapper;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

@@ -45,7 +45,7 @@ import org.mockito.Mockito;

import org.apache.hbase.thirdparty.com.google.common.base.Joiner;

-@Category({MapReduceTests.class, SmallTests.class})
+@Category({MapReduceTests.class, MediumTests.class})
public class TestRowCounter {

@ClassRule

@@ -24,7 +24,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;
/**
* Test different variants of initTableMapperJob method
*/
-@Category({MapReduceTests.class, SmallTests.class})
+@Category({MapReduceTests.class, MediumTests.class})
public class TestTableMapReduceUtil {

@ClassRule

@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;

@@ -41,7 +41,7 @@ import org.apache.hbase.thirdparty.com.google.protobuf.Int32Value;

import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;

-@Category({MasterTests.class, SmallTests.class})
+@Category({MasterTests.class, MediumTests.class})
public class TestProcedureEvents {

@ClassRule

@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;

@@ -45,7 +45,7 @@ import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.protobuf.Int32Value;

-@Category({MasterTests.class, SmallTests.class})
+@Category({MasterTests.class, MediumTests.class})
public class TestProcedureRecovery {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.procedure2;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicLong;

@@ -30,8 +29,8 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;

@@ -48,7 +47,7 @@ import org.apache.hbase.thirdparty.com.google.protobuf.Int64Value;
* we should use lock to obtain the correct order. Ignored.
*/
@Ignore
-@Category({ MasterTests.class, LargeTests.class })
+@Category({ MasterTests.class, SmallTests.class })
public class TestProcedureReplayOrder {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.After;
import org.junit.Before;

@@ -33,7 +33,7 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({MasterTests.class, MediumTests.class})
+@Category({MasterTests.class, SmallTests.class})
public class TestProcedureSchedulerConcurrency {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;

@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;

import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;

-@Category({ MasterTests.class, MediumTests.class })
+@Category({ MasterTests.class, SmallTests.class })
public class TestProcedureSkipPersistence {

@ClassRule

@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;

@@ -38,7 +38,7 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({MasterTests.class, SmallTests.class})
+@Category({MasterTests.class, MediumTests.class})
public class TestStateMachineProcedure {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.procedure2.store.wal;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-
import java.io.IOException;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;

@@ -32,8 +31,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;

@@ -43,7 +42,7 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({MasterTests.class, LargeTests.class})
+@Category({MasterTests.class, MediumTests.class})
public class TestStressWALProcedureStore {

@ClassRule

@@ -18,10 +18,9 @@
package org.apache.hadoop.hbase;

import static org.junit.Assert.assertTrue;
-
import java.util.List;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@@ -29,7 +28,7 @@ import org.junit.experimental.categories.Category;
/**
* Checks tests are categorized.
*/
-@Category({MiscTests.class, MediumTests.class})
+@Category({MiscTests.class, SmallTests.class})
public class TestCheckTestClasses {

@ClassRule

@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;

@@ -62,7 +62,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category(SmallTests.class)
+@Category(MediumTests.class)
public class TestClientClusterMetrics {

@ClassRule

@@ -22,7 +22,7 @@ import static org.junit.Assert.assertEquals;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;

@@ -32,7 +32,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({ MiscTests.class, LargeTests.class })
+@Category({ MiscTests.class, MediumTests.class })
public class TestFullLogReconstruction {

@ClassRule

@@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

@@ -32,8 +31,8 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.junit.After;

@@ -46,10 +45,8 @@ import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
-
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse;

@@ -58,7 +55,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRespon
* Test MetaTableAccessor but without spinning up a cluster.
* We mock regionserver back and forth (we do spin up a zk cluster).
*/
-@Category({MiscTests.class, MediumTests.class})
+@Category({MiscTests.class, SmallTests.class})
public class TestMetaTableAccessorNoCluster {

@ClassRule

@@ -112,8 +109,8 @@ public class TestMetaTableAccessorNoCluster {
assertTrue(hri == null);
// OK, give it what it expects
kvs.clear();
-kvs.add(new KeyValue(HConstants.EMPTY_BYTE_ARRAY, f,
-HConstants.REGIONINFO_QUALIFIER, RegionInfo.toByteArray(RegionInfoBuilder.FIRST_META_REGIONINFO)));
+kvs.add(new KeyValue(HConstants.EMPTY_BYTE_ARRAY, f, HConstants.REGIONINFO_QUALIFIER,
+RegionInfo.toByteArray(RegionInfoBuilder.FIRST_META_REGIONINFO)));
hri = MetaTableAccessor.getRegionInfo(Result.create(kvs));
assertNotNull(hri);
assertTrue(RegionInfo.COMPARATOR.compare(hri, RegionInfoBuilder.FIRST_META_REGIONINFO) == 0);

@@ -123,8 +120,6 @@ public class TestMetaTableAccessorNoCluster {
* Test that MetaTableAccessor will ride over server throwing
* "Server not running" IOEs.
* @see <a href="https://issues.apache.org/jira/browse/HBASE-3446">HBASE-3446</a>
-* @throws IOException
-* @throws InterruptedException
*/
@Test
public void testRideOverServerNotRunning()

@@ -190,8 +185,8 @@ public class TestMetaTableAccessorNoCluster {
// Return the RegionLocations object when locateRegion
// The ugly format below comes of 'Important gotcha on spying real objects!' from
// http://mockito.googlecode.com/svn/branches/1.6/javadoc/org/mockito/Mockito.html
-Mockito.doReturn(rl).when
-(connection).locateRegion((TableName)Mockito.any(), (byte[])Mockito.any(),
+Mockito.doReturn(rl).when(connection).
+locateRegion((TableName)Mockito.any(), (byte[])Mockito.any(),
Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.anyInt());

// Now shove our HRI implementation into the spied-upon connection.

@@ -202,14 +197,17 @@ public class TestMetaTableAccessorNoCluster {
NavigableMap<RegionInfo, Result> hris =
MetaTableAccessor.getServerUserRegions(connection, sn);
assertEquals(1, hris.size());
-assertTrue(RegionInfo.COMPARATOR.compare(hris.firstEntry().getKey(), RegionInfoBuilder.FIRST_META_REGIONINFO) == 0);
+assertTrue(RegionInfo.COMPARATOR.compare(hris.firstEntry().getKey(),
+RegionInfoBuilder.FIRST_META_REGIONINFO) == 0);
assertTrue(Bytes.equals(rowToVerify, hris.firstEntry().getValue().getRow()));
// Finally verify that scan was called four times -- three times
// with exception and then on 4th attempt we succeed
Mockito.verify(implementation, Mockito.times(4)).
scan((RpcController)Mockito.any(), (ScanRequest)Mockito.any());
} finally {
-if (connection != null && !connection.isClosed()) connection.close();
+if (connection != null && !connection.isClosed()) {
+connection.close();
+}
zkw.close();
}
}

@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter;
import org.apache.hadoop.hbase.filter.RandomRowFilter;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.Pair;

@@ -67,7 +67,7 @@ import org.slf4j.LoggerFactory;
* Unless the flag {@link Scan#setAllowPartialResults(boolean)} has been set to true, the caller of
* {@link ResultScanner#next()} should never see partial results.
*/
-@Category(MediumTests.class)
+@Category(LargeTests.class)
public class TestPartialResultsFromClientSide {

@ClassRule

@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;

@@ -47,7 +47,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category(MediumTests.class)
+@Category(LargeTests.class)
public class TestServerSideScanMetricsFromClientSide {

@ClassRule

@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.coordination.ZkSplitLogWorkerCoordination;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.balancer.SimpleLoadBalancer;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;

@@ -56,7 +56,7 @@ import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({MiscTests.class, LargeTests.class})
+@Category({MiscTests.class, MediumTests.class})
public class TestZooKeeper {

@ClassRule

@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;

@@ -79,7 +79,7 @@ import org.slf4j.LoggerFactory;
* Test that the {@link HFileArchiver} correctly removes all the parts of a region when cleaning up
* a region
*/
-@Category({MediumTests.class, MiscTests.class})
+@Category({LargeTests.class, MiscTests.class})
public class TestHFileArchiving {

@ClassRule

@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information

@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
-
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellUtil;

@@ -27,7 +26,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;

@@ -40,7 +39,7 @@ import org.junit.rules.TestName;
/**
* Run Append tests that use the HBase clients;
*/
-@Category(LargeTests.class)
+@Category(MediumTests.class)
public class TestAppendFromClientSide {

@ClassRule

@@ -59,9 +58,6 @@ public class TestAppendFromClientSide {
TEST_UTIL.startMiniCluster(3);
}

-/**
-* @throws java.lang.Exception
-*/
@AfterClass
public static void afterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();

@@ -73,7 +69,8 @@ public class TestAppendFromClientSide {
Table table = TEST_UTIL.createTable(TABLENAME, FAMILY);
long timestamp = 999;
Append append = new Append(ROW);
-append.add(CellUtil.createCell(ROW, FAMILY, QUALIFIER, timestamp, KeyValue.Type.Put.getCode(), Bytes.toBytes(100L)));
+append.add(CellUtil.createCell(ROW, FAMILY, QUALIFIER, timestamp, KeyValue.Type.Put.getCode(),
+Bytes.toBytes(100L)));
Result r = table.append(append);
assertEquals(1, r.size());
assertEquals(timestamp, r.rawCells()[0].getTimestamp());

@@ -24,7 +24,7 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FutureUtils;
import org.junit.After;

@@ -34,7 +34,7 @@ import org.junit.experimental.categories.Category;

import org.apache.hbase.thirdparty.com.google.common.io.Closeables;

-@Category({ MediumTests.class, ClientTests.class })
+@Category({ LargeTests.class, ClientTests.class })
public class TestAsyncClientPushback extends ClientPushbackTestBase {

@ClassRule

@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.AfterClass;

@@ -37,7 +37,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({ MediumTests.class, ClientTests.class })
+@Category({ LargeTests.class, ClientTests.class })
public class TestAsyncTableScanRenewLease {

@ClassRule

@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertArrayEquals;
-
import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.ExecutionException;

@@ -35,7 +34,7 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.StorefileRefresherChore;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FutureUtils;
import org.junit.After;

@@ -45,7 +44,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({ ClientTests.class, LargeTests.class })
+@Category({ ClientTests.class, MediumTests.class })
public class TestAsyncTableUseMetaReplicas {

@ClassRule

@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.AfterClass;

@@ -48,7 +48,7 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({MediumTests.class, ClientTests.class})
+@Category({LargeTests.class, ClientTests.class})
public class TestClientOperationInterrupt {

@ClassRule

@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.After;
import org.junit.Before;

@@ -35,7 +35,7 @@ import org.junit.experimental.categories.Category;

import org.apache.hbase.thirdparty.com.google.common.io.Closeables;

-@Category({ MediumTests.class, ClientTests.class })
+@Category({ LargeTests.class, ClientTests.class })
public class TestClientPushback extends ClientPushbackTestBase {

@ClassRule

@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MetaRegionLocationCache;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;

@@ -46,7 +46,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({SmallTests.class, MasterTests.class })
+@Category({MediumTests.class, MasterTests.class })
public class TestMetaRegionLocationCache {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information

@@ -20,14 +20,13 @@ package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
-
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.Before;

@@ -38,7 +37,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

-@Category({ LargeTests.class, ClientTests.class })
+@Category({ MediumTests.class, ClientTests.class })
public class TestMvccConsistentScanner {

@ClassRule

@@ -27,13 +27,13 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category({ MediumTests.class, ClientTests.class })
+@Category({ LargeTests.class, ClientTests.class })
public class TestRawAsyncScanCursor extends AbstractTestScanCursor {

@ClassRule

@@ -22,7 +22,7 @@ import java.util.List;
import java.util.function.Supplier;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

@@ -31,7 +31,7 @@ import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
-@Category({ MediumTests.class, ClientTests.class })
+@Category({ LargeTests.class, ClientTests.class })
public class TestRawAsyncTableScan extends AbstractTestAsyncTableScan {

@ClassRule

@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.regionserver.StorefileRefresherChore;
import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;

@@ -64,7 +64,7 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({MediumTests.class, ClientTests.class})
+@Category({LargeTests.class, ClientTests.class})
public class TestReplicaWithCluster {

@ClassRule

@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.StorefileRefresherChore;
import org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
import org.junit.After;

@@ -75,7 +75,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
* Tests for region replicas. Sad that we cannot isolate these without bringing up a whole
* cluster. See {@link org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster}.
*/
-@Category({MediumTests.class, ClientTests.class})
+@Category({LargeTests.class, ClientTests.class})
@SuppressWarnings("deprecation")
public class TestReplicasClient {

@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;

-@Category({ MediumTests.class, ClientTests.class })
+@Category({ LargeTests.class, ClientTests.class })
public class TestResultScannerCursor extends AbstractTestResultScannerCursor {

@ClassRule

@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertEquals;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.HBaseClassTestRule;

@@ -27,11 +26,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.HFile;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;

@@ -41,7 +37,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

-@Category(LargeTests.class)
+@Category(MediumTests.class)
public class TestResultSizeEstimation {

@ClassRule

@@ -115,11 +111,11 @@ public class TestResultSizeEstimation {
Table table = TEST_UTIL.createTable(tableName, FAMILIES);
Put p = new Put(ROW1);
p.add(new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE,
-new Tag[] { new ArrayBackedTag((byte)1, new byte[TAG_DATA_SIZE]) } ));
+new Tag[] { new ArrayBackedTag((byte)1, new byte[TAG_DATA_SIZE]) }));
table.put(p);
p = new Put(ROW2);
p.add(new KeyValue(ROW2, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE,
-new Tag[] { new ArrayBackedTag((byte)1, new byte[TAG_DATA_SIZE]) } ));
+new Tag[] { new ArrayBackedTag((byte)1, new byte[TAG_DATA_SIZE]) }));
table.put(p);

Scan s = new Scan();

@@ -63,7 +63,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;

@@ -73,7 +73,7 @@ import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({CoprocessorTests.class, SmallTests.class})
+@Category({CoprocessorTests.class, MediumTests.class})
public class TestCoprocessorInterface {

@ClassRule

@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.hamcrest.CustomTypeSafeMatcher;
import org.hamcrest.Matcher;

@@ -67,7 +67,7 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-@Category({ CoprocessorTests.class, MediumTests.class })
+@Category({ CoprocessorTests.class, LargeTests.class })
public class TestMetaTableMetrics {

@ClassRule

@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.junit.AfterClass;

@@ -44,7 +44,7 @@ import org.slf4j.LoggerFactory;
* slow/expensive and a flush is triggered at the same time the coprocessow is doing its work. To
* simulate this we call flush from the coprocessor itself
*/
-@Category(LargeTests.class)
+@Category(MediumTests.class)
public class TestNegativeMemStoreSizeWithSlowCoprocessor {

@ClassRule

@@ -76,7 +76,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTrack
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

@@ -99,7 +99,7 @@ import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;

-@Category({ CoprocessorTests.class, MediumTests.class })
+@Category({ CoprocessorTests.class, LargeTests.class })
public class TestRegionObserverInterface {

@ClassRule

@@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.master.RackManager;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Triple;
import org.junit.BeforeClass;

@@ -54,7 +54,7 @@ import org.mockito.Mockito;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;

-@Category({MasterTests.class, SmallTests.class})
+@Category({MasterTests.class, LargeTests.class})
public class TestFavoredNodeAssignmentHelper {

@ClassRule

@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.testclassification.FilterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL;
import org.junit.After;

@@ -67,7 +67,7 @@ import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
/**
* Test filters at the HRegion doorstep.
*/
-@Category({FilterTests.class, SmallTests.class})
+@Category({FilterTests.class, MediumTests.class})
public class TestFilter {

@ClassRule

@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.testclassification.FilterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.ClassRule;

@@ -51,7 +51,7 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;

-@Category({FilterTests.class, SmallTests.class})
+@Category({FilterTests.class, MediumTests.class})
public class TestFilterList {

@ClassRule

@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.testclassification.FilterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.ClassRule;

@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;

-@Category({FilterTests.class, SmallTests.class})
+@Category({FilterTests.class, MediumTests.class})
public class TestFilterSerialization {

@ClassRule

@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.FilterTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.BeforeClass;
import org.junit.ClassRule;

@@ -46,7 +46,7 @@ import org.slf4j.LoggerFactory;
/**
* Test if Filter is incompatible with scan-limits
*/
-@Category({FilterTests.class, MediumTests.class})
+@Category({FilterTests.class, LargeTests.class})
public class TestFilterWithScanLimits extends FilterTestingCluster {

@ClassRule

@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.testclassification.FilterTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Rule;

@@ -49,7 +49,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

-@Category({FilterTests.class, SmallTests.class})
+@Category({FilterTests.class, MediumTests.class})
public class TestMultipleColumnPrefixFilter {

@ClassRule

@@ -20,15 +20,14 @@ package org.apache.hadoop.hbase.filter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;

@@ -41,7 +40,7 @@ import org.junit.experimental.categories.Category;
* It tests the entire work flow from when a string is given by the user
* and how it is parsed to construct the corresponding Filter object
*/
-@Category({RegionServerTests.class, SmallTests.class})
+@Category({RegionServerTests.class, MediumTests.class})
public class TestParseFilter {

@ClassRule

@ -20,12 +20,10 @@ package org.apache.hadoop.hbase.http;
|
|||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.File;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.CommonConfigurationKeys;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
|
@ -37,8 +35,8 @@ import org.apache.hadoop.hbase.TableName;
|
|||
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
|
||||
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
|
||||
import org.apache.hadoop.hbase.security.token.TokenProvider;
|
||||
import org.apache.hadoop.hbase.testclassification.MediumTests;
|
||||
import org.apache.hadoop.hbase.testclassification.MiscTests;
|
||||
import org.apache.hadoop.hbase.testclassification.SmallTests;
|
||||
import org.apache.hadoop.hbase.util.FSUtils;
|
||||
import org.apache.hadoop.hbase.util.Pair;
|
||||
import org.apache.hadoop.minikdc.MiniKdc;
|
||||
|
@ -73,7 +71,7 @@ import org.slf4j.LoggerFactory;
|
|||
/**
|
||||
* Testing info servers for admin acl.
|
||||
*/
|
||||
@Category({ MiscTests.class, SmallTests.class })
|
||||
@Category({ MiscTests.class, MediumTests.class })
|
||||
public class TestInfoServersACL {
|
||||
|
||||
@ClassRule
|
||||
|
|
|
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
import org.apache.hadoop.hbase.util.Strings;
@@ -58,7 +58,7 @@ import org.junit.runners.Parameterized.Parameters;
/**
* Tests encoded seekers by loading and reading values.
*/
@Category({IOTests.class, MediumTests.class})
@Category({IOTests.class, LargeTests.class})
@RunWith(Parameterized.class)
public class TestEncodedSeekers {

@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.Before;
import org.junit.ClassRule;
@@ -56,7 +56,7 @@ import org.slf4j.LoggerFactory;
// (seconds). It is large because it depends on being able to reset the global
// blockcache instance which is in a global variable. Experience has it that
// tests clash on the global variable if this test is run as small sized test.
@Category({IOTests.class, LargeTests.class})
@Category({IOTests.class, MediumTests.class})
public class TestCacheConfig {
@ClassRule
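The comment in the hunk above keeps TestCacheConfig out of the small bucket: small tests share a JVM, so a test that mutates a process-wide singleton (here, the global block cache instance) can bleed into whatever runs next in the same fork. A deliberately simplified, hypothetical illustration of that kind of clash (not code from HBase):

  // Hypothetical: two tests sharing one JVM and one static singleton. Whichever
  // runs second observes state left behind by the first unless it is reset.
  public class GlobalStateClashExample {

    // Stand-in for a cache instance held in a static (process-wide) field.
    static final java.util.Map<String, String> GLOBAL_CACHE = new java.util.HashMap<>();

    @org.junit.Test
    public void testA() {
      GLOBAL_CACHE.put("conf", "A"); // mutates the shared singleton
      org.junit.Assert.assertEquals("A", GLOBAL_CACHE.get("conf"));
    }

    @org.junit.Test
    public void testB() {
      // Passes in its own JVM, but fails after testA when both run in one fork.
      org.junit.Assert.assertNull(GLOBAL_CACHE.get("conf"));
    }
  }

Running each class in its own forked JVM, which is what the medium and large buckets typically get, sidesteps the shared state.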
@@ -66,7 +66,7 @@ import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.MultiByteBuff;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -84,7 +84,7 @@ import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({IOTests.class, MediumTests.class})
@Category({IOTests.class, LargeTests.class})
@RunWith(Parameterized.class)
public class TestHFileBlock {

@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.RedundantKVGenerator;
import org.junit.ClassRule;
@@ -51,7 +51,7 @@ import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
@Category({IOTests.class, SmallTests.class})
@Category({IOTests.class, MediumTests.class})
public class TestHFileDataBlockEncoder {
@ClassRule

@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileReaderImpl.HFileScannerImpl;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.io.hfile.bucket.TestBucketCache;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Assert;
@@ -67,7 +67,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@RunWith(Parameterized.class)
@Category({ IOTests.class, SmallTests.class })
@Category({ IOTests.class, LargeTests.class })
public class TestHFileScannerImplReferenceCount {
@ClassRule

@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.io.BytesWritable;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
@@ -57,7 +57,7 @@ import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
* Remove after tfile is committed and use the tfile version of this class
* instead.</p>
*/
@Category({IOTests.class, MediumTests.class})
@Category({IOTests.class, SmallTests.class})
public class TestHFileSeek extends TestCase {
@ClassRule

@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.Text;
@@ -64,7 +64,7 @@ import org.slf4j.LoggerFactory;
* Testing writing a version 3 {@link HFile} for all encoded blocks
*/
@RunWith(Parameterized.class)
@Category({IOTests.class, SmallTests.class})
@Category({IOTests.class, MediumTests.class})
public class TestHFileWriterV3WithDataEncoders {
@ClassRule

@@ -36,14 +36,14 @@ import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({IOTests.class, SmallTests.class})
@Category({IOTests.class, MediumTests.class})
public class TestPrefetch {
@ClassRule
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.io.hfile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -43,8 +42,8 @@ import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.junit.After;
@@ -56,7 +55,7 @@ import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({ RegionServerTests.class, MediumTests.class })
@Category({ RegionServerTests.class, SmallTests.class })
public class TestScannerFromBucketCache {
@ClassRule
@@ -69,7 +68,7 @@ public class TestScannerFromBucketCache {
HRegion region = null;
private HBaseTestingUtility test_util;
public Configuration conf;
private Configuration conf;
private final int MAX_VERSIONS = 2;
byte[] val = new byte[512 * 1024];

@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
@@ -58,7 +58,7 @@ import org.slf4j.LoggerFactory;
* expired.
*/
@RunWith(Parameterized.class)
@Category({IOTests.class, MediumTests.class})
@Category({IOTests.class, LargeTests.class})
public class TestScannerSelectionUsingTTL {
@ClassRule

@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMCache;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMQueueEntry;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -74,7 +74,7 @@ import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
* Tests will ensure that blocks' data correctness under several threads concurrency
*/
@RunWith(Parameterized.class)
@Category({ IOTests.class, MediumTests.class })
@Category({ IOTests.class, LargeTests.class })
public class TestBucketCache {
@ClassRule

@@ -38,12 +38,12 @@ import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.WriterThread;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({ IOTests.class, MediumTests.class })
@Category({ IOTests.class, SmallTests.class })
public class TestBucketCacheRefCnt {
@ClassRule

@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMCache;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMQueueEntry;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
@@ -41,7 +41,7 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({ IOTests.class, MediumTests.class })
@Category({ IOTests.class, SmallTests.class })
public class TestRAMCache {
private static final Logger LOG = LoggerFactory.getLogger(TestRAMCache.class);
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -44,7 +44,7 @@ import org.junit.runners.Parameterized;
* Basic test for check file's integrity before start BucketCache in fileIOEngine
*/
@RunWith(Parameterized.class)
@Category(MediumTests.class)
@Category(SmallTests.class)
public class TestVerifyBucketCacheFile {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -27,12 +27,12 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
@Category({ RPCTests.class, SmallTests.class })
@Category({ RPCTests.class, MediumTests.class })
public class TestBlockingIPC extends AbstractTestIPC {
@ClassRule

@@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.InetSocketAddress;
@@ -32,8 +31,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandlerImpl;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
@@ -43,7 +42,7 @@ import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({RPCTests.class, LargeTests.class})
@Category({RPCTests.class, SmallTests.class})
public class TestFifoRpcScheduler {
@ClassRule
@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -20,8 +20,8 @@ package org.apache.hadoop.hbase.ipc;
import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
import org.junit.Assert;
@@ -29,9 +29,8 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({RPCTests.class, MediumTests.class}) // Can't be small, we're playing with the EnvironmentEdge
@Category({RPCTests.class, SmallTests.class})
public class TestHBaseClient {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHBaseClient.class);
@@ -39,47 +38,48 @@ public class TestHBaseClient {
@Test
public void testFailedServer(){
ManualEnvironmentEdge ee = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge( ee );
EnvironmentEdgeManager.injectEdge(ee);
FailedServers fs = new FailedServers(new Configuration());
Throwable testThrowable = new Throwable();//throwable already tested in TestFailedServers.java
InetSocketAddress ia = InetSocketAddress.createUnresolved("bad", 12);
InetSocketAddress ia2 = InetSocketAddress.createUnresolved("bad", 12); // same server as ia
// same server as ia
InetSocketAddress ia2 = InetSocketAddress.createUnresolved("bad", 12);
InetSocketAddress ia3 = InetSocketAddress.createUnresolved("badtoo", 12);
InetSocketAddress ia4 = InetSocketAddress.createUnresolved("badtoo", 13);
Assert.assertFalse( fs.isFailedServer(ia) );
Assert.assertFalse(fs.isFailedServer(ia));
fs.addToFailedServers(ia,testThrowable);
Assert.assertTrue( fs.isFailedServer(ia) );
Assert.assertTrue( fs.isFailedServer(ia2) );
Assert.assertTrue(fs.isFailedServer(ia));
Assert.assertTrue(fs.isFailedServer(ia2));
ee.incValue( 1 );
Assert.assertTrue( fs.isFailedServer(ia) );
Assert.assertTrue( fs.isFailedServer(ia2) );
ee.incValue(1);
Assert.assertTrue(fs.isFailedServer(ia));
Assert.assertTrue(fs.isFailedServer(ia2));
ee.incValue( RpcClient.FAILED_SERVER_EXPIRY_DEFAULT + 1 );
Assert.assertFalse( fs.isFailedServer(ia) );
Assert.assertFalse( fs.isFailedServer(ia2) );
ee.incValue(RpcClient.FAILED_SERVER_EXPIRY_DEFAULT + 1);
Assert.assertFalse(fs.isFailedServer(ia));
Assert.assertFalse(fs.isFailedServer(ia2));
fs.addToFailedServers(ia,testThrowable);
fs.addToFailedServers(ia3,testThrowable);
fs.addToFailedServers(ia4,testThrowable);
Assert.assertTrue( fs.isFailedServer(ia) );
Assert.assertTrue( fs.isFailedServer(ia2) );
Assert.assertTrue( fs.isFailedServer(ia3) );
Assert.assertTrue( fs.isFailedServer(ia4) );
Assert.assertTrue(fs.isFailedServer(ia));
Assert.assertTrue(fs.isFailedServer(ia2));
Assert.assertTrue(fs.isFailedServer(ia3));
Assert.assertTrue(fs.isFailedServer(ia4));
ee.incValue( RpcClient.FAILED_SERVER_EXPIRY_DEFAULT + 1 );
Assert.assertFalse( fs.isFailedServer(ia) );
Assert.assertFalse( fs.isFailedServer(ia2) );
Assert.assertFalse( fs.isFailedServer(ia3) );
Assert.assertFalse( fs.isFailedServer(ia4) );
ee.incValue(RpcClient.FAILED_SERVER_EXPIRY_DEFAULT + 1);
Assert.assertFalse(fs.isFailedServer(ia));
Assert.assertFalse(fs.isFailedServer(ia2));
Assert.assertFalse(fs.isFailedServer(ia3));
Assert.assertFalse(fs.isFailedServer(ia4));
fs.addToFailedServers(ia3,testThrowable);
Assert.assertFalse( fs.isFailedServer(ia4) );
Assert.assertFalse(fs.isFailedServer(ia4));
}
}
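The hunk above shows why this test was tagged as "playing with the EnvironmentEdge": instead of sleeping through the failed-server expiry window, it injects a ManualEnvironmentEdge and advances the clock by hand. A minimal sketch of that injection pattern, assuming only the utility classes visible in the hunk plus EnvironmentEdgeManager.reset() and currentTime():

  import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
  import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;

  // Sketch of the manual-clock pattern used above; not part of the patch.
  public class ManualEdgeSketch {
    public static void main(String[] args) {
      ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
      EnvironmentEdgeManager.injectEdge(edge); // HBase time reads now come from 'edge'
      try {
        long before = EnvironmentEdgeManager.currentTime();
        edge.incValue(5000); // advance the clock 5 seconds without sleeping
        long after = EnvironmentEdgeManager.currentTime();
        System.out.println("clock advanced by " + (after - before) + " ms");
      } finally {
        EnvironmentEdgeManager.reset(); // restore the default edge for later tests
      }
    }
  }

The expiry assertions in testFailedServer() work the same way: incValue(RpcClient.FAILED_SERVER_EXPIRY_DEFAULT + 1) pushes the injected clock past the expiry window so isFailedServer() flips to false without any real waiting.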
@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -28,8 +28,8 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.JVM;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -39,7 +39,6 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.apache.hbase.thirdparty.io.netty.channel.Channel;
import org.apache.hbase.thirdparty.io.netty.channel.epoll.EpollEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.epoll.EpollSocketChannel;
@@ -47,7 +46,7 @@ import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
@RunWith(Parameterized.class)
@Category({ RPCTests.class, SmallTests.class })
@Category({ RPCTests.class, MediumTests.class })
public class TestNettyIPC extends AbstractTestIPC {
@ClassRule

@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -21,7 +21,6 @@ import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.SERVICE;
import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.newBlockingStub;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;
@@ -30,8 +29,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.junit.After;
@@ -43,9 +42,7 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos;
import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto;
import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto;
@@ -57,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProto
* <code>src/test/protobuf/test_rpc_service.proto</code>
*/
@RunWith(Parameterized.class)
@Category({ RPCTests.class, MediumTests.class })
@Category({ RPCTests.class, SmallTests.class })
public class TestProtoBufRpc {
@ClassRule
@@ -65,7 +62,7 @@ public class TestProtoBufRpc {
HBaseClassTestRule.forClass(TestProtoBufRpc.class);
public final static String ADDRESS = "localhost";
public static int PORT = 0;
private static int PORT = 0;
private InetSocketAddress isa;
private Configuration conf;
private RpcServerInterface server;
@@ -28,7 +28,6 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.InetSocketAddress;
@@ -46,8 +45,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandlerImpl;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -60,18 +59,16 @@ import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
@Category({RPCTests.class, SmallTests.class})
@Category({RPCTests.class, MediumTests.class})
public class TestSimpleRpcScheduler {
@ClassRule
@@ -115,17 +112,20 @@ public class TestSimpleRpcScheduler {
RpcExecutor rpcExecutor = (RpcExecutor)ExecutorField.get(scheduler);
Field handlerCountField = rpcExecutor.getClass().getSuperclass().getSuperclass().getDeclaredField("handlerCount");
Field handlerCountField = rpcExecutor.getClass().getSuperclass().getSuperclass().
getDeclaredField("handlerCount");
handlerCountField.setAccessible(true);
handlerCountField.set(rpcExecutor, 0);
Field numCallQueuesField = rpcExecutor.getClass().getSuperclass().getSuperclass().getDeclaredField("numCallQueues");
Field numCallQueuesField = rpcExecutor.getClass().getSuperclass().getSuperclass().
getDeclaredField("numCallQueues");
numCallQueuesField.setAccessible(true);
numCallQueuesField.set(rpcExecutor, 1);
Field currentQueueLimitField = rpcExecutor.getClass().getSuperclass().getSuperclass().getDeclaredField("currentQueueLimit");
Field currentQueueLimitField = rpcExecutor.getClass().getSuperclass().getSuperclass().
getDeclaredField("currentQueueLimit");
currentQueueLimitField.setAccessible(true);
currentQueueLimitField.set(rpcExecutor, 100);
@@ -480,14 +480,15 @@ public class TestSimpleRpcScheduler {
}
}
// FIX. I don't get this test (St.Ack). When I time this test, the minDelay is > 2 * codel delay from the get go.
// So we are always overloaded. The test below would seem to complete the queuing of all the CallRunners inside
// the codel check interval. I don't think we are skipping codel checking. Second, I think this test has been
// broken since HBASE-16089 Add on FastPath for CoDel went in. The thread name we were looking for was the name
// BEFORE we updated: i.e. "RpcServer.CodelBQ.default.handler". But same patch changed the name of the codel
// fastpath thread to: new FastPathBalancedQueueRpcExecutor("CodelFPBQ.default", handlerCount, numCallQueues...
// Codel is hard to test. This test is going to be flakey given it all timer-based. Disabling for now till chat
// with authors.
// FIX. I don't get this test (St.Ack). When I time this test, the minDelay is > 2 * codel delay
// from the get go. So we are always overloaded. The test below would seem to complete the
// queuing of all the CallRunners inside the codel check interval. I don't think we are skipping
// codel checking. Second, I think this test has been broken since HBASE-16089 Add on FastPath for
// CoDel went in. The thread name we were looking for was the name BEFORE we updated: i.e.
// "RpcServer.CodelBQ.default.handler". But same patch changed the name of the codel fastpath
// thread to: new FastPathBalancedQueueRpcExecutor("CodelFPBQ.default", handlerCount,
// numCallQueues... Codel is hard to test. This test is going to be flakey given it all
// timer-based. Disabling for now till chat with authors.
@Test
public void testCoDelScheduling() throws Exception {
CoDelEnvironmentEdge envEdge = new CoDelEnvironmentEdge();
@@ -501,8 +502,8 @@ public class TestSimpleRpcScheduler {
SimpleRpcScheduler scheduler =
new SimpleRpcScheduler(schedConf, 1, 1, 1, priority, HConstants.QOS_THRESHOLD);
try {
// Loading mocked call runner can take a good amount of time the first time through (haven't looked why).
// Load it for first time here outside of the timed loop.
// Loading mocked call runner can take a good amount of time the first time through
// (haven't looked why). Load it for first time here outside of the timed loop.
getMockedCallRunner(System.currentTimeMillis(), 2);
scheduler.start();
EnvironmentEdgeManager.injectEdge(envEdge);
@@ -644,7 +645,9 @@ public class TestSimpleRpcScheduler {
CallRunner cr = new CallRunner(null, putCall) {
@Override
public void run() {
if (sleepTime <= 0) return;
if (sleepTime <= 0) {
return;
}
try {
LOG.warn("Sleeping for " + sleepTime);
Thread.sleep(sleepTime);
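The FIX comment above says the CoDel assertion broke when HBASE-16089 renamed the handler threads, i.e. the test was locating the CoDel path by thread name. A hedged illustration of that kind of name-based probe, using only the JDK; the thread-name prefix is taken from the comment and is an assumption, not verified against current code:

  // Illustration only: detect a handler thread by scanning live thread names.
  // Brittle on purpose, which is the failure mode the FIX comment describes:
  // renaming the executor silently breaks the probe.
  public class HandlerThreadProbe {
    static boolean handlerRunning(String namePrefix) {
      return Thread.getAllStackTraces().keySet().stream()
          .anyMatch(t -> t.getName().contains(namePrefix));
    }

    public static void main(String[] args) {
      // "CodelFPBQ.default" is the post-HBASE-16089 name quoted in the comment above.
      System.out.println(handlerRunning("CodelFPBQ.default"));
    }
  }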
@@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.regionserver.ChunkCreator;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.MemStoreLABImpl;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
@@ -71,7 +71,7 @@ import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({MasterTests.class, SmallTests.class})
@Category({MasterTests.class, MediumTests.class})
public class TestCatalogJanitor {
@ClassRule

@@ -22,7 +22,7 @@ import java.util.List;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
import org.apache.hadoop.hbase.util.Pair;
@@ -32,7 +32,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({MasterTests.class, MediumTests.class}) // Plays with the ManualEnvironmentEdge
@Category({MasterTests.class, SmallTests.class}) // Plays with the ManualEnvironmentEdge
public class TestClusterStatusPublisher {
@ClassRule

@@ -20,11 +20,9 @@ package org.apache.hadoop.hbase.master;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Arrays;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -37,10 +35,9 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
@@ -48,11 +45,10 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
@Category({MasterTests.class, LargeTests.class})
public class TestMasterRepairMode {
@ClassRule

@@ -30,8 +30,8 @@ import org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.junit.AfterClass;
@@ -40,7 +40,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({ MasterTests.class, LargeTests.class })
@Category({ MasterTests.class, MediumTests.class })
public class TestServerCrashProcedureCarryingMetaStuck {
@ClassRule

@@ -30,8 +30,8 @@ import org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.junit.AfterClass;
@@ -43,7 +43,7 @@ import org.junit.experimental.categories.Category;
/**
* Testcase for HBASE-20634
*/
@Category({ MasterTests.class, LargeTests.class })
@Category({ MasterTests.class, MediumTests.class })
public class TestServerCrashProcedureStuck {
@ClassRule
@@ -33,7 +33,6 @@ import static org.apache.hadoop.hbase.SplitLogCounters.tot_mgr_task_deleted;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.atomic.LongAdder;
@@ -53,8 +52,8 @@ import org.apache.hadoop.hbase.coordination.ZkCoordinatedStateManager;
import org.apache.hadoop.hbase.master.SplitLogManager.Task;
import org.apache.hadoop.hbase.master.SplitLogManager.TaskBatch;
import org.apache.hadoop.hbase.regionserver.TestMasterAddressTracker.NodeCreationListener;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -71,7 +70,7 @@ import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
@Category({MasterTests.class, LargeTests.class})
public class TestSplitLogManager {
@ClassRule
@@ -151,7 +150,9 @@ public class TestSplitLogManager {
@After
public void teardown() throws IOException, KeeperException {
master.stop("");
if (slm != null) slm.stop();
if (slm != null) {
slm.stop();
}
TEST_UTIL.shutdownMiniZKCluster();
}
@@ -212,7 +213,6 @@ public class TestSplitLogManager {
/**
* Test whether the splitlog correctly creates a task in zookeeper
* @throws Exception
*/
@Test
public void testTaskCreation() throws Exception {
@@ -333,7 +333,8 @@ public class TestSplitLogManager {
return (tot_mgr_resubmit.sum() + tot_mgr_resubmit_failed.sum());
}
}, 0, 1, 5*60000); // wait long enough
Assert.assertEquals("Could not run test. Lost ZK connection?", 0, tot_mgr_resubmit_failed.sum());
Assert.assertEquals("Could not run test. Lost ZK connection?",
0, tot_mgr_resubmit_failed.sum());
int version1 = ZKUtil.checkExists(zkw, tasknode);
assertTrue(version1 > version);
byte[] taskstate = ZKUtil.getData(zkw, tasknode);
@@ -460,9 +461,13 @@ public class TestSplitLogManager {
final ServerName worker1 = ServerName.valueOf("worker1,1,1");
SplitLogTask slt = new SplitLogTask.Owned(worker1);
ZKUtil.setData(zkw, tasknode, slt.toByteArray());
if (tot_mgr_heartbeat.sum() == 0) waitForCounter(tot_mgr_heartbeat, 0, 1, to/2);
if (tot_mgr_heartbeat.sum() == 0) {
waitForCounter(tot_mgr_heartbeat, 0, 1, to/2);
}
slm.handleDeadWorker(worker1);
if (tot_mgr_resubmit.sum() == 0) waitForCounter(tot_mgr_resubmit, 0, 1, to+to/2);
if (tot_mgr_resubmit.sum() == 0) {
waitForCounter(tot_mgr_resubmit, 0, 1, to+to/2);
}
if (tot_mgr_resubmit_dead_server_task.sum() == 0) {
waitForCounter(tot_mgr_resubmit_dead_server_task, 0, 1, to + to/2);
}
@@ -485,7 +490,9 @@ public class TestSplitLogManager {
SplitLogTask slt = new SplitLogTask.Owned(worker1);
ZKUtil.setData(zkw, tasknode, slt.toByteArray());
if (tot_mgr_heartbeat.sum() == 0) waitForCounter(tot_mgr_heartbeat, 0, 1, to/2);
if (tot_mgr_heartbeat.sum() == 0) {
waitForCounter(tot_mgr_heartbeat, 0, 1, to/2);
}
// Not yet resubmitted.
Assert.assertEquals(0, tot_mgr_resubmit.sum());
@@ -504,7 +511,8 @@ public class TestSplitLogManager {
LOG.info("testEmptyLogDir");
slm = new SplitLogManager(master, conf);
FileSystem fs = TEST_UTIL.getTestFileSystem();
Path emptyLogDirPath = new Path(new Path(fs.getWorkingDirectory(), HConstants.HREGION_LOGDIR_NAME),
Path emptyLogDirPath = new Path(new Path(fs.getWorkingDirectory(),
HConstants.HREGION_LOGDIR_NAME),
ServerName.valueOf("emptyLogDir", 1, 1).toString());
fs.mkdirs(emptyLogDirPath);
slm.splitLogDistributed(emptyLogDirPath);
@@ -20,12 +20,10 @@ package org.apache.hadoop.hbase.master;
import static org.apache.hadoop.hbase.HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK;
import static org.apache.hadoop.hbase.HConstants.HBASE_SPLIT_WAL_MAX_SPLITTER;
import static org.apache.hadoop.hbase.master.procedure.ServerProcedureInterface.ServerOperationType.SPLIT_WAL;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -42,8 +40,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
@@ -53,12 +51,11 @@ import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@Category({ MasterTests.class, MediumTests.class })
@Category({ MasterTests.class, LargeTests.class })
public class TestSplitWALManager {

@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.master.assignment;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -43,8 +42,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.After;
@@ -59,7 +58,7 @@ import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({MasterTests.class, MediumTests.class})
@Category({MasterTests.class, LargeTests.class})
public class TestMergeTableRegionsProcedure {
@ClassRule

@@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.procedure.DisableTableProcedure;
import org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
import org.junit.AfterClass;
@@ -44,7 +44,7 @@ import org.junit.experimental.categories.Category;
/**
* Testcase for HBASE-23636.
*/
@Category({ MasterTests.class, LargeTests.class })
@Category({ MasterTests.class, MediumTests.class })
public class TestRaceBetweenSCPAndDTP {
@ClassRule
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.master.assignment;
import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -29,8 +28,8 @@ import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.junit.After;
@@ -42,13 +41,12 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
/**
* Testcase for HBASE-20792.
*/
@Category({ LargeTests.class, MasterTests.class })
@Category({ MediumTests.class, MasterTests.class })
public class TestRegionMoveAndAbandon {
private static final Logger LOG = LoggerFactory.getLogger(TestRegionMoveAndAbandon.class);

@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
@@ -48,7 +48,7 @@ import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({ RegionServerTests.class, LargeTests.class })
@Category({ RegionServerTests.class, MediumTests.class })
public class TestRegionReplicaSplit {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =

@@ -33,8 +33,8 @@ import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.junit.After;
@@ -49,7 +49,7 @@ import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category({MasterTests.class, LargeTests.class})
@Category({MasterTests.class, MediumTests.class})
public class TestRegionSplit {
@ClassRule

@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.LoadBalancer;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.net.DNSToSwitchMapping;
import org.junit.BeforeClass;
@@ -48,7 +48,7 @@ import org.slf4j.LoggerFactory;
/**
* Test the load balancer that is created by default.
*/
@Category({MasterTests.class, MediumTests.class})
@Category({MasterTests.class, SmallTests.class})
public class TestDefaultLoadBalancer extends BalancerTestBase {
@ClassRule
@@ -21,7 +21,6 @@ import static org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper.FAVORE
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
@@ -41,12 +40,10 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.After;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
/*

@@ -30,7 +30,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
@@ -40,7 +40,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({ MasterTests.class, SmallTests.class })
@Category({ MasterTests.class, MediumTests.class })
public class TestStochasticLoadBalancerHeterogeneousCostRules extends BalancerTestBase {
@ClassRule

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.master.locking;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
@@ -42,8 +41,8 @@ import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.hamcrest.core.IsInstanceOf;
import org.hamcrest.core.StringStartsWith;
@@ -59,16 +58,14 @@ import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;
@Category({MasterTests.class, MediumTests.class})
@Category({MasterTests.class, LargeTests.class})
public class TestLockProcedure {
@ClassRule

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,8 +34,8 @@ import org.apache.hadoop.hbase.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.WALSplitUtil;
@@ -49,7 +48,7 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({MasterTests.class, LargeTests.class})
@Category({MasterTests.class, MediumTests.class})
public class TestDeleteColumnFamilyProcedureFromClient {
@ClassRule
@@ -65,8 +64,6 @@ public class TestDeleteColumnFamilyProcedureFromClient {
/**
* Start up a mini cluster and put a small table of empty regions into it.
*
* @throws Exception
*/
@BeforeClass
public static void beforeAllTests() throws Exception {
@ -20,9 +20,7 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;

import org.apache.hadoop.hbase.ConcurrentTableModificationException;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;

@ -40,8 +38,8 @@ import org.apache.hadoop.hbase.master.procedure.MasterProcedureTestingUtility.St
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.hadoop.hbase.util.TableDescriptorChecker;

@ -52,7 +50,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

@Category({MasterTests.class, MediumTests.class})
@Category({MasterTests.class, LargeTests.class})
public class TestModifyTableProcedure extends TestTableDDLProcedureBase {

@ClassRule

@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.HBaseClassTestRule;

@ -36,8 +35,8 @@ import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;

@ -48,11 +47,10 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;

@Category({MasterTests.class, MediumTests.class})
@Category({MasterTests.class, LargeTests.class})
public class TestRestoreSnapshotProcedure extends TestTableDDLProcedureBase {

@ClassRule

@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.master.procedure;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;

@Category({ MasterTests.class, LargeTests.class })
@Category({ MasterTests.class, MediumTests.class })
public class TestSCPWithoutMetaWithoutZKCoordinated extends TestSCPWithoutMeta {

@ClassRule

@ -29,8 +29,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;

@ -40,7 +40,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

@Category({ MasterTests.class, LargeTests.class })
@Category({ MasterTests.class, SmallTests.class })
public class TestSchedulerQueueDeadLock {

@ClassRule

@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.master.procedure;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Set;
import org.apache.hadoop.fs.Path;

@ -32,8 +31,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;

@ -51,7 +50,7 @@ import org.junit.rules.TestName;
 * Verify that the HTableDescriptor is updated after
 * addColumn(), deleteColumn() and modifyTable() operations.
 */
@Category({MasterTests.class, LargeTests.class})
@Category({MasterTests.class, MediumTests.class})
public class TestTableDescriptorModificationFromClient {

@ClassRule

@ -20,11 +20,9 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.Arrays;
import java.util.stream.Collectors;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

@ -41,8 +39,8 @@ import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.ModifyRegionUtils;

@ -55,7 +53,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;

@Category({ MasterTests.class, MediumTests.class })
@Category({ MasterTests.class, LargeTests.class })
public class TestTruncateTableProcedure extends TestTableDDLProcedureBase {

@ClassRule

@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.master.snapshot;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;

@ -34,8 +33,8 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.FSUtils;
import org.junit.After;
import org.junit.AfterClass;

@ -49,7 +48,7 @@ import org.slf4j.LoggerFactory;
/**
 * Test that we correctly reload the cache, filter directories, etc.
 */
@Category({MasterTests.class, MediumTests.class})
@Category({MasterTests.class, LargeTests.class})
public class TestSnapshotFileCache {

@ClassRule

@ -109,8 +108,8 @@ public class TestSnapshotFileCache {

  @Test
  public void testSnapshotTempDirReload() throws IOException {
    SnapshotFileCache cache =
        new SnapshotFileCache(fs, rootDir, PERIOD, 10000000, "test-snapshot-file-cache-refresh", new SnapshotFiles());
    SnapshotFileCache cache = new SnapshotFileCache(fs, rootDir, PERIOD, 10000000,
        "test-snapshot-file-cache-refresh", new SnapshotFiles());

    // Add a new non-tmp snapshot
    createAndTestSnapshotV1(cache, "snapshot0v1", false, false, false);

@ -79,7 +79,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.zookeeper.KeeperException;

@ -92,7 +92,7 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category(MediumTests.class)
@Category(LargeTests.class)
public class TestNamespaceAuditor {

@ClassRule

@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.BeforeClass;

@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
/**
 * Check if CompletedProcedureCleaner cleans up failed nonce procedures.
 */
@Category(MediumTests.class)
@Category(LargeTests.class)
public class TestFailedProcCleanup {

@ClassRule

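As a rough, hypothetical illustration of what the size categories are consumed by (the sketch below shows only the plain JUnit 4 mechanism behind the @Category annotations, not how HBase's own build groups tests), a suite that runs only SmallTests-tagged classes could look like this:

  import org.apache.hadoop.hbase.testclassification.SmallTests;
  import org.junit.Test;
  import org.junit.experimental.categories.Categories;
  import org.junit.experimental.categories.Categories.IncludeCategory;
  import org.junit.experimental.categories.Category;
  import org.junit.runner.RunWith;
  import org.junit.runners.Suite.SuiteClasses;

  // Hypothetical suite and member class, not part of this change.
  @RunWith(Categories.class)
  @IncludeCategory(SmallTests.class)
  @SuiteClasses({ SmallTestsOnlySuite.ExampleSmallTest.class })
  public class SmallTestsOnlySuite {

    // Only member classes carrying the included category are run by the suite.
    @Category(SmallTests.class)
    public static class ExampleSmallTest {
      @Test
      public void noop() {
      }
    }
  }
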
Some files were not shown because too many files have changed in this diff.