HBASE-11911 Break up tests into more fine grained categories (Alex Newman)

stack 2014-09-12 21:37:06 -07:00
parent 98be489070
commit 3c9bd7d296
644 changed files with 2713 additions and 1463 deletions
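The change repeated across most of the files below is mechanical: each test class keeps its existing size category (SmallTests, MediumTests) and gains a functional-area category from the new org.apache.hadoop.hbase.testclassification package. A minimal compilable sketch of the resulting annotation pattern (the class and test method here are hypothetical, chosen only for illustration):

package org.apache.hadoop.hbase.client;

import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

// Both a functional-area category and a size category are applied; the old
// single-value form @Category(SmallTests.class) is what gets replaced below.
@Category({ClientTests.class, SmallTests.class})
public class TestCategoryExample { // hypothetical class, not part of this commit
  @Test
  public void testNothing() {
    // intentionally empty; only the class-level annotation matters here
  }
}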

View File

@@ -23,10 +23,12 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestRegionLocations {
  ServerName sn0 = ServerName.valueOf("host0", 10, 10);

View File

@@ -27,7 +27,8 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFuture;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
@@ -62,7 +63,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-@Category(MediumTests.class)
+@Category({ClientTests.class, MediumTests.class})
public class TestAsyncProcess {
  private static final TableName DUMMY_TABLE =
      TableName.valueOf("DUMMY_TABLE");

View File

@@ -21,13 +21,14 @@ package org.apache.hadoop.hbase.client;
import java.util.Arrays;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestAttributes {
  private static final byte [] ROW = new byte [] {'r'};
  @Test

View File

@@ -47,7 +47,8 @@ import org.apache.hadoop.hbase.RegionLocations;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -91,7 +92,7 @@ import com.google.protobuf.ServiceException;
 * Test client behavior w/o setting up a cluster.
 * Mock up cluster emissions.
 */
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestClientNoCluster extends Configured implements Tool {
  private static final Log LOG = LogFactory.getLog(TestClientNoCluster.class);
  private Configuration conf;

View File

@@ -16,13 +16,14 @@ import java.util.Map.Entry;
import java.util.NavigableMap;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestDeleteTimeStamp {
  private static final byte[] ROW = Bytes.toBytes("testRow");
  private static final byte[] FAMILY = Bytes.toBytes("testFamily");

View File

@@ -33,8 +33,9 @@ import java.util.List;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
@@ -48,7 +49,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
// TODO: cover more test cases
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestGet {
  private static final byte [] ROW = new byte [] {'r'};

View File

@@ -22,11 +22,12 @@ import static org.junit.Assert.assertEquals;
import java.util.Map;
import java.util.NavigableMap;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestIncrement {
  @Test
  public void test() {

View File

@@ -24,9 +24,10 @@ import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.SmallTests;
import org.junit.Assert;
import org.junit.Test;
@@ -72,7 +73,7 @@ import org.junit.experimental.categories.Category;
 * Run tests that use the functionality of the Operation superclass for
 * Puts, Gets, Deletes, Scans, and MultiPuts.
 */
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestOperation {
  private static byte [] ROW = Bytes.toBytes("testRow");
  private static byte [] FAMILY = Bytes.toBytes("testFamily");

View File

@@ -17,14 +17,15 @@
 */
package org.apache.hadoop.hbase.client;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
/**
 * Addresses HBASE-6047
 * We test put.has call with all of its polymorphic magic

View File

@@ -25,7 +25,8 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Set;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
@@ -35,7 +36,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
// TODO: cover more test cases
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestScan {
  @Test
  public void testAttributesSerialization() throws IOException {

View File

@@ -25,9 +25,10 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
@@ -43,7 +44,7 @@ import com.google.protobuf.RpcController;
/**
 * Test snapshot logic from the client
 */
-@Category(SmallTests.class)
+@Category({SmallTests.class, ClientTests.class})
public class TestSnapshotFromAdmin {
  private static final Log LOG = LogFactory.getLog(TestSnapshotFromAdmin.class);

View File

@@ -31,9 +31,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.io.SizedCellScanner;
@@ -47,7 +48,7 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestIPCUtil {
  public static final Log LOG = LogFactory.getLog(IPCUtil.class);

View File

@@ -28,13 +28,14 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestPayloadCarryingRpcController {
  @Test
  public void testListOfCellScannerables() throws IOException {

View File

@@ -26,17 +26,17 @@ import java.security.SecureRandom;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
-import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({ClientTests.class, SmallTests.class})
public class TestEncryptionUtil {
  @Test

View File

@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestCellComparator {
  byte[] row1 = Bytes.toBytes("row1");

View File

@@ -24,12 +24,14 @@ import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestCellUtil {
  /**
   * CellScannable used in test. Returns a {@link TestCellScanner}

View File

@@ -44,12 +44,14 @@ import javax.tools.ToolProvider;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestClassFinder {
  private static final Log LOG = LogFactory.getLog(TestClassFinder.class);
  private static final HBaseCommonTestingUtility testUtil = new HBaseCommonTestingUtility();

View File

@@ -26,11 +26,13 @@ import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestCompoundConfiguration extends TestCase {
  private Configuration baseConf;
  private int baseConfSize;

View File

@@ -29,10 +29,12 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestHBaseConfiguration {
  private static final Log LOG = LogFactory.getLog(TestHBaseConfiguration.class);

View File

@@ -28,9 +28,8 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.SmallTests;
-import org.apache.hadoop.hbase.codec.CellCodec;
-import org.apache.hadoop.hbase.codec.Codec;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -38,7 +37,7 @@ import org.junit.experimental.categories.Category;
import com.google.common.io.CountingInputStream;
import com.google.common.io.CountingOutputStream;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestCellCodec {
  @Test

View File

@@ -32,7 +32,8 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
@@ -41,7 +42,7 @@ import org.junit.experimental.categories.Category;
import com.google.common.io.CountingInputStream;
import com.google.common.io.CountingOutputStream;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestCellCodecWithTags {
  @Test

View File

@@ -28,9 +28,8 @@ import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.SmallTests;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.KeyValueCodec;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -38,7 +37,7 @@ import org.junit.experimental.categories.Category;
import com.google.common.io.CountingInputStream;
import com.google.common.io.CountingOutputStream;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestKeyValueCodec {
  @Test
  public void testEmptyWorks() throws IOException {

View File

@@ -32,7 +32,8 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
@@ -41,7 +42,7 @@ import org.junit.experimental.categories.Category;
import com.google.common.io.CountingInputStream;
import com.google.common.io.CountingOutputStream;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestKeyValueCodecWithTags {
  @Test

View File

@@ -27,14 +27,15 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestTagCompressionContext {
  private static final byte[] ROW = Bytes.toBytes("r1");

View File

@@ -27,13 +27,14 @@ import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestCipherProvider {
  public static class MyCipherProvider implements CipherProvider {

View File

@@ -28,12 +28,13 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestEncryption {
  private static final Log LOG = LogFactory.getLog(TestEncryption.class);

View File

@@ -25,13 +25,14 @@ import java.security.Key;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestKeyProvider {
  @Test

View File

@@ -32,12 +32,13 @@ import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestKeyStoreKeyProvider {
  static final Log LOG = LogFactory.getLog(TestKeyStoreKeyProvider.class);

View File

@@ -34,7 +34,8 @@ import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
import org.apache.hadoop.hbase.io.crypto.Encryption;
@@ -44,7 +45,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestAES {
  // Validation for AES in CTR mode with a 128 bit key

View File

@@ -25,7 +25,8 @@ import java.util.Arrays;
import java.util.Random;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
@@ -34,7 +35,7 @@ import org.junit.experimental.categories.Category;
/**
 * Tests LRUDictionary
 */
-@Category(SmallTests.class)
+@Category({MiscTests.class, SmallTests.class})
public class TestLRUDictionary {
  LRUDictionary testee;

View File

@@ -0,0 +1,41 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to the client. This tests the hbase-client package and all of the client tests in
* hbase-server.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface ClientTests {
}
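
The marker interface above has no methods; it exists only to be named in @Category annotations and then selected when tests are run. As a hedged illustration (not part of this commit, and not necessarily how the HBase build selects groups), plain JUnit 4 can filter a fixed set of classes down to those tagged ClientTests with the Categories runner:

package org.apache.hadoop.hbase.testclassification;

import org.apache.hadoop.hbase.client.TestGet;
import org.apache.hadoop.hbase.client.TestScan;
import org.junit.experimental.categories.Categories;
import org.junit.experimental.categories.Categories.IncludeCategory;
import org.junit.runner.RunWith;
import org.junit.runners.Suite.SuiteClasses;

// Hypothetical suite: of the listed classes, only those annotated with
// ClientTests (directly or via a subtype) are executed.
@RunWith(Categories.class)
@IncludeCategory(ClientTests.class)
@SuiteClasses({ TestGet.class, TestScan.class })
public class ClientTestsSuite {
}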

View File

@@ -0,0 +1,41 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to coprocessors.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface CoprocessorTests {
}

View File

@@ -0,0 +1,41 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to the filter package.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface FilterTests {
}

View File

@@ -0,0 +1,40 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as failing commonly on public build infrastructure.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface FlakeyTests {
}

View File

@@ -0,0 +1,41 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to the io package (HFile and the like).
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface IOTests {
}

View File

@@ -16,7 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.testclassification;
/**
 * Tag a test as 'integration/system' test, meaning that the test class has the following

View File

@@ -17,7 +17,7 @@
 * limitations under the License.
 */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.testclassification;
/**
 * Tag a test as 'large', meaning that the test class has the following

View File

@@ -0,0 +1,40 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to mapred or mapreduce.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface MapReduceTests {
}

View File

@@ -0,0 +1,40 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to the master.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface MasterTests {
}

View File

@@ -17,7 +17,7 @@
 * limitations under the License.
 */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.testclassification;
/**
 * Tag a test as 'Medium', meaning that the test class has the following

View File

@@ -0,0 +1,40 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as not easily falling into any of the below categories.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface MiscTests {
}

View File

@@ -0,0 +1,40 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to RPC.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface RPCTests {
}

View File

@@ -0,0 +1,41 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to the regionserver.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface RegionServerTests {
}

View File

@@ -0,0 +1,40 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to replication.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface ReplicationTests {
}

View File

@@ -0,0 +1,41 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to the REST capability of HBase.
*
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface RestTests {
}

View File

@@ -0,0 +1,42 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tag a test as related to security.
*
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface SecurityTests {
}
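
Beyond tagging, the categories can drive selection. A hedged sketch, with all class names hypothetical, of a JUnit Categories suite that runs only the security-tagged class among those listed:

import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Categories;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

// Hypothetical suite: AclExample carries SecurityTests and runs; MiscExample does not and is skipped.
@RunWith(Categories.class)
@Categories.IncludeCategory(SecurityTests.class)
@Suite.SuiteClasses({ SecurityOnlySuite.AclExample.class, SecurityOnlySuite.MiscExample.class })
public class SecurityOnlySuite {

  @Category({SecurityTests.class, SmallTests.class})
  public static class AclExample {
    @Test
    public void runs() {
      // security assertions would go here
    }
  }

  @Category(SmallTests.class)
  public static class MiscExample {
    @Test
    public void skipped() {
      // filtered out by the include rule above
    }
  }
}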

View File

@ -17,7 +17,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase; package org.apache.hadoop.hbase.testclassification;
/** /**
* Tag a test as 'small', meaning that the test class has the following * Tag a test as 'small', meaning that the test class has the following

View File

@ -0,0 +1,42 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Tag a test as related to MapReduce and as taking longer than 5 minutes to run on public
 * build infrastructure.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface VerySlowMapReduceTests {
}
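
A complementary hedged sketch, again with hypothetical names: a suite that keeps MapReduce-tagged tests but filters out anything also carrying this very-slow tag.

import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
import org.junit.Test;
import org.junit.experimental.categories.Categories;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

// Hypothetical suite: FastExample runs, SlowExample is excluded by the very-slow tag.
@RunWith(Categories.class)
@Categories.IncludeCategory(MapReduceTests.class)
@Categories.ExcludeCategory(VerySlowMapReduceTests.class)
@Suite.SuiteClasses({ NotTooSlowMapReduceSuite.FastExample.class, NotTooSlowMapReduceSuite.SlowExample.class })
public class NotTooSlowMapReduceSuite {

  @Category(MapReduceTests.class)
  public static class FastExample {
    @Test
    public void runs() {
      // quick MapReduce-related assertions would go here
    }
  }

  @Category({MapReduceTests.class, VerySlowMapReduceTests.class})
  public static class SlowExample {
    @Test
    public void skipped() {
      // a long-running job would go here
    }
  }
}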

View File

@ -0,0 +1,42 @@
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Tag a test as a region server test that takes longer than 5 minutes to run on public build
 * infrastructure.
* @see org.apache.hadoop.hbase.testclassification.ClientTests
* @see org.apache.hadoop.hbase.testclassification.CoprocessorTests
* @see org.apache.hadoop.hbase.testclassification.FilterTests
* @see org.apache.hadoop.hbase.testclassification.FlakeyTests
* @see org.apache.hadoop.hbase.testclassification.IOTests
* @see org.apache.hadoop.hbase.testclassification.MapReduceTests
* @see org.apache.hadoop.hbase.testclassification.MasterTests
* @see org.apache.hadoop.hbase.testclassification.MiscTests
* @see org.apache.hadoop.hbase.testclassification.RegionServerTests
* @see org.apache.hadoop.hbase.testclassification.ReplicationTests
* @see org.apache.hadoop.hbase.testclassification.RPCTests
* @see org.apache.hadoop.hbase.testclassification.SecurityTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests
* @see org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests
*/
package org.apache.hadoop.hbase.testclassification;
public interface VerySlowRegionServerTests {
}
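
As with the other tags, a hypothetical long-running region server test would advertise itself by pairing this tag with LargeTests:

import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

// Hypothetical example; the very-slow functional tag rides alongside the size tag.
@Category({VerySlowRegionServerTests.class, LargeTests.class})
public class TestSlowRegionServerTagExample {
  @Test
  public void placeholder() {
    // a long-running region server scenario would go here
  }
}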

View File

@ -20,7 +20,8 @@ package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.Order;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
@ -28,7 +29,7 @@ import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestFixedLengthWrapper { public class TestFixedLengthWrapper {
static final byte[][] VALUES = new byte[][] { static final byte[][] VALUES = new byte[][] {

View File

@ -19,14 +19,15 @@ package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestOrderedBlob { public class TestOrderedBlob {
static final byte[][] VALUES = new byte[][] { static final byte[][] VALUES = new byte[][] {

View File

@ -19,14 +19,15 @@ package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestOrderedBlobVar { public class TestOrderedBlobVar {
static final byte[][] VALUES = new byte[][] { static final byte[][] VALUES = new byte[][] {

View File

@ -19,13 +19,14 @@ package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestOrderedString { public class TestOrderedString {
static final String[] VALUES = static final String[] VALUES =

View File

@ -20,7 +20,8 @@ package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.Order;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
@ -28,7 +29,7 @@ import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestRawString { public class TestRawString {
static final String[] VALUES = new String[] { static final String[] VALUES = new String[] {

View File

@ -25,7 +25,8 @@ import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Comparator; import java.util.Comparator;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.Order;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
@ -43,7 +44,7 @@ import org.junit.runners.Parameterized.Parameters;
* custom data type extension for an application POJO. * custom data type extension for an application POJO.
*/ */
@RunWith(Parameterized.class) @RunWith(Parameterized.class)
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestStruct { public class TestStruct {
private Struct generic; private Struct generic;

View File

@ -24,13 +24,14 @@ import static org.junit.Assert.assertNull;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.util.Arrays; import java.util.Arrays;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestStructNullExtension { public class TestStructNullExtension {
/** /**

View File

@ -20,7 +20,8 @@ package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.Order;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
@ -28,7 +29,7 @@ import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestTerminatedWrapper { public class TestTerminatedWrapper {
static final String[] VALUES_STRINGS = new String[] { static final String[] VALUES_STRINGS = new String[] {

View File

@ -20,14 +20,15 @@ package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.Order;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestUnion2 { public class TestUnion2 {
/** /**

View File

@ -24,13 +24,14 @@ import java.util.Map;
import java.util.TreeMap; import java.util.TreeMap;
import junit.framework.TestCase; import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
/** /**
* Test order preservation characteristics of ordered Base64 dialect * Test order preservation characteristics of ordered Base64 dialect
*/ */
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestBase64 extends TestCase { public class TestBase64 extends TestCase {
// Note: uris is sorted. We need to prove that the ordered Base64 // Note: uris is sorted. We need to prove that the ordered Base64
// preserves that ordering // preserves that ordering

View File

@ -21,13 +21,14 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestByteRangeWithKVSerialization { public class TestByteRangeWithKVSerialization {
static void writeCell(PositionedByteRange pbr, KeyValue kv) throws Exception { static void writeCell(PositionedByteRange pbr, KeyValue kv) throws Exception {

View File

@ -29,12 +29,13 @@ import java.util.Random;
import junit.framework.TestCase; import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert; import org.junit.Assert;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestBytes extends TestCase { public class TestBytes extends TestCase {
public void testNullHashCode() { public void testNullHashCode() {
byte [] b = null; byte [] b = null;

View File

@ -24,13 +24,14 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.NoSuchElementException; import java.util.NoSuchElementException;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import static org.junit.Assert.*; import static org.junit.Assert.*;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestConcatenatedLists { public class TestConcatenatedLists {
@Test @Test
public void testUnsupportedOps() { public void testUnsupportedOps() {

View File

@ -30,7 +30,8 @@ import java.io.FileOutputStream;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -38,7 +39,7 @@ import org.junit.experimental.categories.Category;
/** /**
* Test TestCoprocessorClassLoader. More tests are in TestClassLoading * Test TestCoprocessorClassLoader. More tests are in TestClassLoading
*/ */
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestCoprocessorClassLoader { public class TestCoprocessorClassLoader {
private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility(); private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();

View File

@ -18,12 +18,13 @@
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.util;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(MediumTests.class) @Category({MiscTests.class, MediumTests.class})
public class TestCounter { public class TestCounter {
private static final int[] THREAD_COUNTS = {1, 10, 100}; private static final int[] THREAD_COUNTS = {1, 10, 100};
private static final int DATA_COUNT = 1000000; private static final int DATA_COUNT = 1000000;

View File

@ -18,13 +18,14 @@
*/ */
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.util;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import static org.junit.Assert.*; import static org.junit.Assert.*;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestDrainBarrier { public class TestDrainBarrier {
@Test @Test

View File

@ -27,14 +27,15 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
/** /**
* Test TestDynamicClassLoader * Test TestDynamicClassLoader
*/ */
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestDynamicClassLoader { public class TestDynamicClassLoader {
private static final Log LOG = LogFactory.getLog(TestDynamicClassLoader.class); private static final Log LOG = LogFactory.getLog(TestDynamicClassLoader.class);

View File

@ -18,7 +18,8 @@
*/ */
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.util;
import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -31,7 +32,7 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
@Category(MediumTests.class) @Category({MiscTests.class, MediumTests.class})
public class TestEnvironmentEdgeManager { public class TestEnvironmentEdgeManager {
@Test @Test

View File

@ -18,14 +18,15 @@
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.util;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantLock;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestKeyLocker { public class TestKeyLocker {
@Test @Test
public void testLocker(){ public void testLocker(){

View File

@ -23,13 +23,13 @@ import java.util.HashSet;
import java.util.Random; import java.util.Random;
import java.util.Set; import java.util.Set;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator; import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.apache.hadoop.hbase.SmallTests; @Category({MiscTests.class, SmallTests.class})
@Category(SmallTests.class)
public class TestLoadTestKVGenerator { public class TestLoadTestKVGenerator {
private static final int MIN_LEN = 10; private static final int MIN_LEN = 10;

View File

@ -24,11 +24,12 @@ import static org.junit.Assert.assertArrayEquals;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestOrder { public class TestOrder {
byte[][] VALS = { Bytes.toBytes("foo"), Bytes.toBytes("bar"), Bytes.toBytes("baz") }; byte[][] VALS = { Bytes.toBytes("foo"), Bytes.toBytes("bar"), Bytes.toBytes("baz") };

View File

@ -25,11 +25,12 @@ import java.math.BigDecimal;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestOrderedBytes { public class TestOrderedBytes {
// integer constants for testing Numeric code paths // integer constants for testing Numeric code paths

View File

@ -20,7 +20,8 @@ package org.apache.hadoop.hbase.util;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -31,7 +32,7 @@ import java.util.Properties;
* This test is there to dump the properties. It allows to detect possible env issues when * This test is there to dump the properties. It allows to detect possible env issues when
* executing the tests on various environment. * executing the tests on various environment.
*/ */
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestShowProperties { public class TestShowProperties {
private static final Log LOG = LogFactory.getLog(TestShowProperties.class); private static final Log LOG = LogFactory.getLog(TestShowProperties.class);

View File

@ -17,12 +17,13 @@
*/ */
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.util;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestSimpleMutableByteRange { public class TestSimpleMutableByteRange {
@Test @Test

View File

@ -19,12 +19,13 @@ package org.apache.hadoop.hbase.util;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestSimplePositionedMutableByteRange { public class TestSimplePositionedMutableByteRange {
@Test @Test
public void testPosition() { public void testPosition() {

View File

@ -22,13 +22,14 @@ import static org.junit.Assert.assertTrue;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
@Category(SmallTests.class) @Category({MiscTests.class, SmallTests.class})
public class TestThreads { public class TestThreads {
private static final Log LOG = LogFactory.getLog(TestThreads.class); private static final Log LOG = LogFactory.getLog(TestThreads.class);

View File

@ -31,7 +31,8 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
@ -55,7 +56,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(MediumTests.class) @Category({CoprocessorTests.class, MediumTests.class})
public class TestBulkDeleteProtocol { public class TestBulkDeleteProtocol {
private static final byte[] FAMILY1 = Bytes.toBytes("cf1"); private static final byte[] FAMILY1 = Bytes.toBytes("cf1");
private static final byte[] FAMILY2 = Bytes.toBytes("cf2"); private static final byte[] FAMILY2 = Bytes.toBytes("cf2");

View File

@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Table;
@ -29,6 +28,8 @@ import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos; import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -42,7 +43,7 @@ import static junit.framework.Assert.*;
* Test case demonstrating client interactions with the {@link RowCountEndpoint} * Test case demonstrating client interactions with the {@link RowCountEndpoint}
* sample coprocessor Service implementation. * sample coprocessor Service implementation.
*/ */
@Category(MediumTests.class) @Category({CoprocessorTests.class, MediumTests.class})
public class TestRowCountEndpoint { public class TestRowCountEndpoint {
private static final byte[] TEST_TABLE = Bytes.toBytes("testrowcounter"); private static final byte[] TEST_TABLE = Bytes.toBytes("testrowcounter");
private static final byte[] TEST_FAMILY = Bytes.toBytes("f"); private static final byte[] TEST_FAMILY = Bytes.toBytes("f");

View File

@ -27,13 +27,14 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKUtil;
@ -41,7 +42,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.ZooKeeper;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category(MediumTests.class) @Category({CoprocessorTests.class, MediumTests.class})
public class TestZooKeeperScanPolicyObserver { public class TestZooKeeperScanPolicyObserver {
private static final Log LOG = LogFactory.getLog(TestZooKeeperScanPolicyObserver.class); private static final Log LOG = LogFactory.getLog(TestZooKeeperScanPolicyObserver.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

View File

@ -18,7 +18,8 @@ package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@ -43,7 +44,7 @@ import java.io.PrintStream;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import static org.mockito.Mockito.*; import static org.mockito.Mockito.*;
@Category(LargeTests.class) @Category({MapReduceTests.class, LargeTests.class})
public class TestMapReduceExamples { public class TestMapReduceExamples {
private static HBaseTestingUtility util = new HBaseTestingUtility(); private static HBaseTestingUtility util = new HBaseTestingUtility();

View File

@ -23,13 +23,17 @@ import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos; import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedByteRange; import org.apache.hadoop.hbase.util.SimplePositionedByteRange;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({SmallTests.class, MiscTests.class})
public class TestPBCell { public class TestPBCell {
private static final PBCell CODEC = new PBCell(); private static final PBCell CODEC = new PBCell();

View File

@ -19,9 +19,6 @@
package org.apache.hadoop.hbase.master; package org.apache.hadoop.hbase.master;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.master.MetricsMasterSource;
import org.apache.hadoop.hbase.master.MetricsMasterSourceFactory;
import org.apache.hadoop.hbase.master.MetricsMasterSourceImpl;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.assertSame; import static org.junit.Assert.assertSame;

View File

@ -26,6 +26,7 @@ import java.util.Set;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.hadoop.hbase.util.LoadTestTool;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;

View File

@ -23,6 +23,7 @@ import java.io.IOException;
import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.StoreEngine; import org.apache.hadoop.hbase.regionserver.StoreEngine;
import org.apache.hadoop.hbase.regionserver.StripeStoreEngine; import org.apache.hadoop.hbase.regionserver.StripeStoreEngine;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.hadoop.hbase.util.LoadTestTool;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;

View File

@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.AccessController; import org.apache.hadoop.hbase.security.access.AccessController;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.hadoop.hbase.util.LoadTestTool;
import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL; import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;

View File

@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileWriterV3;
import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader; import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter; import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;

View File

@ -22,6 +22,7 @@ import java.util.Arrays;
import java.util.List; import java.util.List;
import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.LoadTestDataGeneratorWithTags; import org.apache.hadoop.hbase.util.LoadTestDataGeneratorWithTags;
import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.hadoop.hbase.util.LoadTestTool;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;

View File

@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.visibility.LoadTestDataGeneratorWithVisibilityLabels; import org.apache.hadoop.hbase.security.visibility.LoadTestDataGeneratorWithVisibilityLabels;
import org.apache.hadoop.hbase.security.visibility.VisibilityClient; import org.apache.hadoop.hbase.security.visibility.VisibilityClient;
import org.apache.hadoop.hbase.security.visibility.VisibilityController; import org.apache.hadoop.hbase.security.visibility.VisibilityController;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.hadoop.hbase.util.LoadTestTool;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;

View File

@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.MultiThreadedWriter; import org.apache.hadoop.hbase.util.MultiThreadedWriter;

View File

@ -25,7 +25,7 @@ import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.RegionSplitter; import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.hbase.util.RegionSplitter.SplitAlgorithm; import org.apache.hadoop.hbase.util.RegionSplitter.SplitAlgorithm;
import org.junit.After; import org.junit.After;

View File

@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.chaos.policies.Policy;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.ipc.RpcClient; import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy; import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;

View File

@ -25,6 +25,7 @@ import java.util.regex.Pattern;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
import org.junit.internal.TextListener; import org.junit.internal.TextListener;

View File

@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.IntegrationTestBase; import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;

View File

@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;

View File

@ -21,13 +21,11 @@ package org.apache.hadoop.hbase.mapreduce;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue; import static org.junit.Assume.assumeTrue;
import java.io.IOException;
import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;

View File

@ -27,7 +27,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestBase; import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;

View File

@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.InvalidFamilyOperationException; import org.apache.hadoop.hbase.InvalidFamilyOperationException;
import org.apache.hadoop.hbase.NamespaceExistException; import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.hadoop.hbase.NamespaceNotFoundException;

View File

@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.IntegrationTestBase; import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;

View File

@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory; import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;

View File

@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.IntegrationTestBase; import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HBaseAdmin;

View File

@ -23,7 +23,7 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests; import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.hadoop.hbase.util.LoadTestTool;
import org.apache.hadoop.hbase.util.MultiThreadedReader; import org.apache.hadoop.hbase.util.MultiThreadedReader;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;

Some files were not shown because too many files have changed in this diff.