HBASE-4737 Addendum tags SmallTests
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1200012 13f79535-47bb-0310-9956-ffa450edef68
parent e896f6d400
commit f5944d1a72
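Context for the diff below (an illustration added here, not part of the original commit): the addendum annotates existing test classes with the JUnit 4 category markers SmallTests and MediumTests, as the hunks show. A minimal sketch of such a tagged class, with a hypothetical class name, looks like this:

import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;

// Illustrative only: a fast, self-contained test carrying the SmallTests marker.
@Category(SmallTests.class)
public class TestExampleSmall {
  @Test
  public void testAddition() {
    assertEquals(2, 1 + 1);
  }
}

A build can then restrict a run to one category, typically by passing the marker class to Surefire's groups configuration; the exact pom wiring is assumed here and is not part of this diff.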
@@ -21,10 +21,12 @@ package org.apache.hadoop.hbase;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
/**
* Test comparing HBase objects.
*/
@Category(SmallTests.class)
public class TestCompare extends TestCase {
/**

@@ -52,4 +54,4 @@ public class TestCompare extends TestCase {
b = new HRegionInfo(t.getName(), Bytes.toBytes("aaaa"), Bytes.toBytes("eeee"));
assertTrue(a.compareTo(b) < 0);
}
}
}

@@ -28,7 +28,9 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.junit.*;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestFSTableDescriptorForceCreation {
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

@@ -69,4 +71,4 @@ public class TestFSTableDescriptorForceCreation {
assertTrue("Should create new table descriptor",
FSTableDescriptors.createTableDescriptor(fs, rootdir, htd, true));
}
}
}

@@ -25,7 +25,9 @@ import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestHRegionLocation {
/**
* HRegionLocations are equal if they have the same 'location' -- i.e. host and

@@ -76,4 +78,4 @@ public class TestHRegionLocation {
int compare2 = hsl2.compareTo(hsl1);
assertTrue((compare1 > 0)? compare2 < 0: compare2 > 0);
}
}
}

@@ -26,10 +26,12 @@ import java.net.InetSocketAddress;
import org.apache.hadoop.hbase.util.Writables;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Tests for {@link HServerAddress}
*/
@Category(SmallTests.class)
public class TestHServerAddress {
@Test
public void testHashCode() {

@@ -80,4 +82,4 @@ public class TestHServerAddress {
(HServerAddress)Writables.getWritable(bytes, new HServerAddress());
assertNotSame(hsa1, deserialized);
}
}
}

@@ -23,7 +23,9 @@ import java.io.IOException;
import org.apache.hadoop.hbase.util.Writables;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestHServerInfo {
@Test
@@ -31,7 +31,9 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValue.MetaComparator;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestKeyValue extends TestCase {
private final Log LOG = LogFactory.getLog(this.getClass().getName());

@@ -47,10 +47,12 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.DataInputBuffer;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Test HBase Writables serializations
*/
@Category(SmallTests.class)
public class TestSerialization {
@Test public void testCompareFilter() throws Exception {

@@ -84,6 +86,7 @@ public class TestSerialization {
assertTrue(Bytes.equals("value".getBytes(), hmw.get("key".getBytes())));
}
@Test public void testTableDescriptor() throws Exception {
final String name = "testTableDescriptor";
HTableDescriptor htd = createTableDescriptor(name);

@@ -18,12 +18,15 @@
package org.apache.hadoop.hbase.avro;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.avro.generated.AResult;
import org.apache.hadoop.hbase.client.Result;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
@Category(SmallTests.class)
public class TestAvroUtil {

@@ -18,14 +18,17 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HConnectionManager.HConnectionImplementation;
import org.apache.hadoop.hbase.client.HConnectionManager.HConnectionKey;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
/**
* {@link HConnection} testing utility.
*/
@Category(SmallTests.class)
public class HConnectionTestingUtility {
/*
* Not part of {@link HBaseTestingUtility} because this class is not

@@ -28,10 +28,13 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestAttributes {
@Test
public void testAttributesSerialization() throws IOException {

@@ -28,11 +28,14 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
// TODO: cover more test cases
@Category(SmallTests.class)
public class TestGet {
@Test
public void testAttributesSerialization() throws IOException {
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import java.io.IOException;

@@ -31,11 +33,13 @@ import java.util.Map;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.experimental.categories.Category;
/**
* Run tests that use the funtionality of the Operation superclass for
* Run tests that use the functionality of the Operation superclass for
* Puts, Gets, Deletes, Scans, and MultiPuts.
*/
@Category(SmallTests.class)
public class TestOperation {
private static byte [] ROW = Bytes.toBytes("testRow");
private static byte [] FAMILY = Bytes.toBytes("testFamily");

@@ -22,7 +22,9 @@ package org.apache.hadoop.hbase.client;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
import static org.apache.hadoop.hbase.HBaseTestCase.assertByteEquals;

@@ -31,6 +33,7 @@ import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
@Category(SmallTests.class)
public class TestResult extends TestCase {
static KeyValue[] genKVs(final byte[] row, final byte[] family,

@@ -28,11 +28,14 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
// TODO: cover more test cases
@Category(SmallTests.class)
public class TestScan {
@Test
public void testAttributesSerialization() throws IOException {

@@ -28,13 +28,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;

@@ -45,11 +39,12 @@ import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.hbase.Server;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import static org.mockito.Mockito.when;
@Category(SmallTests.class)
public class TestCoprocessorInterface extends HBaseTestCase {
static final Log LOG = LogFactory.getLog(TestCoprocessorInterface.class);
static final String DIR = "test/build/data/TestCoprocessorInterface/";
@@ -26,12 +26,7 @@ import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

@@ -39,7 +34,9 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestRegionObserverStacking extends TestCase {
static final String DIR = "test/build/data/TestRegionObserverStacking/";

@@ -31,15 +31,17 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.executor.EventHandler.EventType;
import org.apache.hadoop.hbase.executor.ExecutorService.Executor;
import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorStatus;
import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorType;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.mockito.Mockito.*;
@Category(SmallTests.class)
public class TestExecutorService {
private static final Log LOG = LogFactory.getLog(TestExecutorService.class);

@@ -172,4 +174,4 @@ public class TestExecutorService {
counter.incrementAndGet();
}
}
}
}

@@ -17,10 +17,13 @@
package org.apache.hadoop.hbase.filter;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.experimental.categories.Category;
/**
* Tests for the bit comparator
*/
@Category(SmallTests.class)
public class TestBitComparator extends TestCase {
private static byte[] zeros = new byte[]{0, 0, 0, 0, 0, 0};

@@ -25,14 +25,17 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.experimental.categories.Category;
/**
* Test for the ColumnPaginationFilter, used mainly to test the successful serialization of the filter.
* More test functionality can be found within {@link org.apache.hadoop.hbase.filter.TestFilter#testColumnPaginationFilter()}
*/
@Category(SmallTests.class)
public class TestColumnPaginationFilter extends TestCase
{
private static final byte[] ROW = Bytes.toBytes("row_1_test");

@@ -27,19 +27,16 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestColumnPrefixFilter {
private final static HBaseTestingUtility TEST_UTIL = new
@@ -27,11 +27,7 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;

@@ -41,7 +37,9 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestDependentColumnFilter extends TestCase {
private final Log LOG = LogFactory.getLog(this.getClass());
private static final byte[][] ROWS = {

@@ -29,12 +29,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;

@@ -43,10 +38,12 @@ import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
/**
* Test filters at the HRegion doorstep.
*/
@Category(SmallTests.class)
public class TestFilter extends HBaseTestCase {
private final Log LOG = LogFactory.getLog(this.getClass());
private HRegion region;

@@ -33,13 +33,16 @@ import java.util.List;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
/**
* Tests filter sets
*
*/
@Category(SmallTests.class)
public class TestFilterList extends TestCase {
static final int MAX_PAGES = 2;
static final char FIRST_CHAR = 'a';

@@ -24,13 +24,16 @@ import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.experimental.categories.Category;
/**
* Tests the inclusive stop row filter
*/
@Category(SmallTests.class)
public class TestInclusiveStopFilter extends TestCase {
private final byte [] STOP_ROW = Bytes.toBytes("stop_row");
private final byte [] GOOD_ROW = Bytes.toBytes("good_row");

@@ -86,4 +89,4 @@ public class TestInclusiveStopFilter extends TestCase {
assertFalse("Filter a null", filter.filterRowKey(null, 0, 0));
}
}
}

@@ -27,19 +27,16 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestMultipleColumnPrefixFilter {
private final static HBaseTestingUtility TEST_UTIL = new
@@ -25,10 +25,13 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.experimental.categories.Category;
/**
* Tests for the page filter
*/
@Category(SmallTests.class)
public class TestPageFilter extends TestCase {
static final int ROW_LIMIT = 3;

@@ -31,12 +31,7 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;

@@ -45,12 +40,14 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* This class tests ParseFilter.java
* It tests the entire work flow from when a string is given by the user
* and how it is parsed to construct the corresponding Filter object
*/
@Category(SmallTests.class)
public class TestParseFilter {
ParseFilter f;

@@ -22,7 +22,9 @@ package org.apache.hadoop.hbase.filter;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

@@ -30,6 +32,7 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.UnsupportedEncodingException;
@Category(SmallTests.class)
public class TestPrefixFilter extends TestCase {
Filter mainFilter;
static final char FIRST_CHAR = 'a';

@@ -97,4 +100,4 @@ public class TestPrefixFilter extends TestCase {
private byte [] createRow(final char c) {
return Bytes.toBytes(HOST_PREFIX + Character.toString(c));
}
}
}

@@ -27,8 +27,11 @@ import java.io.DataOutputStream;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestRandomRowFilter extends TestCase {
protected RandomRowFilter quarterChanceFilter;

@@ -88,4 +91,4 @@ public class TestRandomRowFilter extends TestCase {
return newFilter;
}
}
}

@@ -22,8 +22,10 @@ package org.apache.hadoop.hbase.filter;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
/**
* Tests for {@link SingleColumnValueExcludeFilter}. Because this filter

@@ -33,6 +35,7 @@ import org.apache.hadoop.hbase.util.Bytes;
* @author ferdy
*
*/
@Category(SmallTests.class)
public class TestSingleColumnValueExcludeFilter extends TestCase {
private static final byte[] ROW = Bytes.toBytes("test");
private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");

@@ -25,14 +25,17 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.experimental.categories.Category;
/**
* Tests the value filter
*/
@Category(SmallTests.class)
public class TestSingleColumnValueFilter extends TestCase {
private static final byte[] ROW = Bytes.toBytes("test");
private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
@@ -29,15 +29,15 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestHalfStoreFileReader {
/**

@@ -26,8 +26,7 @@ import java.util.List;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.filter.FilterList;

@@ -37,7 +36,9 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparator;
import org.junit.Assert;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestHbaseObjectWritable extends TestCase {
@Override

@@ -38,6 +38,7 @@ import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.hfile.CachedBlock;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;

@@ -46,11 +47,13 @@ import org.apache.hadoop.hbase.regionserver.MemStore;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.experimental.categories.Category;
/**
* Testing the sizing that HeapSize offers and compares to the size given by
* ClassSize.
*/
@Category(SmallTests.class)
public class TestHeapSize extends TestCase {
static final Log LOG = LogFactory.getLog(TestHeapSize.class);
// List of classes implementing HeapSize

@@ -22,11 +22,15 @@ package org.apache.hadoop.hbase.io;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
@Category(SmallTests.class)
public class TestImmutableBytesWritable extends TestCase {
public void testHash() throws Exception {
assertEquals(

@@ -26,12 +26,15 @@ import static org.junit.Assert.assertTrue;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Tests the BlockCacheColumnFamilySummary class
*
*/
@Category(SmallTests.class)
public class TestBlockCacheColumnFamilySummary {

@@ -24,7 +24,10 @@ import java.nio.ByteBuffer;
import java.util.LinkedList;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestCachedBlockQueue extends TestCase {
public void testQueue() throws Exception {

@@ -154,4 +157,4 @@ public class TestCachedBlockQueue extends TestCase {
accessTime,false);
}
}
}
}
@@ -46,6 +46,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@RunWith(Parameterized.class)
@Category(MediumTests.class)
public class TestFixedFileTrailer {
private static final Log LOG = LogFactory.getLog(TestFixedFileTrailer.class);

@@ -46,6 +46,7 @@ import org.junit.experimental.categories.Category;
* Remove after tfile is committed and use the tfile version of this class
* instead.</p>
*/
@Category(SmallTests.class)
public class TestHFile extends HBaseTestCase {
static final Log LOG = LogFactory.getLog(TestHFile.class);

@@ -55,6 +55,7 @@ import org.junit.runners.Parameterized.Parameters;
import static org.junit.Assert.*;
@RunWith(Parameterized.class)
@Category(MediumTests.class)
public class TestHFileBlockIndex {
@Parameters

@@ -46,6 +46,7 @@ import org.junit.experimental.categories.Category;
* Remove after tfile is committed and use the tfile version of this class
* instead.</p>
*/
@Category(MediumTests.class)
public class TestHFilePerformance extends TestCase {
private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static String ROOT_DIR =

@@ -27,13 +27,16 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
@Category(SmallTests.class)
public class TestHFileReaderV1 {
private static final HBaseTestingUtility TEST_UTIL =

@@ -39,9 +39,11 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
import org.apache.hadoop.io.BytesWritable;
import org.junit.experimental.categories.Category;
import org.mortbay.log.Log;
/**

@@ -52,6 +54,7 @@ import org.mortbay.log.Log;
* Remove after tfile is committed and use the tfile version of this class
* instead.</p>
*/
@Category(MediumTests.class)
public class TestHFileSeek extends TestCase {
private static final boolean USE_PREAD = true;
private MyOptions options;

@@ -34,17 +34,18 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Testing writing a version 2 {@link HFile}. This is a low-level test written
* during the development of {@link HFileWriterV2}.
*/
@Category(SmallTests.class)
public class TestHFileWriterV2 {
private static final Log LOG = LogFactory.getLog(TestHFileWriterV2.class);

@@ -25,14 +25,17 @@ import java.util.List;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
/**
* Test {@link HFileScanner#reseekTo(byte[])}
*/
@Category(SmallTests.class)
public class TestReseekTo {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

@@ -86,4 +89,4 @@ public class TestReseekTo {
}
}
}
}
@@ -23,13 +23,14 @@ import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
/**
* Test {@link HFileScanner#seekTo(byte[])} and its variants.
*/
@Category(SmallTests.class)
public class TestSeekTo extends HBaseTestCase {
static KeyValue toKV(String row) {

@@ -33,6 +33,7 @@ import org.junit.experimental.categories.Category;
* Tests will ensure that evictions operate when they're supposed to and do what
* they should, and that cached blocks are accessible when expected to be.
*/
// Starts 100 threads, high variability of execution time => Medium
@Category(MediumTests.class)
public class TestSingleSizeCache {
SingleSizeCache cache;

@@ -22,9 +22,13 @@ package org.apache.hadoop.hbase.io.hfile.slab;
import static org.junit.Assert.*;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.*;
import org.junit.experimental.categories.Category;
/**Test cases for Slab.java*/
@Category(SmallTests.class)
public class TestSlab {
static final int BLOCKSIZE = 1000;
static final int NUMBLOCKS = 100;

@@ -23,12 +23,14 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
import org.apache.hadoop.hbase.io.hfile.slab.SlabCache.SlabStats;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.Ignore;
import org.junit.experimental.categories.Category;
/**
* Basic test of SlabCache. Puts and gets.

@@ -40,6 +42,8 @@ import org.junit.Ignore;
* choose to evict at any time.
*
*/
// Starts 50 threads, high variability of execution time => Medium
@Category(MediumTests.class)
public class TestSlabCache {
static final int CACHE_SIZE = 1000000;
static final int NUM_BLOCKS = 101;

@@ -32,17 +32,20 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Test that delayed RPCs work. Fire up three calls, the first of which should
* be delayed. Check that the last two, which are undelayed, return before the
* first one.
*/
@Category(SmallTests.class)
public class TestDelayedRpc {
public static RpcServer rpcServer;

@@ -19,14 +19,15 @@
*/
package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
/**
* Test of simple partitioner.
*/
@Category(SmallTests.class)
public class TestSimpleTotalOrderPartitioner extends HBaseTestCase {
public void testSplit() throws Exception {
String start = "a";

@@ -17,13 +17,16 @@
*/
package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.util.HashSet;
import static junit.framework.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@Category(SmallTests.class)
public class TestTableSplit {
@Test
public void testHashCode() {
@@ -35,17 +35,7 @@ import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;

@@ -59,8 +49,10 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
@Category(SmallTests.class)
public class TestCatalogJanitor {
/**
* Pseudo server for below tests.

@@ -26,15 +26,13 @@ import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClockOutOfSyncException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestClockSkewDetection {
private static final Log LOG =
LogFactory.getLog(TestClockSkewDetection.class);

@@ -97,4 +95,4 @@ public class TestClockSkewDetection {
LOG.info("Recieved expected exception: "+e);
}
}
}
}

@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.master;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.ServerName;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestDeadServer {
@Test public void testIsDead() {
DeadServer ds = new DeadServer();

@@ -56,4 +58,4 @@ public class TestDeadServer {
assertFalse(ds.isDeadServer(deadServer));
assertFalse(ds.cleanPreviousInstance(deadServerHostComingAlive));
}
}
}

@@ -37,17 +37,17 @@ import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Test the load balancer that is created by default.
*/
@Category(MediumTests.class)
public class TestDefaultLoadBalancer {
private static final Log LOG = LogFactory.getLog(TestDefaultLoadBalancer.class);

@@ -43,6 +43,7 @@ import org.apache.hbase.tmpl.master.AssignmentManagerStatusTmpl;
import org.apache.hbase.tmpl.master.MasterStatusTmpl;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import com.google.common.collect.Lists;

@@ -51,6 +52,7 @@ import com.google.common.collect.Maps;
/**
* Tests for the master status page and its template.
*/
@Category(MediumTests.class)
public class TestMasterStatusServlet {
private HMaster master;

@@ -27,6 +27,7 @@ import java.util.Map;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;

@@ -35,7 +36,9 @@ import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
import junit.framework.TestCase;
import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestMetricsMBeanBase extends TestCase {
private class TestStatistics extends MetricsMBeanBase {
@@ -22,18 +22,17 @@ import java.io.IOException;
import junit.framework.Assert;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.catalog.MetaMigrationRemovingHTD;
import org.apache.hadoop.hbase.util.Writables;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Migration tests that do not need spin up of a cluster.
* @deprecated Remove after we release 0.92
*/
@Category(SmallTests.class)
public class TestMigrationFrom090To092 {
@Test
public void testMigrateHRegionInfoFromVersion0toVersion1()

@@ -55,4 +54,4 @@ public class TestMigrationFrom090To092 {
htd.addFamily(new HColumnDescriptor("family"));
return htd;
}
}
}

@@ -24,13 +24,16 @@ import static org.junit.Assert.*;
import java.io.PrintWriter;
import java.io.StringWriter;
import org.apache.hadoop.hbase.MediumTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Test case for the MemoryBoundedLogMessageBuffer utility.
* Ensures that it uses no more memory than it's supposed to,
* and that it properly deals with multibyte encodings.
*/
@Category(MediumTests.class)
public class TestMemoryBoundedLogMessageBuffer {
private static final long TEN_KB = 10 * 1024;

@@ -23,8 +23,11 @@ import static org.junit.Assert.*;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.hbase.MediumTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestTaskMonitor {
@Test

@@ -24,25 +24,21 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.junit.experimental.categories.Category;
/**
* Testing of HRegion.incrementColumnValue, HRegion.increment,
* and HRegion.append
*/
@Category(MediumTests.class) // Starts 100 threads
public class TestAtomicOperation extends HBaseTestCase {
static final Log LOG = LogFactory.getLog(TestAtomicOperation.class);

@@ -32,17 +32,14 @@ import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestColumnSeeking {
private final static HBaseTestingUtility TEST_UTIL =
@@ -31,17 +31,15 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.collect.Lists;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestCompactSelection extends TestCase {
private final static Log LOG = LogFactory.getLog(TestCompactSelection.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

@@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

@@ -54,6 +55,7 @@ import org.mockito.stubbing.Answer;
/**
* Test compactions
*/
@Category(SmallTests.class)
public class TestCompaction extends HBaseTestCase {
static final Log LOG = LogFactory.getLog(TestCompaction.class.getName());
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

@@ -37,8 +37,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;

@@ -53,11 +52,13 @@ import org.apache.hadoop.hbase.util.CompoundBloomFilterBase;
import org.apache.hadoop.hbase.util.CompoundBloomFilterWriter;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Tests writing Bloom filter blocks in the same part of the file as data
* blocks.
*/
@Category(SmallTests.class)
public class TestCompoundBloomFilter {
private static final HBaseTestingUtility TEST_UTIL =

@@ -26,12 +26,13 @@ import java.util.List;
import java.util.TreeSet;
import java.util.Arrays;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestExplicitColumnTracker extends HBaseTestCase {
private boolean PRINT = false;

@@ -27,11 +27,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;

@@ -39,11 +35,13 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.experimental.categories.Category;
/**
* {@link TestGet} is a medley of tests of get all done up as a single test.
* This class
*/
@Category(SmallTests.class)
public class TestGetClosestAtOrBefore extends HBaseTestCase {
private static final Log LOG = LogFactory.getLog(TestGetClosestAtOrBefore.class);

@@ -27,16 +27,15 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestHRegionInfo {
@Test
public void testCreateHRegionInfoName() throws Exception {
@ -21,18 +21,16 @@ import java.io.IOException;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.hadoop.hbase.DoNotRetryIOException;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.*;
|
||||
import org.apache.hadoop.hbase.client.Delete;
|
||||
import org.apache.hadoop.hbase.client.Get;
|
||||
import org.apache.hadoop.hbase.client.Put;
|
||||
import org.apache.hadoop.hbase.client.Result;
|
||||
import org.apache.hadoop.hbase.client.Scan;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.junit.experimental.categories.Category;
|
||||
|
||||
@Category(SmallTests.class)
|
||||
public class TestKeepDeletes extends HBaseTestCase {
|
||||
private final byte[] T0 = Bytes.toBytes("0");
|
||||
private final byte[] T1 = Bytes.toBytes("1");
|
||||
|
|
|
@ -26,12 +26,12 @@ import java.util.Collections;
|
|||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.*;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.hbase.util.CollectionBackedScanner;
|
||||
import org.junit.experimental.categories.Category;
|
||||
|
||||
|
||||
@Category(SmallTests.class)
|
||||
public class TestKeyValueHeap extends HBaseTestCase {
|
||||
private static final boolean PRINT = false;
|
||||
|
||||
|
|
|
@ -23,10 +23,11 @@ package org.apache.hadoop.hbase.regionserver;
|
|||
import java.io.IOException;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.KeyValueTestUtil;
|
||||
import org.apache.hadoop.hbase.*;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.junit.experimental.categories.Category;
|
||||
|
||||
@Category(SmallTests.class)
|
||||
public class TestKeyValueScanFixture extends TestCase {
|
||||
|
||||
|
||||
|
|
|
@ -23,10 +23,13 @@ import java.util.Iterator;
|
|||
import java.util.SortedSet;
|
||||
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.SmallTests;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.experimental.categories.Category;
|
||||
|
||||
@Category(SmallTests.class)
|
||||
public class TestKeyValueSkipListSet extends TestCase {
|
||||
private final KeyValueSkipListSet kvsls =
|
||||
new KeyValueSkipListSet(KeyValue.COMPARATOR);
|
||||
|
@ -144,4 +147,4 @@ public class TestKeyValueSkipListSet extends TestCase {
|
|||
head = this.kvsls.headSet(splitter);
|
||||
assertTrue(Bytes.equals(head.first().getValue(), value2));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -33,10 +33,7 @@ import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.Store.ScanInfo;
import org.apache.hadoop.hbase.regionserver.StoreScanner.ScanType;
@@ -45,8 +42,10 @@ import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.junit.experimental.categories.Category;

/** memstore test case */
@Category(SmallTests.class)
public class TestMemStore extends TestCase {
private final Log LOG = LogFactory.getLog(this.getClass());
private MemStore memstore;

@@ -29,6 +29,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.MultithreadedTestUtil;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB.Allocation;
import org.junit.Test;

@@ -36,7 +37,9 @@ import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.primitives.Ints;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestMemStoreLAB {

/**

@@ -22,19 +22,19 @@ package org.apache.hadoop.hbase.regionserver;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.filter.TimestampsFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;

/**
* Test Minimum Versions feature (HBASE-4071).
*/
@Category(SmallTests.class)
public class TestMinVersions extends HBaseTestCase {
private final byte[] T0 = Bytes.toBytes("0");
private final byte[] T1 = Bytes.toBytes("1");

@@ -35,12 +35,7 @@ import java.util.TreeSet;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
@@ -48,6 +43,7 @@ import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@@ -56,6 +52,7 @@ import org.junit.runners.Parameterized.Parameters;
* Tests optimized scanning of multiple columns.
*/
@RunWith(Parameterized.class)
@Category(MediumTests.class)
public class TestMultiColumnScanner {

private static final Log LOG = LogFactory.getLog(TestMultiColumnScanner.class);

@@ -30,24 +30,12 @@ import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MultithreadedTestUtil;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
@@ -75,12 +63,14 @@ import org.apache.hadoop.hbase.util.Threads;
import org.junit.Test;

import com.google.common.collect.Lists;
import org.junit.experimental.categories.Category;

/**
* Testing of multiPut in parallel.
*
*/
@Category(MediumTests.class)
public class TestParallelPut extends HBaseTestCase {
static final Log LOG = LogFactory.getLog(TestParallelPut.class);

@@ -24,16 +24,16 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestQueryMatcher extends HBaseTestCase {
private static final boolean PRINT = false;

@@ -25,17 +25,14 @@ import java.io.IOException;
import java.io.StringWriter;
import java.util.List;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.regionserver.metrics.RegionServerMetrics;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hbase.tmpl.regionserver.RSStatusTmpl;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

import com.google.common.collect.Lists;
@@ -43,6 +40,7 @@ import com.google.common.collect.Lists;
/**
* Tests for the region server status page and its template.
*/
@Category(SmallTests.class)
public class TestRSStatusServlet {
private HRegionServer rs;

@@ -20,11 +20,14 @@
package org.apache.hadoop.hbase.regionserver;

import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.experimental.categories.Category;

import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;

@Category(SmallTests.class)
public class TestReadWriteConsistencyControl extends TestCase {
static class Writer implements Runnable {
final AtomicBoolean finished;

@@ -23,14 +23,14 @@ import java.io.IOException;
import java.util.TreeMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

@Category(SmallTests.class)
public class TestRegionSplitPolicy {

private Configuration conf;

@@ -27,16 +27,14 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestResettingCounters {

@Test

@@ -20,13 +20,13 @@

package org.apache.hadoop.hbase.regionserver;

import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestScanDeleteTracker extends HBaseTestCase {

private ScanDeleteTracker sdt;

@@ -24,11 +24,12 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestScanWildcardColumnTracker extends HBaseTestCase {

final static int VERSIONS = 2;

@@ -27,13 +27,7 @@ import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -46,10 +40,12 @@ import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.experimental.categories.Category;

/**
* Test of a long-lived scanner validating as we go.
*/
@Category(SmallTests.class)
public class TestScanner extends HBaseTestCase {
private final Log LOG = LogFactory.getLog(this.getClass());

@@ -31,13 +31,7 @@ import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
@@ -46,14 +40,16 @@ import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

/**
* Test the {@link SplitTransaction} class against an HRegion (as opposed to
* running cluster).
*/
@Category(SmallTests.class)
public class TestSplitTransaction {
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final Path testdir =
TEST_UTIL.getDataTestDir(this.getClass().getName());
private HRegion parent;
@@ -64,15 +60,22 @@ public class TestSplitTransaction {
private static final byte [] ENDROW = new byte [] {'{', '{', '{'};
private static final byte [] GOOD_SPLIT_ROW = new byte [] {'d', 'd', 'd'};
private static final byte [] CF = HConstants.CATALOG_FAMILY;

static {
System.out.println("AAAA static");
}

@Before public void setup() throws IOException {
System.out.println("AAAA setup");
this.fs = FileSystem.get(TEST_UTIL.getConfiguration());
this.fs.delete(this.testdir, true);
this.wal = new HLog(fs, new Path(this.testdir, "logs"),
new Path(this.testdir, "archive"),
TEST_UTIL.getConfiguration());
System.out.println("AAAA setup createRegion");
this.parent = createRegion(this.testdir, this.wal);
TEST_UTIL.getConfiguration().setBoolean("hbase.testing.nocluster", true);
System.out.println("AAAA setup ends");
}

@After public void teardown() throws IOException {
@@ -86,6 +89,7 @@ public class TestSplitTransaction {
}

@Test public void testFailAfterPONR() throws IOException, KeeperException {
System.out.println("AAAA testFailAfterPONR");
final int rowcount = TEST_UTIL.loadRegion(this.parent, CF);
assertTrue(rowcount > 0);
int parentRowCount = countRows(this.parent);
@@ -289,7 +293,7 @@ public class TestSplitTransaction {
return rowcount;
}

static HRegion createRegion(final Path testdir, final HLog wal)
HRegion createRegion(final Path testdir, final HLog wal)
throws IOException {
// Make a region with start and end keys. Use 'aaa', to 'AAA'. The load
// region utility will add rows between 'aaa' and 'zzz'.
@@ -301,4 +305,4 @@ public class TestSplitTransaction {
return HRegion.openHRegion(testdir, hri, htd, wal,
TEST_UTIL.getConfiguration());
}
}
}

@@ -33,10 +33,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.Reference.Range;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -47,6 +44,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

import com.google.common.base.Joiner;
@@ -56,6 +54,7 @@ import com.google.common.collect.Lists;
/**
* Test HStoreFile
*/
@Category(SmallTests.class)
public class TestStoreFile extends HBaseTestCase {
static final Log LOG = LogFactory.getLog(TestStoreFile.class);
private CacheConfig cacheConf = new CacheConfig(conf);

@@ -21,12 +21,12 @@
package org.apache.hadoop.hbase.regionserver;

import junit.framework.TestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.Store.ScanInfo;
import org.apache.hadoop.hbase.regionserver.StoreScanner.ScanType;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;

import java.io.IOException;
import java.util.ArrayList;
@@ -36,6 +36,7 @@ import java.util.NavigableSet;
import java.util.TreeSet;
import static org.apache.hadoop.hbase.regionserver.KeyValueScanFixture.scanFixture;

@Category(SmallTests.class)
public class TestStoreScanner extends TestCase {
private static final String CF_STR = "cf";
final byte [] CF = Bytes.toBytes(CF_STR);

@@ -28,18 +28,15 @@ import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestWideScanner extends HBaseTestCase {
private final Log LOG = LogFactory.getLog(this.getClass());

@@ -25,11 +25,7 @@ import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.util.MockRegionServerServices;
@@ -37,11 +33,13 @@ import org.apache.hadoop.hbase.util.MockServer;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NodeExistsException;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

/**
* Test of the {@link CloseRegionHandler}.
*/
@Category(MediumTests.class)
public class TestCloseRegionHandler {
static final Log LOG = LogFactory.getLog(TestCloseRegionHandler.class);
private final static HBaseTestingUtility HTU = new HBaseTestingUtility();

@@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.Random;

import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.hbase.*;
import org.apache.log4j.Level;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -33,18 +34,14 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.ipc.HBaseRPC;

import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(MediumTests.class)
public class TestHLogBench extends Configured implements Tool {

static final Log LOG = LogFactory.getLog(TestHLogBench.class);

@@ -28,18 +28,19 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.regionserver.wal.HLogSplitter.EntryBuffers;
import org.apache.hadoop.hbase.regionserver.wal.HLogSplitter.RegionEntryBuffer;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import static org.mockito.Mockito.mock;

/**
* Simple testing of a few HLog methods.
*/
@Category(SmallTests.class)
public class TestHLogMethods {
private static final byte[] TEST_REGION = Bytes.toBytes("test_region");;
private static final byte[] TEST_TABLE = Bytes.toBytes("test_table");

@@ -33,11 +33,14 @@ import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import static org.junit.Assert.*;

/**
* Test that the actions are called while playing with an HLog
*/
@Category(SmallTests.class)
public class TestWALActionsListener {
protected static final Log LOG = LogFactory.getLog(TestWALActionsListener.class);

@@ -27,11 +27,14 @@ import java.io.StringWriter;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;

import junit.framework.TestCase;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestCellModel extends TestCase {

private static final long TIMESTAMP = 1245219839331L;

@@ -28,11 +28,14 @@ import java.util.Iterator;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;

import junit.framework.TestCase;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestCellSetModel extends TestCase {

private static final byte[] ROW1 = Bytes.toBytes("testrow1");

@@ -27,7 +27,10 @@ import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestColumnSchemaModel extends TestCase {

protected static final String COLUMN_NAME = "testcolumn";

@@ -27,10 +27,13 @@ import java.util.Iterator;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;

import junit.framework.TestCase;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestRowModel extends TestCase {

private static final byte[] ROW1 = Bytes.toBytes("testrow1");

@@ -27,11 +27,14 @@ import java.io.StringWriter;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;

import junit.framework.TestCase;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestScannerModel extends TestCase {
private static final byte[] START_ROW = Bytes.toBytes("abracadabra");
private static final byte[] END_ROW = Bytes.toBytes("zzyzx");

@@ -28,11 +28,14 @@ import java.util.Iterator;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;

import junit.framework.TestCase;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestStorageClusterStatusModel extends TestCase {

private static final String AS_XML =

Some files were not shown because too many files have changed in this diff.
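For reference, a minimal sketch (not part of this commit) of the tagging pattern applied in every hunk above: each test class is annotated with JUnit 4's @Category, using the HBase SmallTests or MediumTests marker interface, so the build can select tests by size. The class name below is hypothetical; it assumes JUnit 4.8+ and the org.apache.hadoop.hbase.SmallTests interface on the test classpath.

package org.apache.hadoop.hbase;

import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertTrue;

// Hypothetical example class, shown only to illustrate the annotation pattern.
@Category(SmallTests.class)
public class TestExampleSmall {
  @Test
  public void testSomething() {
    assertTrue(1 + 1 == 2);
  }
}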