HBASE-6083 Modify old filter tests to use Junit4/no longer use HBaseTestCase

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1344111 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2012-05-30 05:44:27 +00:00
parent 9bb6236ecc
commit 31d4659cce
11 changed files with 130 additions and 66 deletions

View File

@@ -16,15 +16,17 @@
*/
package org.apache.hadoop.hbase.filter;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
/**
* Tests for the bit comparator
*/
@Category(SmallTests.class)
public class TestBitComparator extends TestCase {
public class TestBitComparator {
private static byte[] zeros = new byte[]{0, 0, 0, 0, 0, 0};
private static byte[] ones = new byte[]{1, 1, 1, 1, 1, 1};
@@ -35,6 +37,7 @@ public class TestBitComparator extends TestCase {
private final int Equal = 0;
private final int NotEqual = 1;
@Test
public void testANDOperation() {
testOperation(zeros, ones, BitComparator.BitwiseOp.AND, NotEqual);
testOperation(data1, ones, BitComparator.BitwiseOp.AND, Equal);
@@ -44,6 +47,7 @@ public class TestBitComparator extends TestCase {
testOperation(ones, data3, BitComparator.BitwiseOp.AND, NotEqual);
}
@Test
public void testOROperation() {
testOperation(ones, zeros, BitComparator.BitwiseOp.OR, Equal);
testOperation(zeros, zeros, BitComparator.BitwiseOp.OR, NotEqual);
@@ -52,6 +56,7 @@ public class TestBitComparator extends TestCase {
testOperation(ones, data3, BitComparator.BitwiseOp.OR, NotEqual);
}
@Test
public void testXOROperation() {
testOperation(ones, zeros, BitComparator.BitwiseOp.XOR, Equal);
testOperation(zeros, zeros, BitComparator.BitwiseOp.XOR, NotEqual);

View File

@@ -28,15 +28,18 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertTrue;
/**
* Test for the ColumnPaginationFilter, used mainly to test the successful serialization of the filter.
* More test functionality can be found within {@link org.apache.hadoop.hbase.filter.TestFilter#testColumnPaginationFilter()}
*/
@Category(SmallTests.class)
public class TestColumnPaginationFilter extends TestCase
public class TestColumnPaginationFilter
{
private static final byte[] ROW = Bytes.toBytes("row_1_test");
private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
@@ -45,9 +48,8 @@ public class TestColumnPaginationFilter extends TestCase
private Filter columnPaginationFilter;
@Override
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
columnPaginationFilter = getColumnPaginationFilter();
}
@@ -88,6 +90,7 @@ public class TestColumnPaginationFilter extends TestCase
* Tests serialization
* @throws Exception
*/
@Test
public void testSerialization() throws Exception {
Filter newFilter = serializationTest(columnPaginationFilter);
basicFilterTests((ColumnPaginationFilter)newFilter);

View File

@@ -36,11 +36,16 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestDependentColumnFilter extends TestCase {
public class TestDependentColumnFilter {
private final Log LOG = LogFactory.getLog(this.getClass());
private static final byte[][] ROWS = {
Bytes.toBytes("test1"),Bytes.toBytes("test2")
@@ -57,31 +62,26 @@ public class TestDependentColumnFilter extends TestCase {
Bytes.toBytes("bad1"), Bytes.toBytes("bad2"), Bytes.toBytes("bad3")
};
private static final byte[] MATCH_VAL = Bytes.toBytes("match");
private HBaseTestingUtility testUtil;
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
List<KeyValue> testVals;
private HRegion region;
@Override
protected void setUp() throws Exception {
super.setUp();
testUtil = new HBaseTestingUtility();
@Before
public void setUp() throws Exception {
testVals = makeTestVals();
HTableDescriptor htd = new HTableDescriptor(getName());
HTableDescriptor htd = new HTableDescriptor(this.getClass().getName());
htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
this.region = HRegion.createHRegion(info, testUtil.getDataTestDir(),
testUtil.getConfiguration(), htd);
this.region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
TEST_UTIL.getConfiguration(), htd);
addData();
}
@Override
protected void tearDown() throws Exception {
super.tearDown();
@After
public void tearDown() throws Exception {
HRegion.closeHRegion(this.region);
}
@@ -160,6 +160,7 @@ public class TestDependentColumnFilter extends TestCase {
/**
* Test scans using a DependentColumnFilter
*/
@Test
public void testScans() throws Exception {
Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER);
@@ -215,6 +216,7 @@ public class TestDependentColumnFilter extends TestCase {
*
* @throws Exception
*/
@Test
public void testFilterDropping() throws Exception {
Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER);
List<KeyValue> accepted = new ArrayList<KeyValue>();

View File

@@ -27,7 +27,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*;
@@ -41,7 +40,13 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.google.common.base.Throwables;
@@ -49,9 +54,10 @@ import com.google.common.base.Throwables;
* Test filters at the HRegion doorstep.
*/
@Category(SmallTests.class)
public class TestFilter extends HBaseTestCase {
public class TestFilter {
private final static Log LOG = LogFactory.getLog(TestFilter.class);
private HRegion region;
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
//
// Rows, Qualifiers, and Values are in two groups, One and Two.
@@ -116,10 +122,9 @@ public class TestFilter extends HBaseTestCase {
private long numRows = ROWS_ONE.length + ROWS_TWO.length;
private long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length;
protected void setUp() throws Exception {
super.setUp();
HTableDescriptor htd = new HTableDescriptor(getName());
@Before
public void setUp() throws Exception {
HTableDescriptor htd = new HTableDescriptor("TestFilter");
htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
htd.addFamily(new HColumnDescriptor(FAMILIES_1[0]));
@@ -128,7 +133,8 @@ public class TestFilter extends HBaseTestCase {
htd.addFamily(new HColumnDescriptor(NEW_FAMILIES[1]));
htd.addFamily(new HColumnDescriptor(FAMILIES_1[1]));
HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
this.region = HRegion.createHRegion(info, this.testDir, this.conf, htd);
this.region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
TEST_UTIL.getConfiguration(), htd);
// Insert first half
for(byte [] ROW : ROWS_ONE) {
@@ -200,14 +206,14 @@ public class TestFilter extends HBaseTestCase {
numRows -= 2;
}
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
HLog hlog = region.getLog();
region.close();
hlog.closeAndDelete();
super.tearDown();
}
@Test
public void testRegionScannerReseek() throws Exception {
// create new rows and column family to show how reseek works..
for (byte[] ROW : ROWS_THREE) {
@@ -274,6 +280,7 @@ public class TestFilter extends HBaseTestCase {
}
}
@Test
public void testNoFilter() throws Exception {
// No filter
long expectedRows = this.numRows;
@@ -289,6 +296,7 @@ public class TestFilter extends HBaseTestCase {
verifyScan(s, expectedRows, expectedKeys/2);
}
@Test
public void testPrefixFilter() throws Exception {
// Grab rows from group one (half of total)
long expectedRows = this.numRows / 2;
@@ -298,6 +306,7 @@ public class TestFilter extends HBaseTestCase {
verifyScan(s, expectedRows, expectedKeys);
}
@Test
public void testPageFilter() throws Exception {
// KVs in first 6 rows
@@ -393,6 +402,7 @@ public class TestFilter extends HBaseTestCase {
*
* @throws Exception
*/
@Test
public void testWhileMatchFilterWithFilterRow() throws Exception {
final int pageSize = 4;
@@ -407,13 +417,13 @@ public class TestFilter extends HBaseTestCase {
scannerCounter++;
if (scannerCounter >= pageSize) {
Assert.assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
}
if (!isMoreResults) {
break;
}
}
Assert.assertEquals("The page filter returned more rows than expected", pageSize, scannerCounter);
assertEquals("The page filter returned more rows than expected", pageSize, scannerCounter);
}
/**
@@ -425,6 +435,7 @@ public class TestFilter extends HBaseTestCase {
*
* @throws Exception
*/
@Test
public void testWhileMatchFilterWithFilterRowKey() throws Exception {
Scan s = new Scan();
String prefix = "testRowOne";
@@ -436,7 +447,7 @@ public class TestFilter extends HBaseTestCase {
ArrayList<KeyValue> values = new ArrayList<KeyValue>();
boolean isMoreResults = scanner.next(values);
if (!isMoreResults || !Bytes.toString(values.get(0).getRow()).startsWith(prefix)) {
Assert.assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
}
if (!isMoreResults) {
break;
@@ -453,6 +464,7 @@ public class TestFilter extends HBaseTestCase {
*
* @throws Exception
*/
@Test
public void testWhileMatchFilterWithFilterKeyValue() throws Exception {
Scan s = new Scan();
WhileMatchFilter filter = new WhileMatchFilter(
@@ -464,13 +476,14 @@ public class TestFilter extends HBaseTestCase {
while (true) {
ArrayList<KeyValue> values = new ArrayList<KeyValue>();
boolean isMoreResults = scanner.next(values);
Assert.assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
assertTrue("The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining());
if (!isMoreResults) {
break;
}
}
}
@Test
public void testInclusiveStopFilter() throws IOException {
// Grab rows from group one
@@ -505,6 +518,7 @@ public class TestFilter extends HBaseTestCase {
}
@Test
public void testQualifierFilter() throws IOException {
// Match two keys (one from each family) in half the rows
@@ -662,7 +676,8 @@ public class TestFilter extends HBaseTestCase {
}
public void testFamilyFilter() throws IOException {
@Test
public void testFamilyFilter() throws IOException {
// Match family, only half of columns returned.
long expectedRows = this.numRows;
@@ -796,6 +811,7 @@ public class TestFilter extends HBaseTestCase {
}
@Test
public void testRowFilter() throws IOException {
// Match a single row, all keys
@@ -942,6 +958,7 @@ public class TestFilter extends HBaseTestCase {
}
@Test
public void testValueFilter() throws IOException {
// Match group one rows
@@ -1065,6 +1082,7 @@ public class TestFilter extends HBaseTestCase {
verifyScanFull(s, kvs);
}
@Test
public void testSkipFilter() throws IOException {
// Test for qualifier regex: "testQualifierOne-2"
@@ -1102,6 +1120,7 @@ public class TestFilter extends HBaseTestCase {
// TODO: This is important... need many more tests for ordering, etc
// There are limited tests elsewhere but we need HRegion level ones here
@Test
public void testFilterList() throws IOException {
// Test getting a single row, single key using Row, Qualifier, and Value
@@ -1134,6 +1153,7 @@ public class TestFilter extends HBaseTestCase {
}
@Test
public void testFirstKeyOnlyFilter() throws IOException {
Scan s = new Scan();
s.setFilter(new FirstKeyOnlyFilter());
@@ -1149,6 +1169,7 @@ public class TestFilter extends HBaseTestCase {
verifyScanFull(s, kvs);
}
@Test
public void testFilterListWithSingleColumnValueFilter() throws IOException {
// Test for HBASE-3191
@@ -1225,6 +1246,7 @@ public class TestFilter extends HBaseTestCase {
verifyScanFull(s, kvs);
}
@Test
public void testSingleColumnValueFilter() throws IOException {
// From HBASE-1821
@@ -1470,6 +1492,7 @@ public class TestFilter extends HBaseTestCase {
}
@Test
public void testColumnPaginationFilter() throws Exception {
// Set of KVs (page: 1; pageSize: 1) - the first set of 1 column per row
@@ -1562,6 +1585,7 @@ public class TestFilter extends HBaseTestCase {
this.verifyScanFull(s, expectedKVs4);
}
@Test
public void testKeyOnlyFilter() throws Exception {
// KVs in first 6 rows

View File

@@ -30,12 +30,16 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import junit.framework.TestCase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNull;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
@@ -43,7 +47,7 @@ import org.junit.experimental.categories.Category;
*
*/
@Category(SmallTests.class)
public class TestFilterList extends TestCase {
public class TestFilterList {
static final int MAX_PAGES = 2;
static final char FIRST_CHAR = 'a';
static final char LAST_CHAR = 'e';
@@ -54,6 +58,7 @@ public class TestFilterList extends TestCase {
* Test "must pass one"
* @throws Exception
*/
@Test
public void testMPONE() throws Exception {
List<Filter> filters = new ArrayList<Filter>();
filters.add(new PageFilter(MAX_PAGES));
@@ -113,6 +118,7 @@ public class TestFilterList extends TestCase {
* Test "must pass all"
* @throws Exception
*/
@Test
public void testMPALL() throws Exception {
List<Filter> filters = new ArrayList<Filter>();
filters.add(new PageFilter(MAX_PAGES));
@@ -155,6 +161,7 @@ public class TestFilterList extends TestCase {
* Test list ordering
* @throws Exception
*/
@Test
public void testOrdering() throws Exception {
List<Filter> filters = new ArrayList<Filter>();
filters.add(new PrefixFilter(Bytes.toBytes("yyy")));
@@ -211,6 +218,7 @@ public class TestFilterList extends TestCase {
* Test serialization
* @throws Exception
*/
@Test
public void testSerialization() throws Exception {
List<Filter> filters = new ArrayList<Filter>();
filters.add(new PageFilter(MAX_PAGES));
@@ -236,6 +244,7 @@ public class TestFilterList extends TestCase {
/**
* Test pass-thru of hints.
*/
@Test
public void testHintPassThru() throws Exception {
final KeyValue minKeyValue = new KeyValue(Bytes.toBytes(0L), null, null);

View File

@@ -27,23 +27,25 @@ import java.io.DataOutputStream;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Tests the inclusive stop row filter
*/
@Category(SmallTests.class)
public class TestInclusiveStopFilter extends TestCase {
public class TestInclusiveStopFilter {
private final byte [] STOP_ROW = Bytes.toBytes("stop_row");
private final byte [] GOOD_ROW = Bytes.toBytes("good_row");
private final byte [] PAST_STOP_ROW = Bytes.toBytes("zzzzzz");
Filter mainFilter;
@Override
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
mainFilter = new InclusiveStopFilter(STOP_ROW);
}
@@ -51,6 +53,7 @@ public class TestInclusiveStopFilter extends TestCase {
* Tests identification of the stop row
* @throws Exception
*/
@Test
public void testStopRowIdentification() throws Exception {
stopRowTests(mainFilter);
}
@@ -59,6 +62,7 @@ public class TestInclusiveStopFilter extends TestCase {
* Tests serialization
* @throws Exception
*/
@Test
public void testSerialization() throws Exception {
// Decompose mainFilter to bytes.
ByteArrayOutputStream stream = new ByteArrayOutputStream();

View File

@@ -24,21 +24,24 @@ import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
/**
* Tests for the page filter
*/
@Category(SmallTests.class)
public class TestPageFilter extends TestCase {
public class TestPageFilter {
static final int ROW_LIMIT = 3;
/**
* test page size filter
* @throws Exception
*/
@Test
public void testPageSize() throws Exception {
Filter f = new PageFilter(ROW_LIMIT);
pageSizeTests(f);
@@ -48,6 +51,7 @@ public class TestPageFilter extends TestCase {
* Test filter serialization
* @throws Exception
*/
@Test
public void testSerialization() throws Exception {
Filter f = new PageFilter(ROW_LIMIT);
// Decompose mainFilter to bytes.

View File

@@ -20,10 +20,11 @@
package org.apache.hadoop.hbase.filter;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.ByteArrayInputStream;
@@ -32,8 +33,10 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.UnsupportedEncodingException;
import static org.junit.Assert.*;
@Category(SmallTests.class)
public class TestPrefixFilter extends TestCase {
public class TestPrefixFilter {
Filter mainFilter;
static final char FIRST_CHAR = 'a';
static final char LAST_CHAR = 'e';
@@ -48,19 +51,22 @@ public class TestPrefixFilter extends TestCase {
}
}
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
this.mainFilter = new PrefixFilter(Bytes.toBytes(HOST_PREFIX));
}
@Test
public void testPrefixOnRow() throws Exception {
prefixRowTests(mainFilter);
}
@Test
public void testPrefixOnRowInsideWhileMatchRow() throws Exception {
prefixRowTests(new WhileMatchFilter(this.mainFilter), true);
}
@Test
public void testSerialization() throws Exception {
// Decompose mainFilter to bytes.
ByteArrayOutputStream stream = new ByteArrayOutputStream();

View File

@@ -25,19 +25,20 @@ import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
@Category(SmallTests.class)
public class TestRandomRowFilter extends TestCase {
public class TestRandomRowFilter {
protected RandomRowFilter quarterChanceFilter;
@Override
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
quarterChanceFilter = new RandomRowFilter(0.25f);
}
@@ -46,6 +47,7 @@ public class TestRandomRowFilter extends TestCase {
*
* @throws Exception
*/
@Test
public void testBasics() throws Exception {
int included = 0;
int max = 1000000;
@@ -68,6 +70,7 @@ public class TestRandomRowFilter extends TestCase {
*
* @throws Exception
*/
@Test
public void testSerialization() throws Exception {
RandomRowFilter newFilter = serializationTest(quarterChanceFilter);
// use epsilon float comparison

View File

@@ -19,24 +19,23 @@
*/
package org.apache.hadoop.hbase.filter;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
/**
* Tests for {@link SingleColumnValueExcludeFilter}. Because this filter
* extends {@link SingleColumnValueFilter}, only the added functionality is
* tested. That is, method filterKeyValue(KeyValue).
*
* @author ferdy
*
*/
@Category(SmallTests.class)
public class TestSingleColumnValueExcludeFilter extends TestCase {
public class TestSingleColumnValueExcludeFilter {
private static final byte[] ROW = Bytes.toBytes("test");
private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
private static final byte[] COLUMN_QUALIFIER = Bytes.toBytes("foo");
@@ -48,6 +47,7 @@ public class TestSingleColumnValueExcludeFilter extends TestCase {
* Test the overridden functionality of filterKeyValue(KeyValue)
* @throws Exception
*/
@Test
public void testFilterKeyValue() throws Exception {
Filter filter = new SingleColumnValueExcludeFilter(COLUMN_FAMILY, COLUMN_QUALIFIER,
CompareOp.EQUAL, VAL_1);

View File

@@ -30,14 +30,17 @@ import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
/**
* Tests the value filter
*/
@Category(SmallTests.class)
public class TestSingleColumnValueFilter extends TestCase {
public class TestSingleColumnValueFilter {
private static final byte[] ROW = Bytes.toBytes("test");
private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
private static final byte [] COLUMN_QUALIFIER = Bytes.toBytes("foo");
@@ -58,9 +61,8 @@ public class TestSingleColumnValueFilter extends TestCase {
Filter regexFilter;
Filter regexPatternFilter;
@Override
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
basicFilter = basicFilterNew();
substrFilter = substrFilterNew();
regexFilter = regexFilterNew();
@@ -172,6 +174,7 @@ public class TestSingleColumnValueFilter extends TestCase {
* Tests identification of the stop row
* @throws Exception
*/
@Test
public void testStop() throws Exception {
basicFilterTests((SingleColumnValueFilter)basicFilter);
substrFilterTests(substrFilter);
@@ -183,6 +186,7 @@ public class TestSingleColumnValueFilter extends TestCase {
* Tests serialization
* @throws Exception
*/
@Test
public void testSerialization() throws Exception {
Filter newFilter = serializationTest(basicFilter);
basicFilterTests((SingleColumnValueFilter)newFilter);