HBASE-18303 Clean up @Parameter boilerplate

This commit is contained in:
Mike Drob 2017-06-30 12:13:56 -05:00
parent 4e9961b4fc
commit 0ded122b1e
14 changed files with 59 additions and 70 deletions

View File

@ -26,6 +26,7 @@ import java.util.List;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
@ -44,20 +45,12 @@ import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestKeyOnlyFilter {
private final boolean lenAsVal;
@Parameterized.Parameter
public boolean lenAsVal;
@Parameters
public static Collection<Object[]> parameters() {
List<Object[]> paramList = new ArrayList<>(2);
{
paramList.add(new Object[] { false });
paramList.add(new Object[] { true });
}
return paramList;
}
public TestKeyOnlyFilter(boolean lenAsVal) {
this.lenAsVal = lenAsVal;
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
}
@Test

View File

@ -20,6 +20,8 @@ package org.apache.hadoop.hbase;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
@ -28,6 +30,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.compress.Compression;
/**
* Common helpers for testing HBase that do not depend on specific server/etc. things.
@ -37,6 +40,25 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
public class HBaseCommonTestingUtility {
protected static final Log LOG = LogFactory.getLog(HBaseCommonTestingUtility.class);
/** Compression algorithms to use in parameterized JUnit 4 tests */
public static final List<Object[]> COMPRESSION_ALGORITHMS_PARAMETERIZED =
Arrays.asList(new Object[][] {
{ Compression.Algorithm.NONE },
{ Compression.Algorithm.GZ }
});
/** This is for unit tests parameterized with two booleans. */
public static final List<Object[]> BOOLEAN_PARAMETERIZED =
Arrays.asList(new Object[][] {
{false},
{true}
});
/** Compression algorithms to use in testing */
public static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = {
Compression.Algorithm.NONE, Compression.Algorithm.GZ
};
protected Configuration conf;
public HBaseCommonTestingUtility() {

View File

@ -47,17 +47,15 @@ import org.junit.runners.Parameterized.Parameters;
@Category({MiscTests.class, SmallTests.class})
public class TestStruct {
private Struct generic;
@SuppressWarnings("rawtypes")
private DataType specialized;
private Object[][] constructorArgs;
@Parameterized.Parameter(value = 0)
public Struct generic;
public TestStruct(Struct generic, @SuppressWarnings("rawtypes") DataType specialized,
Object[][] constructorArgs) {
this.generic = generic;
this.specialized = specialized;
this.constructorArgs = constructorArgs;
}
@SuppressWarnings("rawtypes")
@Parameterized.Parameter(value = 1)
public DataType specialized;
@Parameterized.Parameter(value = 2)
public Object[][] constructorArgs;
@Parameters
public static Collection<Object[]> params() {

View File

@ -44,6 +44,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@ -69,12 +70,7 @@ public class TestByteBufferUtils {
@Parameterized.Parameters
public static Collection<Object[]> parameters() {
List<Object[]> paramList = new ArrayList<>(2);
{
paramList.add(new Object[] { false });
paramList.add(new Object[] { true });
}
return paramList;
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
}
private static void setUnsafe(String fieldName, boolean value) throws Exception {

View File

@ -31,12 +31,15 @@ import org.apache.hadoop.hbase.codec.prefixtree.row.TestRowData;
import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataRandomKeyValuesWithTags;
import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataTrivialWithTags;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import static org.junit.Assume.assumeFalse;
@Category({MiscTests.class, SmallTests.class})
@RunWith(Parameterized.class)
public class TestKeyValueTool {
@ -46,17 +49,14 @@ public class TestKeyValueTool {
return TestRowData.InMemory.getAllAsObjectArray();
}
private TestRowData rows;
public TestKeyValueTool(TestRowData testRows) {
this.rows = testRows;
}
@Parameterized.Parameter
public TestRowData rows;
@Test
public void testRoundTripToBytes() {
if(rows instanceof TestRowDataTrivialWithTags || rows instanceof TestRowDataRandomKeyValuesWithTags) {
return;
}
assumeFalse(rows instanceof TestRowDataTrivialWithTags);
assumeFalse(rows instanceof TestRowDataRandomKeyValuesWithTags);
List<KeyValue> kvs = rows.getInputs();
ByteBuffer bb = KeyValueTestUtil.toByteBufferAndRewind(kvs, false);
List<KeyValue> roundTrippedKvs = KeyValueTestUtil.rewindThenToList(bb, false, false);

View File

@ -55,11 +55,7 @@ public class TestRowEncoder {
@Parameters
public static Collection<Object[]> parameters() {
List<Object[]> parameters = Lists.newArrayList();
for (TestRowData testRows : TestRowData.InMemory.getAll()) {
parameters.add(new Object[] { testRows });
}
return parameters;
return TestRowData.InMemory.getAllAsObjectArray();
}
protected TestRowData rows;

View File

@ -82,10 +82,7 @@ public class TestMultiRowResource {
@Parameterized.Parameters
public static Collection<Object[]> data() {
List<Object[]> params = new ArrayList<>(2);
params.add(new Object[] {Boolean.TRUE});
params.add(new Object[] {Boolean.FALSE});
return params;
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
}
public TestMultiRowResource(Boolean csrf) {

View File

@ -216,26 +216,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
/** Filesystem URI used for map-reduce mini-cluster setup */
private static String FS_URI;
/** Compression algorithms to use in parameterized JUnit 4 tests */
public static final List<Object[]> COMPRESSION_ALGORITHMS_PARAMETERIZED =
Arrays.asList(new Object[][] {
{ Compression.Algorithm.NONE },
{ Compression.Algorithm.GZ }
});
/** This is for unit tests parameterized with two booleans. */
public static final List<Object[]> BOOLEAN_PARAMETERIZED =
Arrays.asList(new Object[][] {
{false},
{true}
});
/** This is for unit tests parameterized with a single boolean. */
public static final List<Object[]> MEMSTORETS_TAGS_PARAMETRIZED = memStoreTSAndTagsCombination();
/** Compression algorithms to use in testing */
public static final Compression.Algorithm[] COMPRESSION_ALGORITHMS ={
Compression.Algorithm.NONE, Compression.Algorithm.GZ
};
/**
* Checks to see if a specific port is available.
@ -278,7 +260,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
private static List<Object[]> bloomAndCompressionCombinations() {
List<Object[]> configurations = new ArrayList<>();
for (Compression.Algorithm comprAlgo :
HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
for (BloomType bloomType : BloomType.values()) {
configurations.add(new Object[] { comprAlgo, bloomType });
}

View File

@ -27,7 +27,7 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.compress.Compression;
@ -51,7 +51,7 @@ public class TestSeekToBlockWithEncoders {
@Parameters
public static Collection<Object[]> parameters() {
return HBaseTestingUtility.BOOLEAN_PARAMETERIZED;
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
}
public TestSeekToBlockWithEncoders(boolean useOffheapData) {

View File

@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -179,7 +180,7 @@ public class TestCacheOnWrite {
List<Object[]> params = new ArrayList<>();
for (BlockCache blockCache : getBlockCaches()) {
for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
for (Compression.Algorithm compress : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
for (boolean cacheCompressedData : new boolean[] { false, true }) {
params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });
}

View File

@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@ -440,7 +441,7 @@ public class TestHFile {
public void testNullMetaBlocks() throws Exception {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
for (Compression.Algorithm compressAlgo :
HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
FSDataOutputStream fout = createFSOutput(mFile);
HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo)

View File

@ -44,6 +44,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@ -73,7 +74,7 @@ public class TestHFileBlockIndex {
@Parameters
public static Collection<Object[]> compressionAlgorithms() {
return HBaseTestingUtility.COMPRESSION_ALGORITHMS_PARAMETERIZED;
return HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS_PARAMETERIZED;
}
public TestHFileBlockIndex(Compression.Algorithm compr) {

View File

@ -39,6 +39,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@ -81,7 +82,7 @@ public class TestHFileWriterV3 {
}
@Parameters
public static Collection<Object[]> parameters() {
return HBaseTestingUtility.BOOLEAN_PARAMETERIZED;
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
}
@Before

View File

@ -31,6 +31,7 @@ import java.util.NavigableSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -89,7 +90,7 @@ public class TestCoprocessorScanPolicy {
@Parameters
public static Collection<Object[]> parameters() {
return HBaseTestingUtility.BOOLEAN_PARAMETERIZED;
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
}
public TestCoprocessorScanPolicy(boolean parallelSeekEnable) {