From e91ef96624ccbaf85a93de8209cef4c0ddcb12ca Mon Sep 17 00:00:00 2001 From: eclark Date: Sun, 18 Aug 2013 20:29:54 +0000 Subject: [PATCH] HBASE-9253 clean up IT test code. git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1515207 13f79535-47bb-0310-9956-ffa450edef68 --- .../hbase/IntegrationTestLazyCfLoading.java | 2 +- .../hbase/IntegrationTestManyRegions.java | 2 +- .../hadoop/hbase/IntegrationTestsDriver.java | 4 ++-- .../hadoop/hbase/chaos/actions/Action.java | 2 +- .../hbase/chaos/actions/AddColumnAction.java | 2 -- .../chaos/actions/CompactTableAction.java | 4 ++-- .../chaos/actions/RemoveColumnAction.java | 3 ++- .../chaos/actions/RestartRandomRsAction.java | 4 ++-- .../hbase/chaos/monkies/ChaosMonkey.java | 21 ++++++++++++++++ .../chaos/monkies/PolicyBasedChaosMonkey.java | 20 +--------------- .../mapreduce/IntegrationTestBulkLoad.java | 10 +------- .../mapreduce/IntegrationTestImportTsv.java | 4 +++- .../hbase/mttr/IntegrationTestMTTR.java | 5 ++-- .../test/IntegrationTestBigLinkedList.java | 24 +++++++++---------- .../test/IntegrationTestLoadAndVerify.java | 14 +++++------ 15 files changed, 57 insertions(+), 64 deletions(-) diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java index 2b77ad5f588..381aff7d501 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java @@ -167,7 +167,7 @@ public class IntegrationTestLazyCfLoading { scf.setFilterIfMissing(true); return scf; } - }; + } @Before public void setUp() throws Exception { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestManyRegions.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestManyRegions.java index d1587fad1d4..d6bc2e22b30 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestManyRegions.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestManyRegions.java @@ -85,7 +85,7 @@ public class IntegrationTestManyRegions { LOG.info(String.format("Deleting existing table %s.", TABLE_NAME)); if (admin.isTableEnabled(TABLE_NAME)) admin.disableTable(TABLE_NAME); admin.deleteTable(TABLE_NAME); - LOG.info(String.format("Existing table %s deleted.")); + LOG.info(String.format("Existing table %s deleted.", TABLE_NAME)); } LOG.info("Cluster ready"); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java index d3f41ad97f0..436bbe1c6d0 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java @@ -102,8 +102,8 @@ public class IntegrationTestsDriver extends AbstractHBaseTool { IntegrationTestingUtility.setUseDistributedCluster(conf); Class[] classes = findIntegrationTestClasses(); LOG.info("Found " + classes.length + " integration tests to run:"); - for (int i = 0; i < classes.length; i++) { - LOG.info(" " + classes[i]); + for (Class aClass : classes) { + LOG.info(" " + aClass); } JUnitCore junit = new JUnitCore(); junit.addListener(new TextListener(System.out)); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java index f9c33831cef..6900291e339 100644 --- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java @@ -56,7 +56,7 @@ public class Action { initialServers = regionServers.toArray(new ServerName[regionServers.size()]); } - public void perform() throws Exception { }; + public void perform() throws Exception { } /** Returns current region servers */ protected ServerName[] getCurrentServers() throws IOException { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java index 82afed931a5..aa8a35c2d5b 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java @@ -31,13 +31,11 @@ import org.apache.hadoop.hbase.util.Bytes; * Action the adds a column family to a table. */ public class AddColumnAction extends Action { - private final Random random; private final byte[] tableName; private HBaseAdmin admin; public AddColumnAction(String tableName) { this.tableName = Bytes.toBytes(tableName); - this.random = new Random(); } @Override diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/CompactTableAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/CompactTableAction.java index 0fae29b08a7..c1f83d119ed 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/CompactTableAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/CompactTableAction.java @@ -24,8 +24,8 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.util.Bytes; /** -* Created by eclark on 8/12/13. -*/ + * Action that queues a table compaction. + */ public class CompactTableAction extends Action { private final byte[] tableNameBytes; private final int majorRatio; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RemoveColumnAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RemoveColumnAction.java index 17e0bd6936f..583dc55f698 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RemoveColumnAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RemoveColumnAction.java @@ -58,7 +58,8 @@ public class RemoveColumnAction extends Action { } int index = random.nextInt(columnDescriptors.length); - while(protectedColumns.contains(columnDescriptors[index].getNameAsString())) { + while(protectedColumns != null && + protectedColumns.contains(columnDescriptors[index].getNameAsString())) { index = random.nextInt(columnDescriptors.length); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.java index 18cdf4ad8fc..7b09dd31051 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomRsAction.java @@ -22,8 +22,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey; /** -* Created by eclark on 8/12/13. 
-*/ + * Action that restarts a random HRegionServer. + */ public class RestartRandomRsAction extends RestartActionBaseAction { public RestartRandomRsAction(long sleepTime) { super(sleepTime); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/ChaosMonkey.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/ChaosMonkey.java index a1cbf161e13..da75c3be480 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/ChaosMonkey.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/ChaosMonkey.java @@ -20,6 +20,27 @@ package org.apache.hadoop.hbase.chaos.monkies; import org.apache.hadoop.hbase.Stoppable; +/** + * A utility to inject faults in a running cluster. + *
<p>
+ * ChaosMonkey defines Actions and Policies. Actions are sequences of events, like: + * - Select a random server to kill + * - Sleep for 5 sec + * - Start the server on the same host + * Actions can also be complex events, like a rolling restart of all of the servers. + *
<p>
+ * Policies, on the other hand, are responsible for executing the actions based on a strategy. + * The default policy is to execute a random action every minute based on predefined action + * weights. ChaosMonkey executes predefined named policies until it is stopped. More than one + * policy can be active at any time. + *
<p>
+ * Chaos monkey can be run from the command line, or can be invoked from integration tests. + * See {@link org.apache.hadoop.hbase.IntegrationTestIngest} or other integration tests that use + * chaos monkey for code examples. + *
<p>
+ * The ChaosMonkey class is inspired by Netflix's tool of the same name: + * http://techblog.netflix.com/2012/07/chaos-monkey-released-into-wild.html + */ public abstract class ChaosMonkey implements Stoppable { public abstract void start() throws Exception; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java index 5a0416e6786..e2671103734 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java @@ -30,25 +30,7 @@ import org.apache.hadoop.hbase.chaos.policies.Policy; import org.apache.hadoop.hbase.util.Pair; /** - * A utility to injects faults in a running cluster. - *
<p>
- * ChaosMonkey defines Action's and Policy's. Actions are sequences of events, like - * - Select a random server to kill - * - Sleep for 5 sec - * - Start the server on the same host - * Actions can also be complex events, like rolling restart of all of the servers. - *
<p>
- * Policies on the other hand are responsible for executing the actions based on a strategy. - * The default policy is to execute a random action every minute based on predefined action - * weights. ChaosMonkey executes predefined named policies until it is stopped. More than one - * policy can be active at any time. - *
<p>
- * Chaos monkey can be run from the command line, or can be invoked from integration tests. - * See {@link org.apache.hadoop.hbase.IntegrationTestIngest} or other integration tests that use - * chaos monkey for code examples. - *
<p>
- * ChaosMonkey class is indeed inspired by the Netflix's same-named tool: - * http://techblog.netflix.com/2012/07/chaos-monkey-released-into-wild.html + * Chaos monkey that given multiple policies will run actions against the cluster. */ public class PolicyBasedChaosMonkey extends ChaosMonkey { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java index 239f9aa70e2..84c33adac86 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java @@ -329,10 +329,6 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase { private Long order; - public LinkKey() { - - } - public LinkKey(long chainId, long order) { this.chainId = chainId; this.order = order; @@ -373,9 +369,6 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase { return rk; } - public LinkChain() { - } - public LinkChain(Long rk, Long next) { this.rk = rk; this.next = next; @@ -416,8 +409,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase { LinkChain linkChain, int numPartitions) { int hash = linkKey.getChainId().hashCode(); - int partition = hash % numPartitions; - return partition; + return hash % numPartitions; } } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java index 8542840220f..53eda270c2f 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java @@ -339,7 +339,9 @@ public class IntegrationTestImportTsv implements Configurable, Tool { fout.write(Bytes.toBytes("testRunFromOutputCommitter\n")); LOG.debug(format("Wrote test data to file: %s", inputPath)); } finally { - fout.close(); + if (fout != null) { + fout.close(); + } } // create a parent job that ships the HBase dependencies. 
This is diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java index da7566e22b1..cb9f31177c5 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java @@ -445,7 +445,7 @@ public class IntegrationTestMTTR { rs = table.getScanner(s); Result result = rs.next(); - return rs != null && result != null && result.size() > 0; + return result != null && result.size() > 0; } finally { if (rs != null) { rs.close(); @@ -510,7 +510,6 @@ public class IntegrationTestMTTR { @Override public Boolean call() throws Exception { int colsPerKey = 10; - int recordSize = 500; int numServers = util.getHBaseClusterInterface().getInitialClusterStatus().getServersSize(); int numKeys = numServers * 5000; int writeThreads = 10; @@ -521,7 +520,7 @@ public class IntegrationTestMTTR { do { int ret = loadTool.run(new String[]{ "-tn", loadTableName.getNameAsString(), - "-write", String.format("%d:%d:%d", colsPerKey, recordSize, writeThreads), + "-write", String.format("%d:%d:%d", colsPerKey, 500, writeThreads), "-num_keys", String.valueOf(numKeys), "-skip_init" }); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java index 097b1672f4f..9df9ad6e1e3 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java @@ -28,6 +28,7 @@ import java.util.Random; import java.util.Set; import java.util.UUID; +import com.google.common.collect.Sets; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; @@ -204,10 +205,6 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { private static final Log LOG = LogFactory.getLog(Generator.class); - public static enum Counts { - UNREFERENCED, UNDEFINED, REFERENCED, CORRUPT - } - static class GeneratorInputFormat extends InputFormat { static class GeneratorInputSplit extends InputSplit implements Writable { @Override @@ -322,7 +319,6 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { */ static class GeneratorMapper extends Mapper { - Random rand = new Random(); byte[][] first = null; byte[][] prev = null; @@ -350,11 +346,11 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { if (this.numNodes < this.wrap) { this.wrap = this.numNodes; } - }; + } protected void cleanup(Context context) throws IOException ,InterruptedException { table.close(); - }; + } @Override protected void map(BytesWritable key, NullWritable value, Context output) throws IOException { @@ -593,7 +589,9 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { context.getCounter(Counts.UNREFERENCED).increment(1); } else { if (refs.size() > 1) { - context.write(new Text(keyString), new Text(refsSb.toString())); + if (refsSb != null) { + context.write(new Text(keyString), new Text(refsSb.toString())); + } context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1); } // node is defined and referenced @@ -1067,18 +1065,18 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { @Override protected Set getColumnFamilies() { - return null; + return 
Sets.newHashSet(Bytes.toString(FAMILY_NAME)); } private static void setJobConf(Job job, int numMappers, long numNodes, - Integer width, Integer wrapMuplitplier) { + Integer width, Integer wrapMultiplier) { job.getConfiguration().setInt(GENERATOR_NUM_MAPPERS_KEY, numMappers); job.getConfiguration().setLong(GENERATOR_NUM_ROWS_PER_MAP_KEY, numNodes); if (width != null) { - job.getConfiguration().setInt(GENERATOR_WIDTH_KEY, width.intValue()); + job.getConfiguration().setInt(GENERATOR_WIDTH_KEY, width); } - if (wrapMuplitplier != null) { - job.getConfiguration().setInt(GENERATOR_WRAP_KEY, wrapMuplitplier.intValue()); + if (wrapMultiplier != null) { + job.getConfiguration().setInt(GENERATOR_WRAP_KEY, wrapMultiplier); } } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java index b29a350be76..22a866b5d85 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java @@ -24,7 +24,6 @@ import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.commons.cli.CommandLine; import org.apache.hadoop.conf.Configuration; @@ -114,7 +113,7 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase { private enum Counters { ROWS_WRITTEN, REFERENCES_WRITTEN, - REFERENCES_CHECKED; + REFERENCES_CHECKED } @Before @@ -122,9 +121,11 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase { util = getTestingUtil(getConf()); util.initializeCluster(3); this.setConf(util.getConfiguration()); - getConf().setLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT / 100); - getConf().setInt(NUM_MAP_TASKS_KEY, NUM_MAP_TASKS_DEFAULT / 100); - getConf().setInt(NUM_REDUCE_TASKS_KEY, NUM_REDUCE_TASKS_DEFAULT / 10); + if (!util.isDistributedCluster()) { + getConf().setLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT / 100); + getConf().setInt(NUM_MAP_TASKS_KEY, NUM_MAP_TASKS_DEFAULT / 100); + getConf().setInt(NUM_REDUCE_TASKS_KEY, NUM_REDUCE_TASKS_DEFAULT / 10); + } } /** @@ -360,8 +361,7 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase { htd.addFamily(new HColumnDescriptor(TEST_FAMILY)); HBaseAdmin admin = getTestingUtil(getConf()).getHBaseAdmin(); - int numPreCreate = 40; - admin.createTable(htd, Bytes.toBytes(0L), Bytes.toBytes(-1L), numPreCreate); + admin.createTable(htd, Bytes.toBytes(0L), Bytes.toBytes(-1L), 40); doLoad(getConf(), htd); doVerify(getConf(), htd);
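--
Reviewer note: the ChaosMonkey javadoc added above describes Actions as pluggable fault-injection steps built on the Action base class, which this patch shows with a no-op perform() and a protected getCurrentServers() helper. As a minimal sketch of a custom Action (the class below is hypothetical and not part of this patch), it only needs to subclass Action and override perform():

package org.apache.hadoop.hbase.chaos.actions;

import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ServerName;

// Hypothetical example: picks a random live region server and logs it.
// A real action would disturb the chosen server (compare RestartRandomRsAction).
public class LogRandomServerAction extends Action {
  private static final Log LOG = LogFactory.getLog(LogRandomServerAction.class);
  private final Random random = new Random();

  @Override
  public void perform() throws Exception {
    // getCurrentServers() is the protected helper shown in Action.java above.
    ServerName[] servers = getCurrentServers();
    if (servers.length == 0) {
      return;
    }
    LOG.info("Selected region server: " + servers[random.nextInt(servers.length)]);
  }
}

A policy (for example, the default random-action-per-minute policy described in the javadoc) would then invoke perform() on its schedule.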