HBASE-9237 Integration test cleanup after ChaosMonkey refactor

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1514864 13f79535-47bb-0310-9956-ffa450edef68

Author: eclark
Date:   2013-08-16 19:36:28 +00:00
Commit: ec67046424
Parent: 0a9173ea85

5 changed files with 24 additions and 14 deletions

IntegrationTestBase.java

@@ -70,11 +70,11 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
   @Override
   protected int doWork() throws Exception {
-    setUpMonkey();
     setUp();
+    setUpMonkey();
     int result = -1;
     try {
-      runTestFromCommandLine();
+      result = runTestFromCommandLine();
     } finally {
       cleanUpMonkey();
       cleanUp();
@@ -90,6 +90,7 @@ public abstract class IntegrationTestBase extends AbstractHBaseTool {
     monkey = fact.setUtil(util)
         .setTableName(getTablename())
         .setColumnFamilies(getColumnFamilies()).build();
+    monkey.start();
   }
   @After
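
For orientation, the reordered flow in IntegrationTestBase now reads roughly as below. This is a simplified sketch of the patched method, not the verbatim source; the trailing return is assumed from the surrounding declaration (doWork() returns int and result is now captured).

// Sketch of IntegrationTestBase.doWork() after this change: the cluster is set up before
// the ChaosMonkey is built and started, and the test's exit code is captured rather than
// dropped.
protected int doWork() throws Exception {
  setUp();           // initialize/verify the cluster first
  setUpMonkey();     // then build the monkey via its factory and start it
  int result = -1;
  try {
    result = runTestFromCommandLine();
  } finally {
    cleanUpMonkey(); // runs even if setup or the test itself threw
    cleanUp();
  }
  return result;     // assumed; not visible in the hunk above
}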

IntegrationTestIngest.java

@@ -41,8 +41,6 @@ public class IntegrationTestIngest extends IntegrationTestBase {
   private static final int SERVER_COUNT = 4; // number of slaves for the smallest cluster
   private static final long DEFAULT_RUN_TIME = 20 * 60 * 1000;
-  protected static String tableName = null;
   /** A soft limit on how long we should run */
   private static final String RUN_TIME_KEY = "hbase.%s.runtime";
@@ -52,7 +50,6 @@ public class IntegrationTestIngest extends IntegrationTestBase {
   private LoadTestTool loadTool;
   protected void setUp(int numSlavesBase) throws Exception {
-    tableName = this.getClass().getSimpleName();
     util = getTestingUtil(null);
     LOG.debug("Initializing/checking cluster has " + numSlavesBase + " servers");
     util.initializeCluster(numSlavesBase);
@@ -63,7 +60,7 @@ public class IntegrationTestIngest extends IntegrationTestBase {
     loadTool.setConf(util.getConfiguration());
     // Initialize load test tool before we start breaking things;
     // LoadTestTool init, even when it is a no-op, is very fragile.
-    int ret = loadTool.run(new String[] { "-tn", tableName, "-init_only" });
+    int ret = loadTool.run(new String[] { "-tn", getTablename(), "-init_only" });
     Assert.assertEquals("Failed to initialize LoadTestTool", 0, ret);
   }
@@ -92,7 +89,7 @@ public class IntegrationTestIngest extends IntegrationTestBase {
   @Override
   public String getTablename() {
-    return tableName;
+    return this.getClass().getSimpleName();
   }
   @Override
@@ -101,8 +98,8 @@ public class IntegrationTestIngest extends IntegrationTestBase {
   }
   private void deleteTableIfNecessary() throws IOException {
-    if (util.getHBaseAdmin().tableExists(tableName)) {
-      util.deleteTable(Bytes.toBytes(tableName));
+    if (util.getHBaseAdmin().tableExists(getTablename())) {
+      util.deleteTable(Bytes.toBytes(getTablename()));
     }
   }
@@ -122,7 +119,7 @@ public class IntegrationTestIngest extends IntegrationTestBase {
         ((runtime - (System.currentTimeMillis() - start))/60000) + " min");
     int ret = loadTool.run(new String[] {
-        "-tn", tableName,
+        "-tn", getTablename(),
         "-write", String.format("%d:%d:%d", colsPerKey, recordSize, writeThreads),
         "-start_key", String.valueOf(startKey),
         "-num_keys", String.valueOf(numKeys),
@@ -135,7 +132,7 @@ public class IntegrationTestIngest extends IntegrationTestBase {
     }
     ret = loadTool.run(new String[] {
-        "-tn", tableName,
+        "-tn", getTablename(),
         "-update", String.format("60:%d", writeThreads),
         "-start_key", String.valueOf(startKey),
         "-num_keys", String.valueOf(numKeys),
@@ -148,7 +145,7 @@ public class IntegrationTestIngest extends IntegrationTestBase {
     }
     ret = loadTool.run(new String[] {
-        "-tn", tableName,
+        "-tn", getTablename(),
         "-read", "100:20",
         "-start_key", String.valueOf(startKey),
         "-num_keys", String.valueOf(numKeys),

PolicyBasedChaosMonkey.java

@@ -139,6 +139,10 @@ public class PolicyBasedChaosMonkey extends ChaosMonkey {
   @Override
   public void stop(String why) {
+    if (policies == null) {
+      return;
+    }
     for (Policy policy : policies) {
       policy.stop(why);
     }
@@ -155,6 +159,9 @@ public class PolicyBasedChaosMonkey extends ChaosMonkey {
    */
   @Override
   public void waitForStop() throws InterruptedException {
+    if (monkeyThreads == null) {
+      return;
+    }
     for (Thread monkeyThread : monkeyThreads) {
       monkeyThread.join();
     }
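
These guards make the shutdown path tolerant of a monkey that was never fully started: stop() and waitForStop() are presumably reached from cleanUpMonkey() in the test's finally block, so they can run while the fields they iterate over are still null, and the patch turns that case into a quiet no-op instead of a NullPointerException. The pattern in isolation (field and type names as in the diff; the rest of the class elided):

// Defensive cleanup: return quietly when there is nothing to stop or join.
@Override
public void stop(String why) {
  if (policies == null) {       // no policies to shut down
    return;
  }
  for (Policy policy : policies) {
    policy.stop(why);
  }
}

@Override
public void waitForStop() throws InterruptedException {
  if (monkeyThreads == null) {  // nothing was started, so there is nothing to join
    return;
  }
  for (Thread monkeyThread : monkeyThreads) {
    monkeyThread.join();
  }
}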

IntegrationTestBigLinkedList.java

@@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
 import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
+import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
@@ -494,6 +495,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", false);
       TableMapReduceUtil.addDependencyJars(job);
+      TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
       TableMapReduceUtil.initCredentials(job);
       boolean success = job.waitForCompletion(true);
@@ -637,6 +639,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       TableMapReduceUtil.initTableMapperJob(getTableName(getConf()).getName(), scan,
           VerifyMapper.class, BytesWritable.class, BytesWritable.class, job);
+      TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
       job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", false);

IntegrationTestLoadAndVerify.java

@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.NMapInputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
+import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
@@ -311,8 +312,8 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
     FileOutputFormat.setOutputPath(job, outputDir);
     TableMapReduceUtil.addDependencyJars(job);
-    TableMapReduceUtil.addDependencyJars(
-        job.getConfiguration(), HTable.class, Lists.class);
+    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
     TableMapReduceUtil.initCredentials(job);
     assertTrue(job.waitForCompletion(true));
   }
@@ -329,6 +330,7 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
     TableMapReduceUtil.initTableMapperJob(
         htd.getTableName().getNameAsString(), scan, VerifyMapper.class,
         BytesWritable.class, BytesWritable.class, job);
+    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
     int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
     TableMapReduceUtil.setScannerCaching(job, scannerCaching);
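
Both MapReduce-driven tests (IntegrationTestBigLinkedList above and IntegrationTestLoadAndVerify here) gain the same dependency line. TableMapReduceUtil.addDependencyJars(Configuration, Class<?>...) resolves the jar each listed class was loaded from and adds it to the job's distributed-cache jar list, so listing AbstractHBaseTool.class ships the module that class lives in to the map and reduce tasks. Below is a rough sketch of the shared job wiring, not either test's verbatim code; the method name, table name, job name, and mapper parameter are placeholders.

// Sketch of a verify-style job that ships an extra dependency jar to the cluster.
// Assumes the usual imports: org.apache.hadoop.conf.Configuration, org.apache.hadoop.mapreduce.Job,
// org.apache.hadoop.hbase.client.Scan, org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil,
// org.apache.hadoop.hbase.mapreduce.TableMapper, org.apache.hadoop.hbase.util.AbstractHBaseTool,
// org.apache.hadoop.io.BytesWritable.
boolean runVerifySketch(Configuration conf, Class<? extends TableMapper> mapperClass)
    throws Exception {
  Job job = new Job(conf, "verify-sketch");            // placeholder job name
  Scan scan = new Scan();
  TableMapReduceUtil.initTableMapperJob("placeholder-table", scan,
      mapperClass, BytesWritable.class, BytesWritable.class, job);
  TableMapReduceUtil.addDependencyJars(job);           // HBase jars + the job's own classes
  TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
  TableMapReduceUtil.initCredentials(job);             // pick up security tokens if needed
  return job.waitForCompletion(true);
}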