HBASE-9757 Reenable fast region move in SlowDeterministicMonkey

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1541811 13f79535-47bb-0310-9956-ffa450edef68

commit 5583310bb1
parent 5a7fa744f8
Author: jxiang
Date:   2013-11-14 04:37:33 +00:00

3 changed files with 10 additions and 30 deletions

ChangeEncodingAction.java

@@ -60,7 +60,7 @@ public class ChangeEncodingAction extends Action {
     LOG.debug("Performing action: Changing encodings on " + tableNameString);
     // possible DataBlockEncoding id's
-    int[] possibleIds = {0, 2, 3, 4, 6};
+    int[] possibleIds = {0, 2, 3, 4/*, 6*/};
     for (HColumnDescriptor descriptor : columnDescriptors) {
       short id = (short) possibleIds[random.nextInt(possibleIds.length)];
       descriptor.setDataBlockEncoding(DataBlockEncoding.getEncodingById(id));
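
For reference, the ids in possibleIds map to DataBlockEncoding constants: 0 is NONE, 2 PREFIX, 3 DIFF, 4 FAST_DIFF, and the commented-out 6 is PREFIX_TREE, so the chaos action stops flipping column families to PREFIX_TREE. A standalone sketch (mine, not part of the commit; assumes hbase-common on the classpath) that prints the mapping:

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class PrintEncodingIds {
  public static void main(String[] args) {
    int[] possibleIds = {0, 2, 3, 4/*, 6*/};
    for (int id : possibleIds) {
      // Same lookup the action performs; 6 would resolve to PREFIX_TREE.
      System.out.println(id + " -> " + DataBlockEncoding.getEncodingById((short) id));
    }
  }
}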

SlowDeterministicMonkeyFactory.java

@@ -55,7 +55,8 @@ public class SlowDeterministicMonkeyFactory extends MonkeyFactory {
       new CompactTableAction(tableName, 0.5f),
       new CompactRandomRegionOfTableAction(tableName, 0.6f),
       new FlushTableAction(tableName),
-      new FlushRandomRegionOfTableAction(tableName)
+      new FlushRandomRegionOfTableAction(tableName),
+      new MoveRandomRegionOfTableAction(tableName)
     };

     // Actions such as split/merge/snapshot.
@@ -73,13 +74,13 @@ public class SlowDeterministicMonkeyFactory extends MonkeyFactory {
     // Destructive actions to mess things around.
     Action[] actions3 = new Action[] {
-      new MoveRegionsOfTableAction(20000, tableName),
-      new MoveRandomRegionOfTableAction(20000, tableName),
+      new MoveRegionsOfTableAction(800, tableName),
+      new MoveRandomRegionOfTableAction(800, tableName),
       new RestartRandomRsAction(60000),
-      new BatchRestartRsAction(60000, 0.5f),
+      new BatchRestartRsAction(5000, 0.5f),
       new RestartActiveMasterAction(5000),
       new RollingBatchRestartRsAction(5000, 1.0f),
-      new RestartRsHoldingMetaAction(35000),
+      new RestartRsHoldingMetaAction(35000)
     };

     // Action to log more info for debugging
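
Judging from the values and the commit title, the leading constructor argument of these actions is a pause in milliseconds between disruptions, so cutting the two move actions from 20000 ms to 800 ms is what "re-enables fast region move": regions now move roughly every 0.8 s instead of every 20 s while the monkey runs (the same MoveRandomRegionOfTableAction is also added, without an explicit interval, to the first batch above). A rough standalone sketch of that pacing; the names here are hypothetical, not HBase code:

import java.util.concurrent.TimeUnit;

public class PeriodicActionSketch {
  interface Step { void perform() throws Exception; }

  // Repeats a chaos step until the deadline, pausing sleepTimeMs between runs.
  static void runFor(Step step, long sleepTimeMs, long totalMs) throws Exception {
    long deadline = System.currentTimeMillis() + totalMs;
    while (System.currentTimeMillis() < deadline) {
      step.perform();                            // e.g. move one random region
      TimeUnit.MILLISECONDS.sleep(sleepTimeMs);  // 800 ms here vs. the old 20000 ms
    }
  }

  public static void main(String[] args) throws Exception {
    runFor(() -> System.out.println("move a random region"), 800, 5_000);
  }
}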

IntegrationTestBigLinkedList.java

@@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.IntegrationTestBase;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.IntegrationTests;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.chaos.monkies.CalmChaosMonkey;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
@@ -65,7 +64,6 @@ import org.apache.hadoop.hbase.mapreduce.TableMapper;
 import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.HBaseFsck;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
@@ -754,7 +752,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     }
   }

-  protected boolean runVerify(String outputDir,
+  protected void runVerify(String outputDir,
       int numReducers, long expectedNumNodes) throws Exception {
     Path outputPath = new Path(outputDir);
     UUID uuid = UUID.randomUUID(); //create a random UUID.
@@ -768,19 +766,10 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     }

     if (!verify.verify(expectedNumNodes)) {
-      try {
-        HBaseFsck fsck = new HBaseFsck(getConf());
-        HBaseFsck.setDisplayFullReport();
-        fsck.connect();
-        fsck.onlineHbck();
-      } catch (Throwable t) {
-        LOG.error("Failed to run hbck", t);
-      }
-      return false;
+      throw new RuntimeException("Verify.verify failed");
     }
     LOG.info("Verify finished with succees. Total nodes=" + expectedNumNodes);
-    return true;
   }

   @Override
@@ -810,17 +799,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       runGenerator(numMappers, numNodes, outputDir, width, wrapMuplitplier);
       expectedNumNodes += numMappers * numNodes;
-      if (!runVerify(outputDir, numReducers, expectedNumNodes)) {
-        if (it.monkey != null && !(it.monkey instanceof CalmChaosMonkey)) {
-          LOG.info("Verify.verify failed, let's stop CM and verify again");
-          it.cleanUpMonkey("Stop monkey before verify again after verify failed");
-          if (!runVerify(outputDir, numReducers, expectedNumNodes)) {
-            LOG.info("Verify.verify failed even without CM, verify one more");
-            runVerify(outputDir, numReducers, expectedNumNodes);
-          }
-        }
-        throw new RuntimeException("Verify.verify failed");
-      }
+      runVerify(outputDir, numReducers, expectedNumNodes);
     }

     return 0;
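
Taken together, the last three hunks make verification fail fast: runVerify used to return false after dumping an hbck report, and the loop then stopped the chaos monkey and re-verified up to twice before throwing; now runVerify throws immediately, which is why the CalmChaosMonkey and HBaseFsck imports could be dropped. If the full hbck report is still wanted on a failure, the deleted calls can be run by hand; a sketch reusing exactly those calls (only the wrapper class and method name are mine):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.HBaseFsck;

public class HbckReport {
  private static final Log LOG = LogFactory.getLog(HbckReport.class);

  // Runs an online hbck with the full report enabled, as the deleted code did.
  public static void dump(Configuration conf) {
    try {
      HBaseFsck fsck = new HBaseFsck(conf);
      HBaseFsck.setDisplayFullReport();
      fsck.connect();
      fsck.onlineHbck();
    } catch (Throwable t) {
      // Best-effort diagnostics; never mask the original verify failure.
      LOG.error("Failed to run hbck", t);
    }
  }
}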