tests: harden this stress test

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1559614 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Mark Robert Miller 2014-01-20 00:36:11 +00:00
parent 4ee5920ba2
commit d16dc6cf6d
1 changed file with 15 additions and 4 deletions

View File

@ -97,18 +97,29 @@ public class StressHdfsTest extends BasicDistributedZkTest {
URISyntaxException {
boolean overshard = random().nextBoolean();
int rep;
int nShards;
int maxReplicasPerNode;
if (overshard) {
createCollection(DELETE_DATA_DIR_COLLECTION, shardCount * 2, 1, 2);
nShards = shardCount * 2;
maxReplicasPerNode = 8;
rep = 2;
} else {
int rep = shardCount / 2;
if (rep == 0) rep = 1;
createCollection(DELETE_DATA_DIR_COLLECTION, rep, 2, 1);
nShards = shardCount / 2;
maxReplicasPerNode = 1;
rep = 2;
if (nShards == 0) nShards = 1;
}
createCollection(DELETE_DATA_DIR_COLLECTION, nShards, rep, maxReplicasPerNode);
waitForRecoveriesToFinish(DELETE_DATA_DIR_COLLECTION, false);
cloudClient.setDefaultCollection(DELETE_DATA_DIR_COLLECTION);
cloudClient.getZkStateReader().updateClusterState(true);
for (int i = 1; i < nShards + 1; i++) {
cloudClient.getZkStateReader().getLeaderRetry(DELETE_DATA_DIR_COLLECTION, "shard" + i, 15000);
}
// collect the data dirs
List<String> dataDirs = new ArrayList<String>();