mirror of https://github.com/apache/lucene.git
SOLR-4990: Beef up BasicHdfsTest and rename it to StressHdfsTest
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1504236 13f79535-47bb-0310-9956-ffa450edef68
parent 7c85efaa53
commit dbbc4261b7
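The test tears collections down through the Collections API and then checks that their HDFS data directories are gone. As a rough standalone sketch of that DELETE call using the SolrJ classes this commit touches (the class name, base URL, and standalone main are illustrative assumptions, not part of the commit):

import java.io.IOException;

import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.CollectionParams.CollectionAction;
import org.apache.solr.common.params.ModifiableSolrParams;

public class DeleteCollectionSketch {
  public static void main(String[] args) throws SolrServerException, IOException {
    // Base URL is an assumption for illustration; any node of the cluster works.
    HttpSolrServer server = new HttpSolrServer("http://127.0.0.1:8983/solr");
    try {
      // Same Collections API call the test issues: action=DELETE&name=delete_data_dir
      ModifiableSolrParams params = new ModifiableSolrParams();
      params.set("action", CollectionAction.DELETE.toString());
      params.set("name", "delete_data_dir");
      QueryRequest request = new QueryRequest(params);
      request.setPath("/admin/collections");
      server.request(request);
    } finally {
      server.shutdown();
    }
  }
}

The request is equivalent to hitting /admin/collections?action=DELETE&name=delete_data_dir over HTTP.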
@@ -20,6 +20,9 @@ package org.apache.solr.cloud.hdfs;
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -27,11 +30,15 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServer;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.cloud.BasicDistributedZkTest;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.zookeeper.KeeperException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -40,8 +47,9 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
 
 @Slow
 @ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
-public class BasicHdfsTest extends BasicDistributedZkTest {
+public class StressHdfsTest extends BasicDistributedZkTest {
 
+  private static final String DELETE_DATA_DIR_COLLECTION = "delete_data_dir";
   private static MiniDFSCluster dfsCluster;
 
   @BeforeClass
@@ -66,10 +74,10 @@ public class BasicHdfsTest extends BasicDistributedZkTest {
     return HdfsTestUtil.getDataDir(dfsCluster, dataDir);
   }
 
-  public BasicHdfsTest() {
+  public StressHdfsTest() {
     super();
     sliceCount = 1;
-    shardCount = 1;
+    shardCount = TEST_NIGHTLY ? 13 : random().nextInt(3) + 1;
   }
 
   protected String getSolrXml() {
@@ -78,29 +86,79 @@ public class BasicHdfsTest extends BasicDistributedZkTest {
 
   @Override
   public void doTest() throws Exception {
-    createCollection("delete_data_dir", 1, 1, 1);
-    waitForRecoveriesToFinish("delete_data_dir", false);
-    cloudClient.setDefaultCollection("delete_data_dir");
-    cloudClient.getZkStateReader().updateClusterState(true);
-    NamedList<Object> response = cloudClient.query(
-        new SolrQuery().setRequestHandler("/admin/system")).getResponse();
-    NamedList<Object> coreInfo = (NamedList<Object>) response.get("core");
-    String dataDir = (String) ((NamedList<Object>) coreInfo.get("directory"))
-        .get("data");
+    int cnt = random().nextInt(2) + 1;
+    for (int i = 0; i < cnt; i++) {
+      createAndDeleteCollection();
+    }
+  }
+
+  private void createAndDeleteCollection() throws SolrServerException,
+      IOException, Exception, KeeperException, InterruptedException,
+      URISyntaxException {
+
+    boolean overshard = random().nextBoolean();
+    if (overshard) {
+      createCollection(DELETE_DATA_DIR_COLLECTION, shardCount * 2, 1, 2);
+    } else {
+      int rep = shardCount / 2;
+      if (rep == 0) rep = 1;
+      createCollection(DELETE_DATA_DIR_COLLECTION, rep, 2, 1);
+    }
+
+    waitForRecoveriesToFinish(DELETE_DATA_DIR_COLLECTION, false);
+    cloudClient.setDefaultCollection(DELETE_DATA_DIR_COLLECTION);
+    cloudClient.getZkStateReader().updateClusterState(true);
+
+
+    // collect the data dirs
+    List<String> dataDirs = new ArrayList<String>();
+
+    int i = 0;
+    for (SolrServer client : clients) {
+      HttpSolrServer c = new HttpSolrServer(getBaseUrl(client) + "/delete_data_dir");
+      c.add(getDoc("id", i++));
+      if (random().nextBoolean()) c.add(getDoc("id", i++));
+      if (random().nextBoolean()) c.add(getDoc("id", i++));
+      if (random().nextBoolean()) {
+        c.commit();
+      } else {
+        c.commit(true, true, true);
+      }
+
+      c.query(new SolrQuery("id:" + i));
+      c.setSoTimeout(30000);
+      c.setConnectionTimeout(30000);
+      NamedList<Object> response = c.query(
+          new SolrQuery().setRequestHandler("/admin/system")).getResponse();
+      NamedList<Object> coreInfo = (NamedList<Object>) response.get("core");
+      String dataDir = (String) ((NamedList<Object>) coreInfo.get("directory"))
+          .get("data");
+      dataDirs.add(dataDir);
+      c.shutdown();
+    }
+
+    if (random().nextBoolean()) {
+      cloudClient.deleteByQuery("*:*");
+      cloudClient.commit();
+
+      assertEquals(0, cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+    }
 
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", "delete_data_dir");
+    params.set("name", DELETE_DATA_DIR_COLLECTION);
     QueryRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
     cloudClient.request(request);
 
-    Configuration conf = new Configuration();
-    conf.setBoolean("fs.hdfs.impl.disable.cache", true);
-    FileSystem fs = FileSystem.newInstance(new URI(dataDir), conf);
-    assertFalse(
-        "Data directory exists after collection removal : "
-            + dataDir, fs.exists(new Path(dataDir)));
-    fs.close();
+    // check that all dirs are gone
+    for (String dataDir : dataDirs) {
+      Configuration conf = new Configuration();
+      FileSystem fs = FileSystem.newInstance(new URI(dataDir), conf);
+      assertFalse(
+          "Data directory exists after collection removal : " + dataDir,
+          fs.exists(new Path(dataDir)));
+      fs.close();
+    }
   }
 }
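For reference, the per-directory existence check in the new createAndDeleteCollection loop can be reproduced on its own with the Hadoop FileSystem API. The data-directory URI below is an illustrative assumption; the test itself derives it from each core's reported "data" directory:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsDataDirCheckSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative assumption: the test reads this URI from each core's /admin/system "data" entry.
    String dataDir = "hdfs://127.0.0.1:8020/solr/delete_data_dir/core_node1/data";

    Configuration conf = new Configuration();
    // The pre-rename test set this so a fresh, uncached FileSystem instance is used.
    conf.setBoolean("fs.hdfs.impl.disable.cache", true);

    FileSystem fs = FileSystem.newInstance(new URI(dataDir), conf);
    try {
      // The test asserts this is false after the collection is deleted.
      boolean exists = fs.exists(new Path(dataDir));
      System.out.println(dataDir + (exists ? " still exists" : " was removed"));
    } finally {
      fs.close();
    }
  }
}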