mirror of https://github.com/apache/lucene.git
merge trunk
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5493@1574967 13f79535-47bb-0310-9956-ffa450edef68
commit a49ce84898
@@ -16,6 +16,7 @@
     <orderEntry type="library" scope="TEST" name="HSQLDB" level="project" />
     <orderEntry type="library" scope="TEST" name="Derby" level="project" />
     <orderEntry type="library" scope="TEST" name="Solr DIH test library" level="project" />
+    <orderEntry type="library" scope="TEST" name="Solr example library" level="project" />
     <orderEntry type="library" name="Solr core library" level="project" />
     <orderEntry type="library" name="Solrj library" level="project" />
     <orderEntry type="library" name="Solr DIH library" level="project" />
@@ -18,6 +18,7 @@
     <orderEntry type="library" name="Solr morphlines core library" level="project" />
     <orderEntry type="library" name="Solr morphlines cell library" level="project" />
     <orderEntry type="library" scope="TEST" name="Solr morphlines core test library" level="project" />
+    <orderEntry type="library" scope="TEST" name="Solr example library" level="project" />
     <orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
     <orderEntry type="module" scope="TEST" module-name="solr-test-framework" />
     <orderEntry type="module" module-name="solr-core" />
@@ -440,8 +440,10 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCase {
     if (dir instanceof BaseDirectoryWrapper) {
       ((BaseDirectoryWrapper) dir).setCheckIndexOnClose(false); // don't double-checkIndex, we do it ourselves.
     }
+    MockAnalyzer analyzer = new MockAnalyzer(random());
+    analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
     final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random())).setInfoStream(new FailOnNonBulkMergesInfoStream());
+        analyzer).setInfoStream(new FailOnNonBulkMergesInfoStream());

     if (LuceneTestCase.TEST_NIGHTLY) {
       // newIWConfig makes smallish max seg size, which
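The hunk above builds a single MockAnalyzer up front and caps its maximum token length at IndexWriter.MAX_TERM_LENGTH, so the randomized analyzer can never emit a token longer than IndexWriter accepts. Below is a minimal sketch of that capping pattern, not the actual test: the class name is hypothetical, and it assumes a LuceneTestCase subclass so that random(), TEST_VERSION_CURRENT, and newIndexWriterConfig(...) are available, exactly as they are in the diff above.

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

// Hypothetical sketch class; only the capped-token-length pattern matters here.
public class TestCappedTokenLengthSketch extends LuceneTestCase {

  public void testAnalyzerNeverExceedsTermLimit() throws Exception {
    // Reuse one analyzer instead of constructing a second MockAnalyzer
    // inline when building the IndexWriterConfig.
    MockAnalyzer analyzer = new MockAnalyzer(random());

    // Pick a random maximum token length, but never above IndexWriter's
    // hard limit, so no generated token can be rejected as too long.
    analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));

    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    assertNotNull(conf);
  }
}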
@@ -1627,7 +1627,7 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
     if (!created)
       throw new SolrException(ErrorCode.SERVER_ERROR, "Could not fully createcollection: " + message.getStr("name"));

-    log.info("going to create cores replicas shardNames {} , repFactor : {}", shardNames, repFactor);
+    log.info("Creating SolrCores for new collection, shardNames {} , replicationFactor : {}", shardNames, repFactor);
     Map<String ,ShardRequest> coresToCreate = new LinkedHashMap<String, ShardRequest>();
     for (int i = 1; i <= shardNames.size(); i++) {
       String sliceName = shardNames.get(i-1);
@@ -1671,14 +1671,17 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
         sreq.actualShards = sreq.shards;
         sreq.params = params;

-        if(isLegacyCloud) shardHandler.submit(sreq, sreq.shards[0], sreq.params);
-        else coresToCreate.put(coreName, sreq);
+        if(isLegacyCloud) {
+          shardHandler.submit(sreq, sreq.shards[0], sreq.params);
+        } else {
+          coresToCreate.put(coreName, sreq);
+        }
       }
     }

     if(!isLegacyCloud) {
-      //wait for all replica entries to be created
-      Map<String, Replica> replicas = lookupReplicas(collectionName, coresToCreate.keySet());
+      // wait for all replica entries to be created
+      Map<String, Replica> replicas = waitToSeeReplicasInState(collectionName, coresToCreate.keySet());
       for (Map.Entry<String, ShardRequest> e : coresToCreate.entrySet()) {
         ShardRequest sreq = e.getValue();
         sreq.params.set(CoreAdminParams.CORE_NODE_NAME, replicas.get(e.getKey()).getName());
@@ -1704,37 +1707,35 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
     }
   }

-  private Map<String, Replica> lookupReplicas(String collectionName, Collection<String> coreNames) throws InterruptedException {
+  private Map<String, Replica> waitToSeeReplicasInState(String collectionName, Collection<String> coreNames) throws InterruptedException {
     Map<String, Replica> result = new HashMap<String, Replica>();
-    long endTime = System.nanoTime() + TimeUnit.NANOSECONDS.convert(3, TimeUnit.SECONDS);
-    for(;;) {
-      DocCollection coll = zkStateReader.getClusterState().getCollection(collectionName);
-      for (String coreName : coreNames) {
-        if(result.containsKey(coreName)) continue;
+    long endTime = System.nanoTime() + TimeUnit.NANOSECONDS.convert(30, TimeUnit.SECONDS);
+    while (true) {
+      DocCollection coll = zkStateReader.getClusterState().getCollection(
+          collectionName);
+      for (String coreName : coreNames) {
+        if (result.containsKey(coreName)) continue;
         for (Slice slice : coll.getSlices()) {
           for (Replica replica : slice.getReplicas()) {
-            if(coreName.equals(replica.getStr(ZkStateReader.CORE_NAME_PROP))) {
-              result.put(coreName,replica);
+            if (coreName.equals(replica.getStr(ZkStateReader.CORE_NAME_PROP))) {
+              result.put(coreName, replica);
               break;
             }
           }
         }
       }

-      if(result.size() == coreNames.size()) {
+      if (result.size() == coreNames.size()) {
         return result;
       }
-      if( System.nanoTime() > endTime) {
-        //time up . throw exception and go out
-        throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to create replica entries in ZK");
+      if (System.nanoTime() > endTime) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, "Timed out waiting to see all replicas in cluster state.");
       }

       Thread.sleep(100);
     }

   }


   private void addReplica(ClusterState clusterState, ZkNodeProps message, NamedList results) throws KeeperException, InterruptedException {
     String collection = message.getStr(COLLECTION_PROP);
     String node = message.getStr("node");
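The renamed helper above polls the cluster state until every requested core name shows up as a replica, sleeping 100 ms between checks and giving up after 30 seconds (raised from 3). The sketch below is not the Solr code; it is a minimal, self-contained illustration of that deadline-polling pattern using only JDK types, with a Supplier standing in for the ZkStateReader lookup.

import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

public final class DeadlinePollSketch {

  // Re-evaluate the condition until it holds or the deadline passes,
  // backing off 100 ms between attempts, the same shape as
  // waitToSeeReplicasInState in the hunk above.
  public static boolean pollUntil(Supplier<Boolean> condition, long timeout, TimeUnit unit)
      throws InterruptedException {
    long endTime = System.nanoTime() + unit.toNanos(timeout);
    while (true) {
      if (condition.get()) {
        return true;             // the awaited state was observed
      }
      if (System.nanoTime() > endTime) {
        return false;            // timed out; the caller decides how to fail
      }
      Thread.sleep(100);         // wait a bit before re-reading the state
    }
  }

  // Usage example: wait up to 30 seconds for an externally set flag.
  public static void main(String[] args) throws InterruptedException {
    boolean seen = pollUntil(() -> System.getProperty("replica.ready") != null,
        30, TimeUnit.SECONDS);
    System.out.println(seen ? "state observed" : "timed out waiting");
  }
}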
@@ -1789,7 +1790,7 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {
           ZkStateReader.STATE_PROP, ZkStateReader.DOWN,
           ZkStateReader.BASE_URL_PROP,zkStateReader.getBaseUrlForNodeName(node));
       Overseer.getInQueue(zkStateReader.getZkClient()).offer(ZkStateReader.toJSON(props));
-      params.set(CoreAdminParams.CORE_NODE_NAME, lookupReplicas(collection, Collections.singletonList(coreName)).get(coreName).getName());
+      params.set(CoreAdminParams.CORE_NODE_NAME, waitToSeeReplicasInState(collection, Collections.singletonList(coreName)).get(coreName).getName());
     }


@@ -665,7 +665,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBase {
     boolean disableLegacy = random().nextBoolean();
     CloudSolrServer client1 = null;

-    if(disableLegacy) {
+    if (disableLegacy) {
      log.info("legacyCloud=false");
      client1 = createCloudClient(null);
      setClusterProp(client1, ZkStateReader.LEGACY_CLOUD, "false");
@@ -103,6 +103,8 @@ public class TestDistribDocBasedVersion extends AbstractFullDistribZkTestBase {
       doTestDocVersions();
       doTestHardFail();

+      commit(); // work arround SOLR-5628
+
       testFinished = true;
     } finally {
       if (!testFinished) {