mirror of https://github.com/apache/lucene.git
SOLR-5525
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1547565 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 5416a09e73
commit dab93e43cc
@@ -464,7 +464,7 @@ public class Overseer {
         //request new shardId
         if (collectionExists) {
           // use existing numShards
-          numShards = state.getCollectionStates().get(collection).getSlices().size();
+          numShards = state.getCollection(collection).getSlices().size();
           log.info("Collection already exists with " + ZkStateReader.NUM_SHARDS_PROP + "=" + numShards);
         }
         sliceName = Assign.assignShard(collection, state, numShards);

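The recurring pattern in this change is replacing raw map lookups through getCollectionStates() with the dedicated ClusterState accessors. A minimal before/after sketch, assuming a ClusterState obtained from a ZkStateReader; the helper class and method names here are illustrative, not part of the patch:

import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;

class NumShardsLookup {
  // Sketch only: "state" would come from ZkStateReader#getClusterState() in the Overseer.
  static int numShards(ClusterState state, String collection) {
    // Old style: raw map lookup; NPEs if the collection is missing.
    // int numShards = state.getCollectionStates().get(collection).getSlices().size();

    // New style: getCollection() reports a missing collection explicitly
    // (per its javadoc) instead of silently returning null.
    DocCollection coll = state.getCollection(collection);
    return coll.getSlices().size();
  }
}
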
@@ -1106,7 +1106,7 @@ public class OverseerCollectionProcessor implements Runnable, ClosableThread {

   private void migrateKey(ClusterState clusterState, DocCollection sourceCollection, Slice sourceSlice, DocCollection targetCollection, Slice targetSlice, String splitKey, int timeout, NamedList results) throws KeeperException, InterruptedException {
     String tempSourceCollectionName = "split_" + sourceSlice.getName() + "_temp_" + targetSlice.getName();
-    if (clusterState.getCollectionStates().containsKey(tempSourceCollectionName)) {
+    if (clusterState.hasCollection(tempSourceCollectionName)) {
       log.info("Deleting temporary collection: " + tempSourceCollectionName);
       Map<String, Object> props = ZkNodeProps.makeMap(
           QUEUE_OPERATION, DELETECOLLECTION,

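A short sketch of the existence check above, assuming the caller already holds a ClusterState; the helper class and the slice-name parameters are illustrative only:

import org.apache.solr.common.cloud.ClusterState;

class TempCollectionCheck {
  // Sketch: replaces getCollectionStates().containsKey(name) with hasCollection(name).
  static boolean tempCollectionExists(ClusterState clusterState, String sourceSliceName, String targetSliceName) {
    String tempSourceCollectionName = "split_" + sourceSliceName + "_temp_" + targetSliceName;
    return clusterState.hasCollection(tempSourceCollectionName);
  }
}
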
@@ -1009,10 +1009,11 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa

   private void collectStartTimes(String collectionName,
       Map<String,Long> urlToTime) throws SolrServerException, IOException {
-    Map<String,DocCollection> collections = getCommonCloudSolrServer().getZkStateReader()
-        .getClusterState().getCollectionStates();
-    if (collections.containsKey(collectionName)) {
-      Map<String,Slice> slices = collections.get(collectionName).getSlicesMap();
+    ClusterState clusterState = getCommonCloudSolrServer().getZkStateReader()
+        .getClusterState();
+//    Map<String,DocCollection> collections = clusterState.getCollectionStates();
+    if (clusterState.hasCollection(collectionName)) {
+      Map<String,Slice> slices = clusterState.getSlicesMap(collectionName);

       Iterator<Entry<String,Slice>> it = slices.entrySet().iterator();
       while (it.hasNext()) {

@@ -1036,13 +1037,13 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
       }
     } else {
       throw new IllegalArgumentException("Could not find collection in :"
-          + collections.keySet());
+          + clusterState.getCollections());
     }
   }

   private String getUrlFromZk(String collection) {
     ClusterState clusterState = getCommonCloudSolrServer().getZkStateReader().getClusterState();
-    Map<String,Slice> slices = clusterState.getCollectionStates().get(collection).getSlicesMap();
+    Map<String,Slice> slices = clusterState.getSlicesMap(collection);

     if (slices == null) {
       throw new SolrException(ErrorCode.BAD_REQUEST, "Could not find collection:" + collection);

@@ -1097,9 +1098,9 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
     while (System.currentTimeMillis() < timeoutAt) {
       getCommonCloudSolrServer().getZkStateReader().updateClusterState(true);
       ClusterState clusterState = getCommonCloudSolrServer().getZkStateReader().getClusterState();
-      Map<String,DocCollection> collections = clusterState
-          .getCollectionStates();
-      if (!collections.containsKey(collectionName)) {
+//      Map<String,DocCollection> collections = clusterState
+//          .getCollectionStates();
+      if (! clusterState.hasCollection(collectionName)) {
         found = false;
         break;
       }

@@ -744,7 +744,8 @@ public class OverseerTest extends SolrTestCaseJ4 {
       ClusterState state = reader.getClusterState();

       int numFound = 0;
-      for (DocCollection collection : state.getCollectionStates().values()) {
+      for (String c : state.getCollections()) {
+        DocCollection collection = state.getCollection(c);
         for (Slice slice : collection.getSlices()) {
           if (slice.getReplicasMap().get("core_node1") != null) {
             numFound++;

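The test now iterates collection names and resolves each DocCollection explicitly instead of walking getCollectionStates().values(). A small sketch of that pattern, matching the loop above; the wrapper class is illustrative:

import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Slice;

class ReplicaCounter {
  // Sketch of the OverseerTest loop: count slices that contain a replica named "core_node1".
  static int countCoreNode1(ClusterState state) {
    int numFound = 0;
    for (String name : state.getCollections()) {
      DocCollection collection = state.getCollection(name);
      for (Slice slice : collection.getSlices()) {
        if (slice.getReplicasMap().get("core_node1") != null) {
          numFound++;
        }
      }
    }
    return numFound;
  }
}
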
@@ -172,7 +172,7 @@ public class UnloadDistributedZkTest extends BasicDistributedZkTest {

     zkStateReader.updateClusterState(true);

-    int slices = zkStateReader.getClusterState().getCollectionStates().get("unloadcollection").getSlices().size();
+    int slices = zkStateReader.getClusterState().getCollection("unloadcollection").getSlices().size();
     assertEquals(1, slices);

     client = clients.get(1);

@@ -187,7 +187,7 @@ public class UnloadDistributedZkTest extends BasicDistributedZkTest {
     server.request(createCmd);

     zkStateReader.updateClusterState(true);
-    slices = zkStateReader.getClusterState().getCollectionStates().get("unloadcollection").getSlices().size();
+    slices = zkStateReader.getClusterState().getCollection("unloadcollection").getSlices().size();
     assertEquals(1, slices);

     waitForRecoveriesToFinish("unloadcollection", zkStateReader, false);

@@ -136,6 +136,10 @@ public class ClusterState implements JSONWriter.Writable {
     return coll.getActiveSlices();
   }

+  public DocCollection getCollectionOrNull(String collection) {
+    return collectionStates.get(collection);
+  }
+
   /**
    * Get the named DocCollection object, or throw an exception if it doesn't exist.
    */

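The new getCollectionOrNull(...) is the null-returning counterpart to getCollection(...), useful when a missing collection is an expected case rather than an error. A usage sketch; the helper class is illustrative, and the per-slice replica count via getReplicasMap() is an assumption rather than the exact body of getTotalReplicas further down in this patch:

import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Slice;

class ReplicaTotals {
  // Sketch: getCollectionOrNull() lets callers treat "collection not created yet" as zero replicas.
  static int totalReplicas(ClusterState state, String collection) {
    DocCollection coll = state.getCollectionOrNull(collection);
    if (coll == null) return 0; // collection hasn't been created yet
    int cnt = 0;
    for (Slice slice : coll.getSlices()) {
      cnt += slice.getReplicasMap().size(); // assumed way of counting replicas per slice
    }
    return cnt;
  }
}
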
@@ -156,6 +160,7 @@ public class ClusterState implements JSONWriter.Writable {

   /**
    * @return Map<collectionName, Map<sliceName,Slice>>
+   * @deprecated
    */
   public Map<String, DocCollection> getCollectionStates() {
     return Collections.unmodifiableMap(collectionStates);

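With getCollectionStates() now marked @deprecated, callers are steered toward the narrower accessors used throughout this patch. A summary sketch, assuming the methods behave as the hunks above show; the wrapper class and variable names are illustrative:

import java.util.Map;

import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Slice;

class ClusterStateAccess {
  static void examples(ClusterState state, String name) {
    // Instead of getCollectionStates().containsKey(name):
    boolean exists = state.hasCollection(name);

    // Instead of getCollectionStates().get(name) when absence should fail fast:
    DocCollection coll = state.getCollection(name);

    // Instead of getCollectionStates().get(name) when null is acceptable:
    DocCollection maybe = state.getCollectionOrNull(name);

    // Instead of getCollectionStates().get(name).getSlicesMap():
    Map<String, Slice> slices = state.getSlicesMap(name);

    // Instead of getCollectionStates().keySet():
    Iterable<String> names = state.getCollections();
  }
}
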
@@ -440,7 +440,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
   /* Total number of replicas (number of cores serving an index to the collection) shown by the cluster state */
   protected int getTotalReplicas(String collection) {
     ZkStateReader zkStateReader = cloudClient.getZkStateReader();
-    DocCollection coll = zkStateReader.getClusterState().getCollectionStates().get(collection);
+    DocCollection coll = zkStateReader.getClusterState().getCollectionOrNull(collection);
     if (coll == null) return 0; // support for when collection hasn't been created yet
     int cnt = 0;
     for (Slice slices : coll.getSlices()) {

@@ -1690,10 +1690,10 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     int expectedShardsPerSlice = numShardsNumReplicaList.get(1);
     int expectedTotalShards = expectedSlices * expectedShardsPerSlice;

-    Map<String,DocCollection> collections = clusterState
-        .getCollectionStates();
-    if (collections.containsKey(collectionName)) {
-      Map<String,Slice> slices = collections.get(collectionName).getSlicesMap();
+//    Map<String,DocCollection> collections = clusterState
+//        .getCollectionStates();
+    if (clusterState.hasCollection(collectionName)) {
+      Map<String,Slice> slices = clusterState.getCollection(collectionName).getSlicesMap();
       // did we find expectedSlices slices/shards?
       if (slices.size() != expectedSlices) {
         return "Found new collection " + collectionName + ", but mismatch on number of slices. Expected: " + expectedSlices + ", actual: " + slices.size();

@@ -1758,7 +1758,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     return commondCloudSolrServer;
   }
   public static String getUrlFromZk(ClusterState clusterState, String collection) {
-    Map<String,Slice> slices = clusterState.getCollectionStates().get(collection).getSlicesMap();
+    Map<String,Slice> slices = clusterState.getCollection(collection).getSlicesMap();

     if (slices == null) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Could not find collection:" + collection);