mirror of https://github.com/apache/lucene.git

SOLR-11813: Reuse a NodeStateProvider in a session

commit 8836fda95f (parent 94a680c311)
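The diff below threads a single NodeStateProvider through Policy.Session: the session obtains the provider once from the SolrCloudManager and stores it in a field, and Row and Suggester now receive the session rather than the cloud manager, so all node state is read through that one cached provider. The following is an illustrative sketch of that reuse pattern only, using hypothetical stand-in types rather than the real Solr classes:

// Illustrative sketch (not Solr source): simplified stand-in types showing the
// reuse pattern this commit applies. Before the change, every Row obtained its
// own NodeStateProvider from the SolrCloudManager; after it, the Session fetches
// one provider up front and Rows go through the Session.
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class NodeStateReuseSketch {

  /** Stand-in for NodeStateProvider. */
  interface StateProvider {
    Map<String, Object> getNodeValues(String node, List<String> params);
  }

  /** Stand-in for SolrCloudManager; creating a provider may be costly. */
  interface StateSource {
    StateProvider newStateProvider();
  }

  /** Stand-in for Policy.Session: caches a single provider for its lifetime. */
  static class SessionSketch {
    final StateProvider stateProvider;

    SessionSketch(StateSource source) {
      // fetched once here, instead of once per Row as before the change
      this.stateProvider = source.newStateProvider();
    }

    RowSketch newRow(String node, List<String> params) {
      // the Row receives the session itself, not the raw source
      return new RowSketch(node, params, this);
    }
  }

  /** Stand-in for Row: reads node values through the session's cached provider. */
  static class RowSketch {
    final String node;
    final Map<String, Object> values;

    RowSketch(String node, List<String> params, SessionSketch session) {
      this.node = node;
      this.values = session.stateProvider.getNodeValues(node, params);
    }
  }

  public static void main(String[] args) {
    StateSource source = () -> (node, params) -> Collections.singletonMap("node", node);
    SessionSketch session = new SessionSketch(source);
    // Both rows share the one provider created when the session was built.
    System.out.println(session.newRow("node1", Arrays.asList("freedisk")).values);
    System.out.println(session.newRow("node2", Arrays.asList("freedisk")).values);
  }
}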
@@ -217,19 +217,23 @@ public class Policy implements MapWriter {
     Set<String> collections = new HashSet<>();
     List<Clause> expandedClauses;
     List<Violation> violations = new ArrayList<>();
+    final NodeStateProvider nodeStateProvider;
     final int znodeVersion;

     private Session(List<String> nodes, SolrCloudManager cloudManager,
-                    List<Row> matrix, List<Clause> expandedClauses, int znodeVersion) {
+                    List<Row> matrix, List<Clause> expandedClauses, int znodeVersion, NodeStateProvider nodeStateProvider) {
       this.nodes = nodes;
       this.cloudManager = cloudManager;
       this.matrix = matrix;
       this.expandedClauses = expandedClauses;
       this.znodeVersion = znodeVersion;
+      this.nodeStateProvider = nodeStateProvider;
     }

     Session(SolrCloudManager cloudManager) {
       ClusterState state = null;
+      this.nodeStateProvider = cloudManager.getNodeStateProvider();
       try {
         state = cloudManager.getClusterStateProvider().getClusterState();
         LOG.trace("-- session created with cluster state: {}", state);

@@ -240,7 +244,7 @@ public class Policy implements MapWriter {
       this.nodes = new ArrayList<>(cloudManager.getClusterStateProvider().getLiveNodes());
       this.cloudManager = cloudManager;
       for (String node : nodes) {
-        collections.addAll(cloudManager.getNodeStateProvider().getReplicaInfo(node, Collections.emptyList()).keySet());
+        collections.addAll(nodeStateProvider.getReplicaInfo(node, Collections.emptyList()).keySet());
       }

       expandedClauses = clusterPolicy.stream()

@@ -255,7 +259,7 @@ public class Policy implements MapWriter {
       Collections.sort(expandedClauses);

       matrix = new ArrayList<>(nodes.size());
-      for (String node : nodes) matrix.add(new Row(node, params, perReplicaAttributes,cloudManager));
+      for (String node : nodes) matrix.add(new Row(node, params, perReplicaAttributes,this));
       applyRules();
     }

@@ -272,7 +276,7 @@ public class Policy implements MapWriter {
     }

     Session copy() {
-      return new Session(nodes, cloudManager, getMatrixCopy(), expandedClauses, znodeVersion);
+      return new Session(nodes, cloudManager, getMatrixCopy(), expandedClauses, znodeVersion, nodeStateProvider);
     }

     List<Row> getMatrixCopy() {

@@ -328,6 +332,10 @@ public class Policy implements MapWriter {
     public List<Row> getSorted() {
       return Collections.unmodifiableList(matrix);
     }
+
+    public NodeStateProvider getNodeStateProvider() {
+      return nodeStateProvider;
+    }
   }

   static void setApproxValuesAndSortNodes(List<Preference> clusterPreferences, List<Row> matrix) {

@@ -43,14 +43,13 @@ public class Row implements MapWriter {
   boolean anyValueMissing = false;
   boolean isLive = true;

-  public Row(String node, List<String> params, List<String> perReplicaAttributes, SolrCloudManager cloudManager) {
-    NodeStateProvider nodeStateProvider = cloudManager.getNodeStateProvider();
-    collectionVsShardVsReplicas = nodeStateProvider.getReplicaInfo(node, perReplicaAttributes);
+  public Row(String node, List<String> params, List<String> perReplicaAttributes, Policy.Session session) {
+    collectionVsShardVsReplicas = session.nodeStateProvider.getReplicaInfo(node, perReplicaAttributes);
     if (collectionVsShardVsReplicas == null) collectionVsShardVsReplicas = new HashMap<>();
     this.node = node;
     cells = new Cell[params.size()];
-    isLive = cloudManager.getClusterStateProvider().getLiveNodes().contains(node);
-    Map<String, Object> vals = isLive ? nodeStateProvider.getNodeValues(node, params) : Collections.emptyMap();
+    isLive = session.cloudManager.getClusterStateProvider().getLiveNodes().contains(node);
+    Map<String, Object> vals = isLive ? session.nodeStateProvider.getNodeValues(node, params) : Collections.emptyMap();
     for (int i = 0; i < params.size(); i++) {
       String s = params.get(i);
       cells[i] = new Cell(i, s, Clause.validate(s,vals.get(s), false));

@@ -44,7 +44,7 @@ import org.apache.solr.common.util.Utils;
  * b) it causes no new violations
  *
  */
-public abstract class Suggester {
+public abstract class Suggester implements MapWriter {
   protected final EnumMap<Hint, Object> hints = new EnumMap<>(Hint.class);
   Policy.Session session;
   SolrRequest operation;

@@ -105,7 +105,7 @@ public abstract class Suggester {
       // the source node is dead so live nodes may not have it
       for (String srcNode : srcNodes) {
         if(session.matrix.stream().noneMatch(row -> row.node.equals(srcNode)))
-          session.matrix.add(new Row(srcNode, session.getPolicy().params, session.getPolicy().perReplicaAttributes, session.cloudManager));
+          session.matrix.add(new Row(srcNode, session.getPolicy().params, session.getPolicy().perReplicaAttributes, session));
       }
     }
     session.applyRules();

@@ -279,5 +279,13 @@

   }

+  @Override
+  public String toString() {
+    return jsonStr();
+  }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.put("hints", (MapWriter) ew1 -> hints.forEach((hint, o) -> ew1.putNoEx(hint.toString(), o)));
+  }
 }

@@ -24,6 +24,8 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+
+import org.apache.solr.common.util.Utils;

 /**
  * Use this class to push all entries of a Map into an output.
  * This avoids creating map instances and is supposed to be memory efficient.

@@ -31,6 +33,9 @@ import java.util.Map;
  */
 public interface MapWriter extends MapSerializable {

+  default String jsonStr(){
+    return Utils.toJSONString(this);
+  }
   @Override
   default Map toMap(Map<String, Object> map) {
     try {

@@ -83,6 +88,14 @@ public interface MapWriter extends MapSerializable {
    * @param v The value can be any supported object
    */
   EntryWriter put(String k, Object v) throws IOException;
+  default EntryWriter putNoEx(String k, Object v) {
+    try {
+      put(k,v);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    return this;
+  }

   default EntryWriter putIfNotNull(String k, Object v) throws IOException {
     if(v != null) put(k,v);

@@ -16,13 +16,16 @@
  */
 package org.apache.solr.common.util;

+import java.io.IOException;
 import java.io.Serializable;
 import java.util.Objects;

+import org.apache.solr.common.MapWriter;
+
 import static org.apache.solr.common.util.Utils.makeMap;
 import static org.apache.solr.common.util.Utils.toJSONString;

-public class Pair<T1, T2> implements Serializable {
+public class Pair<T1, T2> implements Serializable, MapWriter {
   private final T1 first;
   private final T2 second;

@@ -56,4 +59,10 @@ public class Pair<T1, T2> implements Serializable {
     return Objects.hash(first, second);
   }

+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.put("first", first);
+    ew.put("second", second);
+  }
+
 }
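With Pair implementing MapWriter, a Pair can now be serialized straight to JSON through the inherited jsonStr() default. A brief, hypothetical usage sketch:

// Usage sketch (not from the commit): Pair as a MapWriter.
// Requires solr-solrj on the classpath.
import org.apache.solr.common.util.Pair;

public class PairJsonDemo {
  public static void main(String[] args) {
    Pair<String, Integer> pair = new Pair<>("replicas", 3);
    // writeMap(...) emits the "first" and "second" entries, so jsonStr()
    // should print something like {"first":"replicas","second":3}
    System.out.println(pair.jsonStr());
  }
}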