SOLR-11813: Reuse a NodeStateProvider in a session

Author: Noble Paul
Date:   2018-01-04 19:45:59 +11:00
Parent: 94a680c311
Commit: 8836fda95f

5 changed files with 49 additions and 12 deletions

Policy.java

@@ -217,19 +217,23 @@ public class Policy implements MapWriter {
     Set<String> collections = new HashSet<>();
     List<Clause> expandedClauses;
     List<Violation> violations = new ArrayList<>();
+    final NodeStateProvider nodeStateProvider;
     final int znodeVersion;
     private Session(List<String> nodes, SolrCloudManager cloudManager,
-                    List<Row> matrix, List<Clause> expandedClauses, int znodeVersion) {
+                    List<Row> matrix, List<Clause> expandedClauses, int znodeVersion, NodeStateProvider nodeStateProvider) {
       this.nodes = nodes;
       this.cloudManager = cloudManager;
       this.matrix = matrix;
       this.expandedClauses = expandedClauses;
       this.znodeVersion = znodeVersion;
+      this.nodeStateProvider = nodeStateProvider;
     }
     Session(SolrCloudManager cloudManager) {
       ClusterState state = null;
+      this.nodeStateProvider = cloudManager.getNodeStateProvider();
       try {
         state = cloudManager.getClusterStateProvider().getClusterState();
         LOG.trace("-- session created with cluster state: {}", state);
@@ -240,7 +244,7 @@ public class Policy implements MapWriter {
       this.nodes = new ArrayList<>(cloudManager.getClusterStateProvider().getLiveNodes());
       this.cloudManager = cloudManager;
       for (String node : nodes) {
-        collections.addAll(cloudManager.getNodeStateProvider().getReplicaInfo(node, Collections.emptyList()).keySet());
+        collections.addAll(nodeStateProvider.getReplicaInfo(node, Collections.emptyList()).keySet());
       }
       expandedClauses = clusterPolicy.stream()
@@ -255,7 +259,7 @@ public class Policy implements MapWriter {
       Collections.sort(expandedClauses);
       matrix = new ArrayList<>(nodes.size());
-      for (String node : nodes) matrix.add(new Row(node, params, perReplicaAttributes,cloudManager));
+      for (String node : nodes) matrix.add(new Row(node, params, perReplicaAttributes,this));
       applyRules();
     }
@@ -272,7 +276,7 @@ public class Policy implements MapWriter {
     }
     Session copy() {
-      return new Session(nodes, cloudManager, getMatrixCopy(), expandedClauses, znodeVersion);
+      return new Session(nodes, cloudManager, getMatrixCopy(), expandedClauses, znodeVersion, nodeStateProvider);
     }
     List<Row> getMatrixCopy() {
@@ -328,6 +332,10 @@ public class Policy implements MapWriter {
     public List<Row> getSorted() {
       return Collections.unmodifiableList(matrix);
     }
+    public NodeStateProvider getNodeStateProvider() {
+      return nodeStateProvider;
+    }
   }
   static void setApproxValuesAndSortNodes(List<Preference> clusterPreferences, List<Row> matrix) {

Row.java

@@ -43,14 +43,13 @@ public class Row implements MapWriter {
   boolean anyValueMissing = false;
   boolean isLive = true;
-  public Row(String node, List<String> params, List<String> perReplicaAttributes, SolrCloudManager cloudManager) {
-    NodeStateProvider nodeStateProvider = cloudManager.getNodeStateProvider();
-    collectionVsShardVsReplicas = nodeStateProvider.getReplicaInfo(node, perReplicaAttributes);
+  public Row(String node, List<String> params, List<String> perReplicaAttributes, Policy.Session session) {
+    collectionVsShardVsReplicas = session.nodeStateProvider.getReplicaInfo(node, perReplicaAttributes);
     if (collectionVsShardVsReplicas == null) collectionVsShardVsReplicas = new HashMap<>();
     this.node = node;
     cells = new Cell[params.size()];
-    isLive = cloudManager.getClusterStateProvider().getLiveNodes().contains(node);
-    Map<String, Object> vals = isLive ? nodeStateProvider.getNodeValues(node, params) : Collections.emptyMap();
+    isLive = session.cloudManager.getClusterStateProvider().getLiveNodes().contains(node);
+    Map<String, Object> vals = isLive ? session.nodeStateProvider.getNodeValues(node, params) : Collections.emptyMap();
     for (int i = 0; i < params.size(); i++) {
       String s = params.get(i);
       cells[i] = new Cell(i, s, Clause.validate(s,vals.get(s), false));
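
The Policy.java and Row.java hunks above are two halves of the same change: the Session now obtains a NodeStateProvider once, and every Row reads that cached instance instead of calling cloudManager.getNodeStateProvider() again. Below is a minimal, self-contained sketch of that pattern; the interfaces and classes are simplified stand-ins for the real Solr types, not code from this commit.

import java.util.Collections;
import java.util.List;
import java.util.Map;

// Simplified stand-ins for SolrCloudManager / NodeStateProvider, used only to show
// the "fetch once per session, reuse in every row" pattern this commit applies.
interface NodeStateProvider {
  Map<String, Object> getNodeValues(String node, List<String> params);
}

interface CloudManager {
  NodeStateProvider getNodeStateProvider();
  List<String> getLiveNodes();
}

class Session {
  final CloudManager cloudManager;
  final NodeStateProvider nodeStateProvider;   // obtained once when the session is created

  Session(CloudManager cloudManager) {
    this.cloudManager = cloudManager;
    this.nodeStateProvider = cloudManager.getNodeStateProvider();
  }
}

class Row {
  final Map<String, Object> vals;

  // The row takes the whole session and reuses its cached provider, rather than
  // asking the cloud manager for a fresh NodeStateProvider per row.
  Row(String node, List<String> params, Session session) {
    boolean isLive = session.cloudManager.getLiveNodes().contains(node);
    this.vals = isLive ? session.nodeStateProvider.getNodeValues(node, params)
                       : Collections.emptyMap();
  }
}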

Suggester.java

@@ -44,7 +44,7 @@ import org.apache.solr.common.util.Utils;
  * b) it causes no new violations
  *
  */
-public abstract class Suggester {
+public abstract class Suggester implements MapWriter {
   protected final EnumMap<Hint, Object> hints = new EnumMap<>(Hint.class);
   Policy.Session session;
   SolrRequest operation;
@@ -105,7 +105,7 @@ public abstract class Suggester {
       // the source node is dead so live nodes may not have it
       for (String srcNode : srcNodes) {
         if(session.matrix.stream().noneMatch(row -> row.node.equals(srcNode)))
-          session.matrix.add(new Row(srcNode, session.getPolicy().params, session.getPolicy().perReplicaAttributes, session.cloudManager));
+          session.matrix.add(new Row(srcNode, session.getPolicy().params, session.getPolicy().perReplicaAttributes, session));
       }
     }
     session.applyRules();
@@ -279,5 +279,13 @@ public abstract class Suggester {
   }
+  @Override
+  public String toString() {
+    return jsonStr();
+  }
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.put("hints", (MapWriter) ew1 -> hints.forEach((hint, o) -> ew1.putNoEx(hint.toString(), o)));
+  }
 }

MapWriter.java

@@ -24,6 +24,8 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;

+import org.apache.solr.common.util.Utils;
+
 /**
  * Use this class to push all entries of a Map into an output.
  * This avoids creating map instances and is supposed to be memory efficient.
@@ -31,6 +33,9 @@ import java.util.Map;
  */
 public interface MapWriter extends MapSerializable {
+  default String jsonStr(){
+    return Utils.toJSONString(this);
+  }
   @Override
   default Map toMap(Map<String, Object> map) {
     try {
@@ -83,6 +88,14 @@ public interface MapWriter extends MapSerializable {
    * @param v The value can be any supported object
    */
   EntryWriter put(String k, Object v) throws IOException;
+  default EntryWriter putNoEx(String k, Object v) {
+    try {
+      put(k,v);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    return this;
+  }
   default EntryWriter putIfNotNull(String k, Object v) throws IOException {
     if(v != null) put(k,v);
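
The two new defaults above, jsonStr() and EntryWriter.putNoEx(), are what let Suggester (and Pair, below) render themselves as JSON with very little code. Here is a small usage sketch, not part of the commit, assuming org.apache.solr.common.MapWriter from solr-solrj is on the classpath; the HintsReport class is made up for the example.

import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.solr.common.MapWriter;

public class HintsReport implements MapWriter {
  private final Map<String, Object> hints = new LinkedHashMap<>();

  public HintsReport hint(String name, Object value) {
    hints.put(name, value);
    return this;
  }

  @Override
  public void writeMap(EntryWriter ew) throws IOException {
    // putNoEx wraps the checked IOException, so it can be used inside lambdas
    // and method references such as this forEach.
    hints.forEach(ew::putNoEx);
  }

  @Override
  public String toString() {
    return jsonStr();   // serializes this object through Utils.toJSONString(this)
  }
}

This mirrors what the Suggester hunk does: its writeMap nests the hints under a single "hints" key and relies on putNoEx inside the lambda.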

Pair.java

@@ -16,13 +16,16 @@
  */
 package org.apache.solr.common.util;

+import java.io.IOException;
 import java.io.Serializable;
 import java.util.Objects;

+import org.apache.solr.common.MapWriter;
+
 import static org.apache.solr.common.util.Utils.makeMap;
 import static org.apache.solr.common.util.Utils.toJSONString;

-public class Pair<T1, T2> implements Serializable {
+public class Pair<T1, T2> implements Serializable, MapWriter {
   private final T1 first;
   private final T2 second;
@@ -56,4 +59,10 @@ public class Pair<T1, T2> implements Serializable {
     return Objects.hash(first, second);
   }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.put("first", first);
+    ew.put("second", second);
+  }
 }
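
And a possible usage sketch for the Pair change, again not from the commit: since Pair now implements MapWriter, it can be serialized through the same Utils path that jsonStr() uses. The demo class name is made up, and the exact JSON text depends on Utils' serializer.

import org.apache.solr.common.util.Pair;
import org.apache.solr.common.util.Utils;

public class PairJsonDemo {
  public static void main(String[] args) {
    Pair<String, Integer> replicaCount = new Pair<>("shard1", 3);

    // writeMap exposes the two fields as "first" and "second", so the output
    // should be along the lines of {"first":"shard1","second":3}.
    System.out.println(Utils.toJSONString(replicaCount));

    // Equivalent, via the new MapWriter default added in this commit.
    System.out.println(replicaCount.jsonStr());
  }
}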