mirror of https://github.com/apache/lucene.git
SOLR-11063: Suggesters should accept required freedisk as a hint
This commit is contained in:
parent 259a3df8a7
commit fe86ab982d
@@ -82,6 +82,8 @@ New Features
 
 * SOLR-11062: new tag "diskType" in autoscaling policy (noble)
 
+* SOLR-11063: Suggesters should accept required freedisk as a hint (noble)
+
 Bug Fixes
 ----------------------
 
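For orientation, here is a minimal sketch of how the new MINFREEDISK hint is meant to be used, adapted from the testDiskSpaceHint test added at the end of this commit; the collection/shard names and the 150 threshold are illustrative values, and "session" stands for a Policy.Session obtained from Policy.createSession(...) as in that test:

    // Ask the policy session for an ADDREPLICA suggestion, considering only live
    // nodes that report more free disk than the MINFREEDISK hint.
    Suggester suggester = session.getSuggester(CollectionParams.CollectionAction.ADDREPLICA)
        .hint(Hint.COLL_SHARD, new Pair<>("coll1", "shard1"))
        .hint(Hint.MINFREEDISK, 150);
    SolrRequest op = suggester.getSuggestion(); // may be null if no node satisfies the hints and the policy
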
@@ -49,8 +49,7 @@ class AddReplicaSuggester extends Suggester {
     Integer targetNodeIndex = null;
     for (int i = getMatrix().size() - 1; i >= 0; i--) {
       Row row = getMatrix().get(i);
-      if (!row.isLive) continue;
-      if (!isAllowed(row.node, Hint.TARGET_NODE)) continue;
+      if (!isNodeSuitable(row)) continue;
       Row tmpRow = row.addReplica(shard.first(), shard.second(), type);
 
       List<Violation> errs = testChangedMatrix(strict, getModifiedMatrix(getMatrix(), tmpRow, i));
@@ -57,8 +57,7 @@ public class MoveReplicaSuggester extends Suggester {
       for (int j = getMatrix().size() - 1; j >= stopAt; j--) {
         if (j == i) continue;
         Row targetRow = getMatrix().get(j);
-        if(!targetRow.isLive) continue;
-        if (!isAllowed(targetRow.node, Hint.TARGET_NODE)) continue;
+        if (!isNodeSuitable(targetRow)) continue;
         targetRow = targetRow.addReplica(coll, shard, replicaInfo.getType());
         List<Violation> errs = testChangedMatrix(strict, getModifiedMatrix(getModifiedMatrix(getMatrix(), srcTmpRow, i), targetRow, j));
         if (!containsNewErrors(errs) && isLessSerious(errs, leastSeriousViolation) &&
@@ -39,6 +39,7 @@ import org.apache.solr.client.solrj.impl.ClusterStateProvider;
 import org.apache.solr.common.IteratorWriter;
 import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.rule.ImplicitSnitch;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
@@ -94,12 +95,9 @@ public class Policy implements MapWriter {
     }
     this.clusterPreferences = Collections.unmodifiableList(initialClusterPreferences);
     final SortedSet<String> paramsOfInterest = new TreeSet<>();
-    for (Preference preference : clusterPreferences) {
-      if (paramsOfInterest.contains(preference.name.name())) {
-        throw new RuntimeException(preference.name + " is repeated");
-      }
-      paramsOfInterest.add(preference.name.toString());
-    }
+    paramsOfInterest.add(ImplicitSnitch.DISK);//always get freedisk anyway.
+    paramsOfInterest.add(ImplicitSnitch.CORES);//always get cores anyway.
+    clusterPreferences.forEach(preference -> paramsOfInterest.add(preference.name.toString()));
     List<String> newParams = new ArrayList<>(paramsOfInterest);
     clusterPolicy = ((List<Map<String, Object>>) jsonMap.getOrDefault(CLUSTER_POLICY, emptyList())).stream()
         .map(Clause::new)
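As a side effect of the two unconditional adds above, every Policy session now collects disk and core information for each node even when no preference mentions them. A rough, illustrative sketch of the resulting tag set, assuming the ImplicitSnitch constants resolve to the usual "freedisk" and "cores" tag names:

    // Hypothetical cores-only preference list, i.e. cluster-preferences: [{minimize: cores}]
    SortedSet<String> paramsOfInterest = new TreeSet<>(); // java.util.SortedSet / java.util.TreeSet
    paramsOfInterest.add("freedisk");                     // ImplicitSnitch.DISK (assumed tag name)
    paramsOfInterest.add("cores");                        // ImplicitSnitch.CORES (assumed tag name)
    paramsOfInterest.add("cores");                        // contributed by the preference; already present
    // paramsOfInterest -> [cores, freedisk], so freedisk is available when a MINFREEDISK hint is checked
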
@@ -29,14 +29,18 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.function.Consumer;
+import java.util.function.Predicate;
 
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.impl.ClusterStateProvider;
 import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.rule.ImplicitSnitch;
 import org.apache.solr.common.util.Pair;
 import org.apache.solr.common.util.Utils;
 
+import static org.apache.solr.client.solrj.cloud.autoscaling.Suggestion.ConditionType.FREEDISK;
+
 /* A suggester is capable of suggesting a collection operation
  * given a particular session. Before it suggests a new operation,
  * it ensures that ,
@@ -76,6 +80,13 @@ public abstract class Suggester implements MapWriter {
     return this;
   }
 
+  protected boolean isNodeSuitable(Row row) {
+    if (!row.isLive) return false;
+    if (!isAllowed(row.node, Hint.TARGET_NODE)) return false;
+    if (!isAllowed(row.getVal(ImplicitSnitch.DISK), Hint.MINFREEDISK)) return false;
+    return true;
+  }
+
   abstract SolrRequest init();
 
@@ -227,11 +238,12 @@ public abstract class Suggester implements MapWriter {
 
   protected boolean isAllowed(Object v, Hint hint) {
     Object hintVal = hints.get(hint);
+    if (hintVal == null) return true;
     if (hint.multiValued) {
       Set set = (Set) hintVal;
       return set == null || set.contains(v);
     } else {
-      return hintVal == null || Objects.equals(v, hintVal);
+      return hintVal == null || hint.valueValidator.test(new Pair<>(hintVal, v));
     }
   }
 
@@ -258,10 +270,20 @@ public abstract class Suggester implements MapWriter {
       if (!(o instanceof Replica.Type)) {
         throw new RuntimeException("REPLICATYPE hint must use a ReplicaType");
       }
+    }),
+
+    MINFREEDISK(false, o -> {
+      if (!(o instanceof Number)) throw new RuntimeException("MINFREEDISK hint must be a number");
+    }, hintValVsActual -> {
+      Double hintFreediskInGb = (Double) FREEDISK.validate(null, hintValVsActual.first(), false);
+      Double actualFreediskInGb = (Double) FREEDISK.validate(null, hintValVsActual.second(), false);
+      if(actualFreediskInGb == null) return false;
+      return actualFreediskInGb > hintFreediskInGb;
     });
 
     public final boolean multiValued;
     public final Consumer<Object> validator;
+    public final Predicate<Pair<Object, Object>> valueValidator;
 
     Hint(boolean multiValued) {
       this(multiValued, v -> {
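A short sketch of how the new per-hint predicate is consulted; the freedisk numbers are made up, and valueValidator and Pair are the members/types shown in the hunks above. A missing freedisk value (null) also makes a node unsuitable, per the null check in the predicate:

    // isAllowed(actualFreedisk, Hint.MINFREEDISK) effectively evaluates the pair (hint value, actual value):
    boolean ok1 = Hint.MINFREEDISK.valueValidator.test(new Pair<>(150, 200d)); // true: 200 GB free > 150 GB hint
    boolean ok2 = Hint.MINFREEDISK.valueValidator.test(new Pair<>(150, 100d)); // false: not enough free disk
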
@@ -273,12 +295,20 @@ public abstract class Suggester implements MapWriter {
     }
 
     Hint(boolean multiValued, Consumer<Object> c) {
-      this.multiValued = multiValued;
-      this.validator = c;
+      this(multiValued, c, equalsPredicate);
     }
 
+    Hint(boolean multiValued, Consumer<Object> c, Predicate<Pair<Object, Object>> testval) {
+      this.multiValued = multiValued;
+      this.validator = c;
+      this.valueValidator = testval;
+    }
+
   }
 
+  static Predicate<Pair<Object, Object>> equalsPredicate = valPair -> Objects.equals(valPair.first(), valPair.second());
+
   @Override
   public String toString() {
     return jsonStr();
@@ -279,6 +279,7 @@ public class Suggestion {
     }
 
     public Object validate(String name, Object val, boolean isRuleVal) {
+      if (name == null) name = this.tagName;
       if (type == Double.class) {
         Double num = Clause.parseDouble(name, val);
         if (isRuleVal) {
@@ -1620,4 +1620,46 @@ public class TestPolicy extends SolrTestCaseJ4 {
     }
   }
 
+  public void testDiskSpaceHint() {
+
+    String dataproviderdata = "{" +
+        " liveNodes:[" +
+        " '127.0.0.1:51078_solr'," +
+        " '127.0.0.1:51147_solr']," +
+        " replicaInfo:{" +
+        " '127.0.0.1:51147_solr':{}," +
+        " '127.0.0.1:51078_solr':{testNodeAdded:{shard1:[" +
+        " { core_node3 : { type : NRT}}," +
+        " { core_node4 : { type : NRT}}]}}}," +
+        " nodeValues:{" +
+        " '127.0.0.1:51147_solr':{" +
+        " node:'127.0.0.1:51147_solr'," +
+        " cores:0," +
+        " freedisk : 100}," +
+        " '127.0.0.1:51078_solr':{" +
+        " node:'127.0.0.1:51078_solr'," +
+        " cores:2," +
+        " freedisk:200}}}";
+
+    String autoScalingjson = "cluster-preferences:[" +
+        " {minimize : cores}]" +
+        " cluster-policy:[{cores:'<10',node:'#ANY'}," +
+        " {replica:'<2', shard:'#EACH',node:'#ANY'}," +
+        " { nodeRole:overseer,replica:0}]}";
+    Policy policy = new Policy((Map<String, Object>) Utils.fromJSONString(autoScalingjson));
+    Policy.Session session = policy.createSession(cloudManagerWithData(dataproviderdata));
+    Suggester suggester = session.getSuggester(CollectionParams.CollectionAction.ADDREPLICA)
+        .hint(Hint.COLL_SHARD, new Pair<>("coll1", "shard1"))
+        .hint(Hint.MINFREEDISK, 150);
+    CollectionAdminRequest.AddReplica op = (CollectionAdminRequest.AddReplica) suggester.getSuggestion();
+
+    assertEquals("127.0.0.1:51078_solr", op.getNode());
+
+    suggester = session.getSuggester(CollectionParams.CollectionAction.ADDREPLICA)
+        .hint(Hint.COLL_SHARD, new Pair<>("coll1", "shard1"));
+    op = (CollectionAdminRequest.AddReplica) suggester.getSuggestion();
+
+    assertEquals("127.0.0.1:51147_solr", op.getNode());
+  }
 }