SOLR-11062: new tag "diskType" in autoscaling policy

Noble Paul 2018-01-09 22:58:10 +11:00
parent 74128cf695
commit 6336ed46f9
5 changed files with 43 additions and 6 deletions
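For context, this feature lets an autoscaling policy rule constrain replica placement by disk type; as the Suggestion.java change below shows, the legal values are "ssd" and "rotational". A minimal SolrJ sketch of installing such a rule follows; the policy payload, the rule itself, and the use of the v2 /cluster/autoscaling endpoint are illustrative assumptions, not code from this commit.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.request.V2Request;

// Sketch: install a cluster policy that uses the new "diskType" tag.
// The rule ("keep replicas off rotational disks") and the v2 endpoint usage
// are illustrative; only the tag name and its values come from this commit.
class DiskTypePolicyExample {
  static void setDiskTypePolicy(SolrClient client) throws Exception {
    String payload = "{\"set-cluster-policy\": ["
        + "{\"replica\": 0, \"diskType\": \"rotational\"}"
        + "]}";
    new V2Request.Builder("/cluster/autoscaling")
        .withMethod(SolrRequest.METHOD.POST)
        .withPayload(payload)
        .build()
        .process(client);
  }
}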

solr/CHANGES.txt

@@ -80,6 +80,8 @@ New Features
rules that sets the time interval for each collection. An internal Overseer command "ROUTEDALIAS_CREATECOLL"
was created to facilitate this. (David Smiley)
* SOLR-11062: new tag "diskType" in autoscaling policy (noble)
Bug Fixes
----------------------

TestPolicyCloud.java

@@ -20,11 +20,14 @@ import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import com.google.common.collect.ImmutableSet;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.client.solrj.SolrRequest;
@@ -227,8 +230,9 @@ public class TestPolicyCloud extends SolrCloudTestCase {
for (String tag : tags) {
assertNotNull( "missing : "+ tag , val.get(tag));
}
val = provider.getNodeStateProvider().getNodeValues(collection.getReplicas().get(0).getNodeName(), Collections.singleton("diskType"));
Set<String> diskTypes = ImmutableSet.of("rotational", "ssd");
assertTrue(diskTypes.contains(val.get("diskType")));
}
public void testCreateCollectionAddShardWithReplicaTypeUsingPolicy() throws Exception {

Suggestion.java

@@ -18,9 +18,11 @@
package org.apache.solr.client.solrj.cloud.autoscaling;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -34,6 +36,7 @@ import org.apache.solr.common.cloud.rule.ImplicitSnitch;
import org.apache.solr.common.util.Pair;
import org.apache.solr.common.util.StrUtils;
import static java.util.Collections.unmodifiableSet;
import static org.apache.solr.client.solrj.cloud.autoscaling.Policy.ANY;
import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
@@ -224,7 +227,13 @@ public class Suggestion {
public void getSuggestions(SuggestionCtx ctx) {
perNodeSuggestions(ctx);
}
},;
},
DISKTYPE(ImplicitSnitch.DISKTYPE, String.class, unmodifiableSet(new HashSet(Arrays.asList("ssd", "rotational"))), null, null, null) {
@Override
public void getSuggestions(SuggestionCtx ctx) {
perNodeSuggestions(ctx);
}
};
final Class type;
final Set<String> vals;

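The DISKTYPE constant added above registers diskType as a String-valued condition whose permitted values are restricted to "ssd" and "rotational". A standalone sketch of that containment check follows; the class and method names here are hypothetical, not part of the commit.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical validator mirroring the vals set passed to DISKTYPE above.
class DiskTypeValueCheck {
  private static final Set<String> ALLOWED =
      new HashSet<>(Arrays.asList("ssd", "rotational"));

  static void validate(Object val) {
    if (val != null && !ALLOWED.contains(String.valueOf(val))) {
      throw new IllegalArgumentException(
          "diskType must be one of " + ALLOWED + ", got: " + val);
    }
  }
}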
SolrClientNodeStateProvider.java

@@ -27,6 +27,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.solr.client.solrj.SolrRequest;
@@ -142,14 +143,19 @@ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter
return result;
}
static void fetchMetrics(String solrNode, ClientSnitchCtx ctx, Map<String, String> metricsKeyVsTag) {
static void fetchMetrics(String solrNode, ClientSnitchCtx ctx, Map<String, Object> metricsKeyVsTag) {
ModifiableSolrParams params = new ModifiableSolrParams();
params.add("key", metricsKeyVsTag.keySet().toArray(new String[metricsKeyVsTag.size()]));
try {
SimpleSolrResponse rsp = ctx.invoke(solrNode, CommonParams.METRICS_PATH, params);
metricsKeyVsTag.forEach((key, tag) -> {
Object v = Utils.getObjectByPath(rsp.nl, true, Arrays.asList("metrics", key));
if (v != null) ctx.getTags().put(tag, v);
if (tag instanceof Function) {
Pair<String, Object> p = (Pair<String, Object>) ((Function) tag).apply(v);
ctx.getTags().put(p.first(), p.second());
} else {
if (v != null) ctx.getTags().put(tag.toString(), v);
}
});
} catch (Exception e) {
log.warn("could not get tags from node " + solrNode, e);
@@ -166,7 +172,7 @@ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter
@Override
protected void getRemoteInfo(String solrNode, Set<String> requestedTags, SnitchContext ctx) {
ClientSnitchCtx snitchContext = (ClientSnitchCtx) ctx;
Map<String, String> metricsKeyVsTag = new HashMap<>();
Map<String, Object> metricsKeyVsTag = new HashMap<>();
for (String tag : requestedTags) {
if (tag.startsWith(SYSPROP)) {
metricsKeyVsTag.put("solr.jvm:system.properties:" + tag.substring(SYSPROP.length()), tag);
@@ -174,6 +180,21 @@ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter
metricsKeyVsTag.put(tag.substring(METRICS_PREFIX.length()), tag);
}
}
if (requestedTags.contains(ImplicitSnitch.DISKTYPE)) {
metricsKeyVsTag.put("solr.node:CONTAINER.fs.coreRoot.spins", new Function<Object, Pair<String,Object>>() {
@Override
public Pair<String, Object> apply(Object o) {
if("true".equals(String.valueOf(o))){
return new Pair<>(ImplicitSnitch.DISKTYPE, "rotational");
}
if("false".equals(String.valueOf(o))){
return new Pair<>(ImplicitSnitch.DISKTYPE, "ssd");
}
return new Pair<>(ImplicitSnitch.DISKTYPE,null);
}
});
}
if (!metricsKeyVsTag.isEmpty()) {
fetchMetrics(solrNode, snitchContext, metricsKeyVsTag);
}
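The branch added above translates the node metric solr.node:CONTAINER.fs.coreRoot.spins into the diskType tag: "true" maps to "rotational", "false" to "ssd", and anything else to null. The same mapping restated as a self-contained method for clarity:

import org.apache.solr.common.util.Pair;

// Standalone restatement of the spins -> diskType mapping wired in above.
// The literal "diskType" stands in for ImplicitSnitch.DISKTYPE to keep the sketch self-contained.
class DiskTypeMapping {
  static Pair<String, Object> fromSpins(Object spins) {
    String s = String.valueOf(spins);
    if ("true".equals(s)) return new Pair<>("diskType", "rotational");
    if ("false".equals(s)) return new Pair<>("diskType", "ssd");
    return new Pair<>("diskType", null); // metric missing or unrecognized
  }
}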

ImplicitSnitch.java

@@ -52,6 +52,7 @@ public class ImplicitSnitch extends Snitch {
public static final String SYSPROP = "sysprop.";
public static final String SYSLOADAVG = "sysLoadAvg";
public static final String HEAPUSAGE = "heapUsage";
public static final String DISKTYPE = "diskType";
public static final List<String> IP_SNITCHES = Collections.unmodifiableList(Arrays.asList("ip_1", "ip_2", "ip_3", "ip_4"));
public static final Set<String> tags = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(NODE, PORT, HOST, CORES, DISK, ROLE, "ip_1", "ip_2", "ip_3", "ip_4")));
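With the DISKTYPE constant in place, callers can request the tag by name from a node state provider, as the TestPolicyCloud change above does. A minimal sketch, assuming an existing provider and node name; the NodeStateProvider package path reflects this branch and may differ in later releases.

import java.util.Collections;
import java.util.Map;
import org.apache.solr.client.solrj.cloud.autoscaling.NodeStateProvider;
import org.apache.solr.common.cloud.rule.ImplicitSnitch;

// Sketch: read the diskType tag for one node; expected values are "rotational", "ssd", or null.
class ReadDiskTypeTag {
  static Object diskTypeOf(NodeStateProvider provider, String nodeName) {
    Map<String, Object> vals =
        provider.getNodeValues(nodeName, Collections.singleton(ImplicitSnitch.DISKTYPE));
    return vals.get(ImplicitSnitch.DISKTYPE);
  }
}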