SOLR-10278: added classes that use the Metrics API to fetch node values such as cores and freedisk

Noble Paul 2017-04-04 22:25:08 +09:30
parent 3eb2321c88
commit 69acd5f98b
10 changed files with 118 additions and 41 deletions
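For orientation, here is a minimal sketch (not part of the commit; the class and variable names are illustrative) of the metrics query that the new AutoScalingSnitch below assembles: the requested metric groups and prefixes are joined into the group and prefix parameters of a single request to the node's metrics endpoint.

// Illustrative sketch only: builds the same group/prefix parameters that the new
// AutoScalingSnitch sends when the "freedisk" and "cores" tags are requested.
import java.util.Arrays;
import java.util.List;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.StrUtils;

public class MetricsParamsSketch {
  public static void main(String[] args) {
    List<String> groups = Arrays.asList("solr.node", "solr.core");
    List<String> prefixes = Arrays.asList("CONTAINER.fs.usableSpace", "CORE.coreName");
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.add("group", StrUtils.join(groups, ','));
    params.add("prefix", StrUtils.join(prefixes, ','));
    // roughly a GET against CommonParams.METRICS_PATH on the target node with these parameters
    System.out.println(params);
  }
}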

View File

@@ -0,0 +1,80 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.cloud.autoscaling;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.solr.client.solrj.response.SimpleSolrResponse;
import org.apache.solr.cloud.rule.ServerSnitchContext;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.rule.ImplicitSnitch;
import org.apache.solr.common.cloud.rule.SnitchContext;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.Utils;
// Uses the metrics API to fetch node-level values such as free disk space and core count
public class AutoScalingSnitch extends ImplicitSnitch {
@Override
protected void getRemoteInfo(String solrNode, Set<String> requestedTags, SnitchContext ctx) {
ServerSnitchContext snitchContext = (ServerSnitchContext) ctx;
List<String> groups = new ArrayList<>();
List<String> prefixes = new ArrayList<>();
if (requestedTags.contains(DISK)) {
groups.add("solr.node");
prefixes.add("CONTAINER.fs.usableSpace");
}
if (requestedTags.contains(CORES)) {
groups.add("solr.core");
prefixes.add("CORE.coreName");
}
if (groups.isEmpty() || prefixes.isEmpty()) return;
ModifiableSolrParams params = new ModifiableSolrParams();
params.add("group", StrUtils.join(groups, ','));
params.add("prefix", StrUtils.join(prefixes, ','));
try {
SimpleSolrResponse rsp = snitchContext.invoke(solrNode, CommonParams.METRICS_PATH, params);
Map m = rsp.nl.asMap(4);
if (requestedTags.contains(DISK)) {
// usable filesystem space reported under the node's container metrics
Number n = (Number) Utils.getObjectByPath(m, true, "metrics/solr.node/CONTAINER.fs.usableSpace");
if (n != null) ctx.getTags().put(DISK, n.longValue());
}
if (requestedTags.contains(CORES)) {
// each "solr.core.*" registry in the metrics response corresponds to one core hosted on this node
int count = 0;
Map cores = (Map) m.get("metrics");
for (Object o : cores.keySet()) {
if (o.toString().startsWith("solr.core.")) count++;
}
ctx.getTags().put(CORES, count);
}
} catch (Exception e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error getting remote info for node: " + solrNode, e);
}
}
}
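To make the parsing above concrete, here is a small, self-contained sketch (not part of the commit) of the nested map shape the code assumes the metrics response takes: a top-level "metrics" entry holding one sub-map per registry, with "solr.node" carrying CONTAINER.fs.usableSpace and one "solr.core.*" entry per hosted core. The registry names used below are made up for illustration.

// Illustrative only: mimics the response shape that getRemoteInfo() walks.
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class MetricsResponseShapeSketch {
  public static void main(String[] args) {
    Map<String, Object> solrNode = new HashMap<>();
    solrNode.put("CONTAINER.fs.usableSpace", 123_456_789L);

    Map<String, Object> metrics = new LinkedHashMap<>();
    metrics.put("solr.node", solrNode);
    // hypothetical core registries; the names are invented for the example
    metrics.put("solr.core.collection1.shard1.replica1", new HashMap<>());
    metrics.put("solr.core.collection2.shard1.replica1", new HashMap<>());

    Map<String, Object> m = new HashMap<>();
    m.put("metrics", metrics);

    // DISK: equivalent of Utils.getObjectByPath(m, true, "metrics/solr.node/CONTAINER.fs.usableSpace")
    Map<?, ?> metricsMap = (Map<?, ?>) m.get("metrics");
    Number usable = (Number) ((Map<?, ?>) metricsMap.get("solr.node")).get("CONTAINER.fs.usableSpace");

    // CORES: count the registries whose name starts with "solr.core."
    long cores = metricsMap.keySet().stream()
        .filter(k -> k.toString().startsWith("solr.core.")).count();

    System.out.println("freedisk=" + usable + " cores=" + cores); // freedisk=123456789 cores=2
  }
}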

View File

@@ -15,29 +15,36 @@
* limitations under the License.
*/
package org.apache.solr.recipe;
package org.apache.solr.cloud.autoscaling;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.solr.client.solrj.impl.CloudSolrClient.ClusterStateProvider;
import org.apache.solr.cloud.rule.ServerSnitchContext;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.recipe.Policy.ClusterDataProvider;
import org.apache.solr.recipe.Policy.ReplicaInfo;
public class SolrClientClusterDataProvider implements ClusterDataProvider {
public class ServerClusterDataProvider implements ClusterDataProvider {
private final ClusterStateProvider clusterStateProvider;
private final CoreContainer coreContainer;
private Set<String> liveNodes;
private Map<String,Object> snitchSession = new HashMap<>();
private final Map<String, Map<String, Map<String, List<ReplicaInfo>>>> data = new HashMap<>();
public SolrClientClusterDataProvider(ClusterStateProvider csp) {
this.clusterStateProvider = csp;
Map<String, ClusterState.CollectionRef> all = clusterStateProvider.getCollections();
public ServerClusterDataProvider(CoreContainer coreContainer) {
this.coreContainer = coreContainer;
ClusterState clusterState = coreContainer.getZkController().getZkStateReader().getClusterState();
this.liveNodes = clusterState.getLiveNodes();
Map<String, ClusterState.CollectionRef> all = clusterState.getCollectionStates();
all.forEach((collName, ref) -> {
DocCollection coll = ref.get();
if (coll == null) return;
@@ -55,8 +62,10 @@ public class SolrClientClusterDataProvider implements ClusterDataProvider {
@Override
public Map<String, Object> getNodeValues(String node, Collection<String> keys) {
//todo
return new HashMap<>();
AutoScalingSnitch snitch = new AutoScalingSnitch();
ServerSnitchContext ctx = new ServerSnitchContext(null, node, snitchSession, coreContainer);
snitch.getRemoteInfo(node, new HashSet<>(keys), ctx);
return ctx.getTags();
}
@Override
@@ -66,6 +75,6 @@ public class SolrClientClusterDataProvider implements ClusterDataProvider {
@Override
public Collection<String> getNodes() {
return clusterStateProvider.liveNodes();
return liveNodes;
}
}
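A brief usage sketch (not part of the commit) of how a caller can now obtain metrics-backed node values through the provider; it assumes a CoreContainer running in SolrCloud mode, and the coreContainer variable name is illustrative.

// Hedged sketch: fetch the "freedisk" and "cores" tags for every live node.
import java.util.Arrays;
import java.util.Map;
import org.apache.solr.cloud.autoscaling.ServerClusterDataProvider;
import org.apache.solr.core.CoreContainer;

public class NodeValuesSketch {
  static void printNodeValues(CoreContainer coreContainer) {
    ServerClusterDataProvider provider = new ServerClusterDataProvider(coreContainer);
    for (String node : provider.getNodes()) {
      Map<String, Object> vals = provider.getNodeValues(node, Arrays.asList("freedisk", "cores"));
      System.out.println(node + " -> " + vals); // e.g. {freedisk=..., cores=...}
    }
  }
}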

View File

@@ -74,13 +74,12 @@ public class ServerSnitchContext extends SnitchContext {
public void invokeRemote(String node, ModifiableSolrParams params, String klas, RemoteCallback callback) {
if (callback == null) callback = this;
String url = coreContainer.getZkController().getZkStateReader().getBaseUrlForNodeName(node);
params.add("class", klas);
params.add(ACTION, INVOKE.toString());
//todo batch all requests to the same server
try {
SimpleSolrResponse rsp = invoke(coreContainer.getUpdateShardHandler(), url, CommonParams.CORES_HANDLER_PATH, params);
SimpleSolrResponse rsp = invoke(node, CommonParams.CORES_HANDLER_PATH, params);
Map<String, Object> returnedVal = (Map<String, Object>) rsp.getResponse().get(klas);
if(exception == null){
// log this
@@ -94,8 +93,10 @@ public class ServerSnitchContext extends SnitchContext {
}
}
public SimpleSolrResponse invoke(UpdateShardHandler shardHandler, final String url, String path, SolrParams params)
public SimpleSolrResponse invoke(String solrNode, String path, SolrParams params)
throws IOException, SolrServerException {
String url = coreContainer.getZkController().getZkStateReader().getBaseUrlForNodeName(solrNode);
UpdateShardHandler shardHandler = coreContainer.getUpdateShardHandler();
GenericSolrRequest request = new GenericSolrRequest(SolrRequest.METHOD.GET, path, params);
try (HttpSolrClient client = new HttpSolrClient.Builder(url).withHttpClient(shardHandler.getHttpClient())
.withResponseParser(new BinaryResponseParser()).build()) {

View File

@@ -60,7 +60,6 @@ import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.ToleratedUpdateError;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.ClusterState.CollectionRef;
import org.apache.solr.common.cloud.CollectionStatePredicate;
import org.apache.solr.common.cloud.CollectionStateWatcher;
import org.apache.solr.common.cloud.DocCollection;
@@ -1446,7 +1445,7 @@ public class CloudSolrClient extends SolrClient {
&& !cacheEntry.shoulRetry()) return col;
}
CollectionRef ref = getCollectionRef(collection);
ClusterState.CollectionRef ref = getCollectionRef(collection);
if (ref == null) {
//no such collection exists
return null;
@@ -1481,7 +1480,7 @@
}
}
CollectionRef getCollectionRef(String collection) {
ClusterState.CollectionRef getCollectionRef(String collection) {
return stateProvider.getState(collection);
}
@@ -1732,7 +1731,7 @@
public interface ClusterStateProvider extends Closeable {
CollectionRef getState(String collection);
ClusterState.CollectionRef getState(String collection);
Set<String> liveNodes();
@@ -1742,8 +1741,6 @@
Map<String, Object> getClusterProperties();
Map<String,CollectionRef> getCollections();
void connect();
}
}

View File

@@ -27,8 +27,6 @@ import java.util.Set;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.Aliases;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.ClusterState.CollectionRef;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.cloud.ZooKeeperException;
import org.apache.zookeeper.KeeperException;
@@ -54,7 +52,7 @@ public class ZkClientClusterStateProvider implements CloudSolrClient.ClusterStat
}
@Override
public CollectionRef getState(String collection) {
public ClusterState.CollectionRef getState(String collection) {
return zkStateReader.getClusterState().getCollectionRef(collection);
}
@@ -183,11 +181,6 @@
return zkHostString;
}
@Override
public Map<String, CollectionRef> getCollections() {
return zkStateReader.getClusterState().getCollectionStates();
}
@Override
public String toString() {
return zkHost;

View File

@@ -46,6 +46,7 @@ public class ImplicitSnitch extends Snitch {
public static final String CORES = "cores";
public static final String DISK = "freedisk";
public static final String ROLE = "role";
public static final String NODEROLE = "noderole";
public static final String SYSPROP = "sysprop.";
public static final List<String> IP_SNITCHES = Collections.unmodifiableList(Arrays.asList("ip_1", "ip_2", "ip_3", "ip_4"));
public static final Set<String> tags = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(NODE, PORT, HOST, CORES, DISK, ROLE, "ip_1", "ip_2", "ip_3", "ip_4")));
@@ -61,9 +62,14 @@
Matcher hostAndPortMatcher = hostAndPortPattern.matcher(solrNode);
if (hostAndPortMatcher.find()) ctx.getTags().put(PORT, hostAndPortMatcher.group(2));
}
if (requestedTags.contains(ROLE)) fillRole(solrNode, ctx);
if (requestedTags.contains(ROLE) || requestedTags.contains(NODEROLE)) fillRole(solrNode, ctx);
addIpTags(solrNode, requestedTags, ctx);
getRemoteInfo(solrNode, requestedTags, ctx);
}
protected void getRemoteInfo(String solrNode, Set<String> requestedTags, SnitchContext ctx) {
ModifiableSolrParams params = new ModifiableSolrParams();
if (requestedTags.contains(CORES)) params.add(CORES, "1");
if (requestedTags.contains(DISK)) params.add(DISK, "1");
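The change above keeps getRemoteInfo as a protected hook, so subclasses can replace the default remote lookup; that is exactly how the new AutoScalingSnitch plugs in the metrics API. A minimal sketch of the extension point (illustrative class name, not part of the commit):

// Illustrative subclass: override the protected hook to fill tags from any remote source.
import java.util.Set;
import org.apache.solr.common.cloud.rule.ImplicitSnitch;
import org.apache.solr.common.cloud.rule.SnitchContext;

public class CustomRemoteInfoSnitch extends ImplicitSnitch {
  @Override
  protected void getRemoteInfo(String solrNode, Set<String> requestedTags, SnitchContext ctx) {
    // e.g. consult an external inventory service and put values into ctx.getTags()
    ctx.getTags().put("freedisk", 0L); // placeholder value for the sketch
  }
}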

View File

@@ -44,10 +44,6 @@ public abstract class SnitchContext implements RemoteCallback {
this.session = session;
}
public SnitchInfo getSnitchInfo() {
return snitchInfo;
}
public Map<String, Object> getTags() {
return tags;
}

View File

@@ -201,11 +201,11 @@
}
static class ReplicaInfo implements MapWriter {
public static class ReplicaInfo implements MapWriter {
final String name;
Map<String, Object> variables;
ReplicaInfo(String name, Map<String, Object> vals) {
public ReplicaInfo(String name, Map<String, Object> vals) {
this.name = name;
this.variables = vals;
}
@@ -217,7 +217,7 @@
}
interface ClusterDataProvider {
public interface ClusterDataProvider {
Map<String, Object> getNodeValues(String node, Collection<String> keys);
/**

View File

@@ -16,7 +16,7 @@
*/
/**
* Common classes for recipe parsing filtering nodes & sorting
* Common classes for recipe parsing filtering nodes and sorting
*/
package org.apache.solr.recipe;

View File

@@ -149,11 +149,6 @@
@Override
public void connect() { }
@Override
public Map<String, CollectionRef> getCollections() {
return Collections.emptyMap();
}
@Override
public void close() throws IOException {