SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer.

Andrzej Bialecki 2017-03-07 22:00:38 +01:00
parent a6e14ec6d2
commit 4d7bc94771
36 changed files with 2434 additions and 209 deletions

View File

@ -50,6 +50,10 @@ Upgrading from Solr 6.x
factors should be indexed in a separate field and combined with the query
score using a function query.
New Features
----------------------
* SOLR-9857, SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer. (ab)
Bug Fixes
----------------------
* SOLR-9262: Connection and read timeouts are being ignored by UpdateShardHandler after SOLR-4509.

View File

@ -714,14 +714,13 @@ final class OverseerElectionContext extends ElectionContext {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrZkClient zkClient;
private Overseer overseer;
public static final String OVERSEER_ELECT = "/overseer_elect";
public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) {
super(zkNodeName, OVERSEER_ELECT, OVERSEER_ELECT + "/leader", null, zkClient);
super(zkNodeName, Overseer.OVERSEER_ELECT, Overseer.OVERSEER_ELECT + "/leader", null, zkClient);
this.overseer = overseer;
this.zkClient = zkClient;
try {
new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(OVERSEER_ELECT, zkClient);
new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(Overseer.OVERSEER_ELECT, zkClient);
} catch (KeeperException e) {
throw new SolrException(ErrorCode.SERVER_ERROR, e);
} catch (InterruptedException e) {

View File

@ -65,7 +65,8 @@ public class Overseer implements Closeable {
public static final int STATE_UPDATE_DELAY = 1500; // delay between cloud state updates
public static final int NUM_RESPONSES_TO_STORE = 10000;
public static final String OVERSEER_ELECT = "/overseer_elect";
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
enum LeaderStatus {DONT_KNOW, NO, YES}
@ -281,7 +282,7 @@ public class Overseer implements Closeable {
private void checkIfIamStillLeader() {
if (zkController != null && zkController.getCoreContainer().isShutDown()) return;//shutting down no need to go further
org.apache.zookeeper.data.Stat stat = new org.apache.zookeeper.data.Stat();
String path = OverseerElectionContext.OVERSEER_ELECT + "/leader";
String path = OVERSEER_ELECT + "/leader";
byte[] data;
try {
data = zkClient.getData(path, null, stat, true);
@ -394,7 +395,7 @@ public class Overseer implements Closeable {
boolean success = true;
try {
ZkNodeProps props = ZkNodeProps.load(zkClient.getData(
OverseerElectionContext.OVERSEER_ELECT + "/leader", null, null, true));
OVERSEER_ELECT + "/leader", null, null, true));
if (myId.equals(props.getStr("id"))) {
return LeaderStatus.YES;
}

View File

@ -65,7 +65,7 @@ public class OverseerNodePrioritizer {
String ldr = OverseerTaskProcessor.getLeaderNode(zk);
if(overseerDesignates.contains(ldr)) return;
log.info("prioritizing overseer nodes at {} overseer designates are {}", overseerId, overseerDesignates);
List<String> electionNodes = OverseerTaskProcessor.getSortedElectionNodes(zk, OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE);
List<String> electionNodes = OverseerTaskProcessor.getSortedElectionNodes(zk, Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE);
if(electionNodes.size()<2) return;
log.info("sorted nodes {}", electionNodes);

View File

@ -337,7 +337,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
public static List<String> getSortedOverseerNodeNames(SolrZkClient zk) throws KeeperException, InterruptedException {
List<String> children = null;
try {
children = zk.getChildren(OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE, null, true);
children = zk.getChildren(Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE, null, true);
} catch (Exception e) {
log.warn("error ", e);
return new ArrayList<>();
@ -370,7 +370,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
public static String getLeaderId(SolrZkClient zkClient) throws KeeperException,InterruptedException{
byte[] data = null;
try {
data = zkClient.getData(OverseerElectionContext.OVERSEER_ELECT + "/leader", null, new Stat(), true);
data = zkClient.getData(Overseer.OVERSEER_ELECT + "/leader", null, new Stat(), true);
} catch (KeeperException.NoNodeException e) {
return null;
}
@ -384,7 +384,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
boolean success = true;
try {
ZkNodeProps props = ZkNodeProps.load(zkStateReader.getZkClient().getData(
OverseerElectionContext.OVERSEER_ELECT + "/leader", null, null, true));
Overseer.OVERSEER_ELECT + "/leader", null, null, true));
if (myId.equals(props.getStr("id"))) {
return LeaderStatus.YES;
}

View File

@ -1715,7 +1715,7 @@ public class ZkController {
//however delete it . This is possible when the last attempt at deleting the election node failed.
if (electionNode.startsWith(getNodeName())) {
try {
zkClient.delete(OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE + "/" + electionNode, -1, true);
zkClient.delete(Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE + "/" + electionNode, -1, true);
} catch (NoNodeException e) {
//no problem
} catch (InterruptedException e) {

View File

@ -69,6 +69,7 @@ import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.handler.admin.ConfigSetsHandler;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.admin.InfoHandler;
import org.apache.solr.handler.admin.MetricsCollectorHandler;
import org.apache.solr.handler.admin.MetricsHandler;
import org.apache.solr.handler.admin.SecurityConfHandler;
import org.apache.solr.handler.admin.SecurityConfHandlerLocal;
@ -177,6 +178,8 @@ public class CoreContainer {
protected MetricsHandler metricsHandler;
protected MetricsCollectorHandler metricsCollectorHandler;
private enum CoreInitFailedAction { fromleader, none }
/**
@ -511,15 +514,18 @@ public class CoreContainer {
coreAdminHandler = createHandler(CORES_HANDLER_PATH, cfg.getCoreAdminHandlerClass(), CoreAdminHandler.class);
configSetsHandler = createHandler(CONFIGSETS_HANDLER_PATH, cfg.getConfigSetsHandlerClass(), ConfigSetsHandler.class);
metricsHandler = createHandler(METRICS_PATH, MetricsHandler.class.getName(), MetricsHandler.class);
metricsCollectorHandler = createHandler(MetricsCollectorHandler.HANDLER_PATH, MetricsCollectorHandler.class.getName(), MetricsCollectorHandler.class);
// may want to add some configuration here in the future
metricsCollectorHandler.init(null);
containerHandlers.put(AUTHZ_PATH, securityConfHandler);
securityConfHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), AUTHZ_PATH);
containerHandlers.put(AUTHC_PATH, securityConfHandler);
if(pkiAuthenticationPlugin != null)
containerHandlers.put(PKIAuthenticationPlugin.PATH, pkiAuthenticationPlugin.getRequestHandler());
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.node);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.jvm);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.jetty);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.node);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jvm);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jetty);
coreConfigService = ConfigSetService.createConfigSetService(cfg, loader, zkSys.zkController);
@ -537,6 +543,10 @@ public class CoreContainer {
metricManager.register(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node),
unloadedCores, true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
if (isZooKeeperAware()) {
metricManager.loadClusterReporters(cfg.getMetricReporterPlugins(), this);
}
// setup executor to load cores in parallel
ExecutorService coreLoadExecutor = MetricUtils.instrumentedExecutorService(
ExecutorUtil.newMDCAwareFixedThreadPool(
@ -660,10 +670,16 @@ public class CoreContainer {
isShutDown = true;
ExecutorUtil.shutdownAndAwaitTermination(coreContainerWorkExecutor);
if (metricManager != null) {
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
}
if (isZooKeeperAware()) {
cancelCoreRecoveries();
zkSys.zkController.publishNodeAsDown(zkSys.zkController.getNodeName());
zkSys.zkController.publishNodeAsDown(zkSys.zkController.getNodeName());
if (metricManager != null) {
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.cluster));
}
}
try {
@ -722,10 +738,6 @@ public class CoreContainer {
}
}
if (metricManager != null) {
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
}
// It should be safe to close the authorization plugin at this point.
try {
if(authorizationPlugin != null) {
@ -1232,7 +1244,7 @@ public class CoreContainer {
try (SolrCore core = getCore(name)) {
if (core != null) {
String oldRegistryName = core.getCoreMetricManager().getRegistryName();
String newRegistryName = SolrCoreMetricManager.createRegistryName(core.getCoreDescriptor().getCollectionName(), toName);
String newRegistryName = SolrCoreMetricManager.createRegistryName(core, toName);
metricManager.swapRegistries(oldRegistryName, newRegistryName);
registerCore(toName, core, true, false);
SolrCore old = solrCores.remove(name);

View File

@ -20,6 +20,7 @@ import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
import javax.management.InstanceNotFoundException;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
@ -53,7 +54,6 @@ import org.apache.lucene.store.AlreadyClosedException;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrConfig.JmxConfiguration;
import org.apache.solr.metrics.SolrCoreMetricManager;
import org.apache.solr.metrics.reporters.JmxObjectNameFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -93,9 +93,10 @@ public class JmxMonitoredMap<K, V> extends
private final String registryName;
public JmxMonitoredMap(String coreName, String coreHashCode,
public JmxMonitoredMap(String coreName, String coreHashCode, String registryName,
final JmxConfiguration jmxConfig) {
this.coreHashCode = coreHashCode;
this.registryName = registryName;
jmxRootName = (null != jmxConfig.rootName ?
jmxConfig.rootName
: ("solr" + (null != coreName ? "/" + coreName : "")));
@ -117,7 +118,6 @@ public class JmxMonitoredMap<K, V> extends
if (servers == null || servers.isEmpty()) {
server = null;
registryName = null;
nameFactory = null;
log.debug("No JMX servers found, not exposing Solr information with JMX.");
return;
@ -141,7 +141,6 @@ public class JmxMonitoredMap<K, V> extends
}
server = newServer;
}
registryName = SolrCoreMetricManager.createRegistryName(null, coreName);
nameFactory = new JmxObjectNameFactory(REPORTER_NAME + coreHashCode, registryName);
}
@ -166,6 +165,8 @@ public class JmxMonitoredMap<K, V> extends
for (ObjectName name : objectNames) {
try {
server.unregisterMBean(name);
} catch (InstanceNotFoundException ie) {
// ignore - someone else already deleted this one
} catch (Exception e) {
log.warn("Exception un-registering mbean {}", name, e);
}

View File

@ -860,6 +860,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
this.configSetProperties = configSetProperties;
// Initialize the metrics manager
this.coreMetricManager = initCoreMetricManager(config);
this.coreMetricManager.loadReporters();
if (updateHandler == null) {
directoryFactory = initDirectoryFactory();
@ -1101,13 +1102,12 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
*/
private SolrCoreMetricManager initCoreMetricManager(SolrConfig config) {
SolrCoreMetricManager coreMetricManager = new SolrCoreMetricManager(this);
coreMetricManager.loadReporters();
return coreMetricManager;
}
private Map<String,SolrInfoMBean> initInfoRegistry(String name, SolrConfig config) {
if (config.jmxConfig.enabled) {
return new JmxMonitoredMap<String, SolrInfoMBean>(name, String.valueOf(this.hashCode()), config.jmxConfig);
return new JmxMonitoredMap<String, SolrInfoMBean>(name, coreMetricManager.getRegistryName(), String.valueOf(this.hashCode()), config.jmxConfig);
} else {
log.debug("JMX monitoring not detected for core: " + name);
return new ConcurrentHashMap<>();

View File

@ -36,9 +36,9 @@ public interface SolrInfoMBean {
SEARCHER, REPLICATION, TLOG, INDEX, DIRECTORY, HTTP, OTHER }
/**
* Top-level group of beans for a subsystem.
* Top-level group of beans or metrics for a subsystem.
*/
enum Group { jvm, jetty, node, core }
enum Group { jvm, jetty, node, core, collection, shard, cluster, overseer }
/**
* Simple common usage name, e.g. BasicQueryHandler,

View File

@ -451,7 +451,8 @@ public class SolrXmlConfig {
return new PluginInfo[0];
PluginInfo[] configs = new PluginInfo[nodes.getLength()];
for (int i = 0; i < nodes.getLength(); i++) {
configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, true);
// we don't require class in order to support predefined replica and node reporter classes
configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
}
return configs;
}

View File

@ -0,0 +1,228 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.admin;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.HashMap;
import java.util.Map;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.handler.loader.ContentStreamLoader;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.handler.loader.CSVLoader;
import org.apache.solr.handler.loader.JavabinLoader;
import org.apache.solr.handler.loader.JsonLoader;
import org.apache.solr.handler.loader.XMLLoader;
import org.apache.solr.metrics.AggregateMetric;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.reporters.solr.SolrReporter;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.update.MergeIndexesCommand;
import org.apache.solr.update.RollbackUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.util.stats.MetricUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Handler to collect and aggregate metric reports. Each report indicates the target registry where
* metrics values should be collected and aggregated. Metrics with the same names are
* aggregated using {@link AggregateMetric} instances, which track the source of updates and
* their count, as well as providing simple statistics over collected values.
*
* Each report consists of {@link SolrInputDocument}-s that are expected to contain
* the following fields:
* <ul>
* <li>{@link SolrReporter#GROUP_ID} - (required) specifies target registry name where metrics will be grouped.</li>
* <li>{@link SolrReporter#REPORTER_ID} - (required) id of the reporter that sent this update. This can be e.g.
* a node name, replica name or other id that uniquely identifies the source of metric values.</li>
* <li>{@link MetricUtils#METRIC_NAME} - (required) metric name (in the source registry)</li>
* <li>{@link SolrReporter#LABEL_ID} - (optional) label to prepend to metric names in the target registry.</li>
* <li>{@link SolrReporter#REGISTRY_ID} - (optional) name of the source registry.</li>
* </ul>
* Remaining fields are assumed to be single-valued, and to contain metric attributes and their values. Example:
* <pre>
* &lt;doc&gt;
* &lt;field name="_group_"&gt;solr.core.collection1.shard1.leader&lt;/field&gt;
* &lt;field name="_reporter_"&gt;core_node3&lt;/field&gt;
* &lt;field name="metric"&gt;INDEX.merge.errors&lt;/field&gt;
* &lt;field name="value"&gt;0&lt;/field&gt;
* &lt;/doc&gt;
* </pre>
*/
public class MetricsCollectorHandler extends RequestHandlerBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final String HANDLER_PATH = "/admin/metrics/collector";
private final CoreContainer coreContainer;
private final SolrMetricManager metricManager;
private final Map<String, ContentStreamLoader> loaders = new HashMap<>();
private SolrParams params;
public MetricsCollectorHandler(final CoreContainer coreContainer) {
this.coreContainer = coreContainer;
this.metricManager = coreContainer.getMetricManager();
}
@Override
public void init(NamedList initArgs) {
super.init(initArgs);
if (initArgs != null) {
params = SolrParams.toSolrParams(initArgs);
} else {
params = new ModifiableSolrParams();
}
loaders.put("application/xml", new XMLLoader().init(params) );
loaders.put("application/json", new JsonLoader().init(params) );
loaders.put("application/csv", new CSVLoader().init(params) );
loaders.put("application/javabin", new JavabinLoader().init(params) );
loaders.put("text/csv", loaders.get("application/csv") );
loaders.put("text/xml", loaders.get("application/xml") );
loaders.put("text/json", loaders.get("application/json"));
}
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
if (coreContainer == null || coreContainer.isShutDown()) {
// silently drop request
return;
}
//log.info("#### " + req.toString());
if (req.getContentStreams() == null) { // no content
return;
}
for (ContentStream cs : req.getContentStreams()) {
if (cs.getContentType() == null) {
log.warn("Missing content type - ignoring");
continue;
}
ContentStreamLoader loader = loaders.get(cs.getContentType());
if (loader == null) {
throw new SolrException(SolrException.ErrorCode.UNSUPPORTED_MEDIA_TYPE, "Unsupported content type for stream: " + cs.getSourceInfo() + ", contentType=" + cs.getContentType());
}
loader.load(req, rsp, cs, new MetricUpdateProcessor(metricManager));
}
}
@Override
public String getDescription() {
return "Handler for collecting and aggregating metric reports.";
}
private static class MetricUpdateProcessor extends UpdateRequestProcessor {
private final SolrMetricManager metricManager;
public MetricUpdateProcessor(SolrMetricManager metricManager) {
super(null);
this.metricManager = metricManager;
}
@Override
public void processAdd(AddUpdateCommand cmd) throws IOException {
SolrInputDocument doc = cmd.solrDoc;
if (doc == null) {
return;
}
String metricName = (String)doc.getFieldValue(MetricUtils.METRIC_NAME);
if (metricName == null) {
log.warn("Missing " + MetricUtils.METRIC_NAME + " field in document, skipping: " + doc);
return;
}
doc.remove(MetricUtils.METRIC_NAME);
// XXX we could modify keys by using this original registry name
doc.remove(SolrReporter.REGISTRY_ID);
String groupId = (String)doc.getFieldValue(SolrReporter.GROUP_ID);
if (groupId == null) {
log.warn("Missing " + SolrReporter.GROUP_ID + " field in document, skipping: " + doc);
return;
}
doc.remove(SolrReporter.GROUP_ID);
String reporterId = (String)doc.getFieldValue(SolrReporter.REPORTER_ID);
if (reporterId == null) {
log.warn("Missing " + SolrReporter.REPORTER_ID + " field in document, skipping: " + doc);
return;
}
doc.remove(SolrReporter.REPORTER_ID);
String labelId = (String)doc.getFieldValue(SolrReporter.LABEL_ID);
doc.remove(SolrReporter.LABEL_ID);
doc.forEach(f -> {
String key = MetricRegistry.name(labelId, metricName, f.getName());
MetricRegistry registry = metricManager.registry(groupId);
AggregateMetric metric = getOrRegister(registry, key, new AggregateMetric());
Object o = f.getFirstValue();
if (o != null) {
metric.set(reporterId, o);
} else {
// remove missing values
metric.clear(reporterId);
}
});
}
private AggregateMetric getOrRegister(MetricRegistry registry, String name, AggregateMetric add) {
AggregateMetric existing = (AggregateMetric)registry.getMetrics().get(name);
if (existing != null) {
return existing;
}
try {
registry.register(name, add);
return add;
} catch (IllegalArgumentException e) {
// someone added before us
existing = (AggregateMetric)registry.getMetrics().get(name);
if (existing == null) { // now, that is weird...
throw new IllegalArgumentException("Inconsistent metric status, " + name);
}
return existing;
}
}
@Override
public void processDelete(DeleteUpdateCommand cmd) throws IOException {
throw new UnsupportedOperationException("processDelete");
}
@Override
public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException {
throw new UnsupportedOperationException("processMergeIndexes");
}
@Override
public void processCommit(CommitUpdateCommand cmd) throws IOException {
throw new UnsupportedOperationException("processCommit");
}
@Override
public void processRollback(RollbackUpdateCommand cmd) throws IOException {
throw new UnsupportedOperationException("processRollback");
}
}
}
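For illustration, a minimal sketch of one report document as described in the javadoc above; the literal field names follow the javadoc example, and the variable name is hypothetical.

// Illustrative only: building one report document with the field names from the javadoc example.
SolrInputDocument report = new SolrInputDocument();
report.setField("_group_", "solr.core.collection1.shard1.leader"); // SolrReporter.GROUP_ID - target registry
report.setField("_reporter_", "core_node3");                       // SolrReporter.REPORTER_ID - source id
report.setField("metric", "INDEX.merge.errors");                   // MetricUtils.METRIC_NAME
report.setField("value", 0);                                       // arbitrary metric attribute
// Sent to /admin/metrics/collector in any supported content type (XML, JSON, CSV or javabin),
// MetricUpdateProcessor.processAdd folds it into the "_group_" registry under
// MetricRegistry.name(label, "INDEX.merge.errors", "value") as an AggregateMetric value.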

View File

@ -79,7 +79,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
NamedList response = new NamedList();
for (String registryName : requestedRegistries) {
MetricRegistry registry = metricManager.registry(registryName);
response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter));
response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter, false, false, null));
}
rsp.getValues().add("metrics", response);
}

View File

@ -0,0 +1,200 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import com.codahale.metrics.Metric;
/**
* This class is used for keeping several partial named values and providing useful statistics over them.
*/
public class AggregateMetric implements Metric {
/**
* Simple class to represent current value and how many times it was set.
*/
public static class Update {
public Object value;
public final AtomicInteger updateCount = new AtomicInteger();
public Update(Object value) {
update(value);
}
public void update(Object value) {
this.value = value;
updateCount.incrementAndGet();
}
@Override
public String toString() {
return "Update{" +
"value=" + value +
", updateCount=" + updateCount +
'}';
}
}
private final Map<String, Update> values = new ConcurrentHashMap<>();
public void set(String name, Object value) {
final Update existing = values.get(name);
if (existing == null) {
final Update created = new Update(value);
final Update raced = values.putIfAbsent(name, created);
if (raced != null) {
raced.update(value);
}
} else {
existing.update(value);
}
}
public void clear(String name) {
values.remove(name);
}
public void clear() {
values.clear();
}
public int size() {
return values.size();
}
public boolean isEmpty() {
return values.isEmpty();
}
public Map<String, Update> getValues() {
return Collections.unmodifiableMap(values);
}
// --------- stats ---------
public double getMax() {
if (values.isEmpty()) {
return 0;
}
Double res = null;
for (Update u : values.values()) {
if (!(u.value instanceof Number)) {
continue;
}
Number n = (Number)u.value;
if (res == null) {
res = n.doubleValue();
continue;
}
if (n.doubleValue() > res) {
res = n.doubleValue();
}
}
return res;
}
public double getMin() {
if (values.isEmpty()) {
return 0;
}
Double res = null;
for (Update u : values.values()) {
if (!(u.value instanceof Number)) {
continue;
}
Number n = (Number)u.value;
if (res == null) {
res = n.doubleValue();
continue;
}
if (n.doubleValue() < res) {
res = n.doubleValue();
}
}
return res;
}
public double getMean() {
if (values.isEmpty()) {
return 0;
}
double total = 0;
for (Update u : values.values()) {
if (!(u.value instanceof Number)) {
continue;
}
Number n = (Number)u.value;
total += n.doubleValue();
}
return total / values.size();
}
public double getStdDev() {
int size = values.size();
if (size < 2) {
return 0;
}
final double mean = getMean();
double sum = 0;
int count = 0;
for (Update u : values.values()) {
if (!(u.value instanceof Number)) {
continue;
}
count++;
Number n = (Number)u.value;
final double diff = n.doubleValue() - mean;
sum += diff * diff;
}
if (count < 2) {
return 0;
}
final double variance = sum / (count - 1);
return Math.sqrt(variance);
}
public double getSum() {
if (values.isEmpty()) {
return 0;
}
double res = 0;
for (Update u : values.values()) {
if (!(u.value instanceof Number)) {
continue;
}
Number n = (Number)u.value;
res += n.doubleValue();
}
return res;
}
@Override
public String toString() {
return "AggregateMetric{" +
"size=" + size() +
", max=" + getMax() +
", min=" + getMin() +
", mean=" + getMean() +
", stddev=" + getStdDev() +
", sum=" + getSum() +
", values=" + values +
'}';
}
}
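A brief usage sketch of AggregateMetric based only on the methods above; the reporter ids and values are invented for illustration.

// Illustrative only: aggregating per-reporter values and reading simple statistics.
AggregateMetric metric = new AggregateMetric();
metric.set("core_node1", 10);   // first value from this reporter, updateCount == 1
metric.set("core_node1", 12);   // same reporter again -> value replaced, updateCount == 2
metric.set("core_node2", 20);
double max = metric.getMax();   // 20.0
double mean = metric.getMean(); // 16.0 -- mean over the current value of each reporter
metric.clear("core_node2");     // forget values from a reporter that went away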

View File

@ -20,6 +20,7 @@ import java.io.Closeable;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.core.NodeConfig;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
@ -36,8 +37,14 @@ public class SolrCoreMetricManager implements Closeable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrCore core;
private final String tag;
private final SolrMetricManager metricManager;
private String registryName;
private String collectionName;
private String shardName;
private String replicaName;
private String leaderRegistryName;
private boolean cloudMode;
/**
* Constructs a metric manager.
@ -46,8 +53,26 @@ public class SolrCoreMetricManager implements Closeable {
*/
public SolrCoreMetricManager(SolrCore core) {
this.core = core;
this.tag = String.valueOf(core.hashCode());
this.metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
registryName = createRegistryName(core.getCoreDescriptor().getCollectionName(), core.getName());
initCloudMode();
registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
}
private void initCloudMode() {
CloudDescriptor cd = core.getCoreDescriptor().getCloudDescriptor();
if (cd != null) {
cloudMode = true;
collectionName = core.getCoreDescriptor().getCollectionName();
shardName = cd.getShardId();
//replicaName = cd.getCoreNodeName();
String coreName = core.getName();
replicaName = parseReplicaName(collectionName, coreName);
if (replicaName == null) {
replicaName = cd.getCoreNodeName();
}
}
}
/**
@ -57,7 +82,11 @@ public class SolrCoreMetricManager implements Closeable {
public void loadReporters() {
NodeConfig nodeConfig = core.getCoreDescriptor().getCoreContainer().getConfig();
PluginInfo[] pluginInfos = nodeConfig.getMetricReporterPlugins();
metricManager.loadReporters(pluginInfos, core.getResourceLoader(), SolrInfoMBean.Group.core, registryName);
metricManager.loadReporters(pluginInfos, core.getResourceLoader(), tag,
SolrInfoMBean.Group.core, registryName);
if (cloudMode) {
metricManager.loadShardReporters(pluginInfos, core);
}
}
/**
@ -67,12 +96,18 @@ public class SolrCoreMetricManager implements Closeable {
*/
public void afterCoreSetName() {
String oldRegistryName = registryName;
registryName = createRegistryName(core.getCoreDescriptor().getCollectionName(), core.getName());
String oldLeaderRegistryName = leaderRegistryName;
initCloudMode();
registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
if (oldRegistryName.equals(registryName)) {
return;
}
// close old reporters
metricManager.closeReporters(oldRegistryName);
metricManager.closeReporters(oldRegistryName, tag);
if (oldLeaderRegistryName != null) {
metricManager.closeReporters(oldLeaderRegistryName, tag);
}
// load reporters again, using the new core name
loadReporters();
}
@ -96,7 +131,7 @@ public class SolrCoreMetricManager implements Closeable {
*/
@Override
public void close() throws IOException {
metricManager.closeReporters(getRegistryName());
metricManager.closeReporters(getRegistryName(), tag);
}
public SolrCore getCore() {
@ -104,7 +139,7 @@ public class SolrCoreMetricManager implements Closeable {
}
/**
* Retrieves the metric registry name of the manager.
* Metric registry name of the manager.
*
* In order to make it easier for reporting tools to aggregate metrics from
* different cores that logically belong to a single collection we convert the
@ -124,22 +159,74 @@ public class SolrCoreMetricManager implements Closeable {
return registryName;
}
public static String createRegistryName(String collectionName, String coreName) {
if (collectionName == null || (collectionName != null && !coreName.startsWith(collectionName + "_"))) {
// single core, or unknown naming scheme
/**
* Metric registry name for leader metrics. This is null if not in cloud mode.
* @return metric registry name for leader metrics
*/
public String getLeaderRegistryName() {
return leaderRegistryName;
}
/**
* Return a tag specific to this instance.
*/
public String getTag() {
return tag;
}
public static String createRegistryName(boolean cloud, String collectionName, String shardName, String replicaName, String coreName) {
if (cloud) { // build registry name from logical names
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, collectionName, shardName, replicaName);
} else {
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, coreName);
}
// split "collection1_shard1_1_replica1" into parts
String str = coreName.substring(collectionName.length() + 1);
String shard;
String replica = null;
int pos = str.lastIndexOf("_replica");
if (pos == -1) { // ?? no _replicaN part ??
shard = str;
} else {
shard = str.substring(0, pos);
replica = str.substring(pos + 1);
}
/**
* This method is used by {@link org.apache.solr.core.CoreContainer#rename(String, String)}.
* @param aCore existing core with old name
* @param coreName new name
* @return new registry name
*/
public static String createRegistryName(SolrCore aCore, String coreName) {
CloudDescriptor cd = aCore.getCoreDescriptor().getCloudDescriptor();
String replicaName = null;
if (cd != null) {
replicaName = parseReplicaName(cd.getCollectionName(), coreName);
}
return createRegistryName(
cd != null,
cd != null ? cd.getCollectionName() : null,
cd != null ? cd.getShardId() : null,
replicaName,
coreName
);
}
public static String parseReplicaName(String collectionName, String coreName) {
if (collectionName == null || !coreName.startsWith(collectionName)) {
return null;
} else {
// split "collection1_shard1_1_replica1" into parts
if (coreName.length() > collectionName.length()) {
String str = coreName.substring(collectionName.length() + 1);
int pos = str.lastIndexOf("_replica");
if (pos == -1) { // ?? no _replicaN part ??
return str;
} else {
return str.substring(pos + 1);
}
} else {
return null;
}
}
}
public static String createLeaderRegistryName(boolean cloud, String collectionName, String shardName) {
if (cloud) {
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.collection, collectionName, shardName, "leader");
} else {
return null;
}
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, collectionName, shard, replica);
}
}
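For illustration, how the naming helpers above resolve a conventional cloud core name; the concrete registry strings are assumptions based on the "solr." naming used elsewhere in this patch.

// Illustrative only; the resulting strings are assumed examples.
String replica = SolrCoreMetricManager.parseReplicaName("collection1", "collection1_shard1_1_replica1");
// replica == "replica1"
String registry = SolrCoreMetricManager.createRegistryName(true, "collection1", "shard1_1", "replica1",
    "collection1_shard1_1_replica1");
// delegates to SolrMetricManager.getRegistryName(Group.core, "collection1", "shard1_1", "replica1"),
// i.e. something like "solr.core.collection1.shard1_1.replica1"
String leaderRegistry = SolrCoreMetricManager.createLeaderRegistryName(true, "collection1", "shard1_1");
// e.g. "solr.collection.collection1.shard1_1.leader" -- the registry that shard reporters on each
// replica send to, and that SolrClusterReporter in turn forwards to the Overseer's node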

View File

@ -18,9 +18,13 @@ package org.apache.solr.metrics;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
@ -29,6 +33,9 @@ import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.util.stream.Collectors;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
@ -39,9 +46,14 @@ import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.MetricSet;
import com.codahale.metrics.SharedMetricRegistries;
import com.codahale.metrics.Timer;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.metrics.reporters.solr.SolrClusterReporter;
import org.apache.solr.metrics.reporters.solr.SolrShardReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -87,27 +99,39 @@ public class SolrMetricManager {
private final Lock reportersLock = new ReentrantLock();
private final Lock swapLock = new ReentrantLock();
public static final int DEFAULT_CLOUD_REPORTER_PERIOD = 60;
public SolrMetricManager() { }
/**
* An implementation of {@link MetricFilter} that selects metrics
* with names that start with a prefix.
* with names that start with one of prefixes.
*/
public static class PrefixFilter implements MetricFilter {
private final String[] prefixes;
private final Set<String> prefixes = new HashSet<>();
private final Set<String> matched = new HashSet<>();
private boolean allMatch = false;
/**
* Create a filter that uses the provided prefix.
* Create a filter that uses the provided prefixes.
* @param prefixes prefixes to use, must not be null. If empty then any
* name will match, if not empty then match on any prefix will
* succeed (logical OR).
*/
public PrefixFilter(String... prefixes) {
Objects.requireNonNull(prefixes);
this.prefixes = prefixes;
if (prefixes.length == 0) {
if (prefixes.length > 0) {
this.prefixes.addAll(Arrays.asList(prefixes));
}
if (this.prefixes.isEmpty()) {
allMatch = true;
}
}
public PrefixFilter(Collection<String> prefixes) {
Objects.requireNonNull(prefixes);
this.prefixes.addAll(prefixes);
if (this.prefixes.isEmpty()) {
allMatch = true;
}
}
@ -141,6 +165,85 @@ public class SolrMetricManager {
public void reset() {
matched.clear();
}
@Override
public String toString() {
return "PrefixFilter{" +
"prefixes=" + prefixes +
'}';
}
}
/**
* An implementation of {@link MetricFilter} that selects metrics
* with names that match regular expression patterns.
*/
public static class RegexFilter implements MetricFilter {
private final Set<Pattern> compiledPatterns = new HashSet<>();
private final Set<String> matched = new HashSet<>();
private boolean allMatch = false;
/**
* Create a filter that uses the provided regex patterns.
* @param patterns regex patterns to use, must not be null. If empty then any
* name will match, if not empty then match on any pattern will
* succeed (logical OR).
*/
public RegexFilter(String... patterns) throws PatternSyntaxException {
this(patterns != null ? Arrays.asList(patterns) : Collections.emptyList());
}
public RegexFilter(Collection<String> patterns) throws PatternSyntaxException {
Objects.requireNonNull(patterns);
if (patterns.isEmpty()) {
allMatch = true;
return;
}
patterns.forEach(p -> {
Pattern pattern = Pattern.compile(p);
compiledPatterns.add(pattern);
});
if (patterns.isEmpty()) {
allMatch = true;
}
}
@Override
public boolean matches(String name, Metric metric) {
if (allMatch) {
matched.add(name);
return true;
}
for (Pattern p : compiledPatterns) {
if (p.matcher(name).matches()) {
matched.add(name);
return true;
}
}
return false;
}
/**
* Return the set of names that matched this filter.
* @return matching names
*/
public Set<String> getMatched() {
return Collections.unmodifiableSet(matched);
}
/**
* Clear the set of names that matched.
*/
public void reset() {
matched.clear();
}
@Override
public String toString() {
return "RegexFilter{" +
"compiledPatterns=" + compiledPatterns +
'}';
}
}
/**
@ -150,7 +253,40 @@ public class SolrMetricManager {
Set<String> set = new HashSet<>();
set.addAll(registries.keySet());
set.addAll(SharedMetricRegistries.names());
return Collections.unmodifiableSet(set);
return set;
}
/**
* Return set of existing registry names that match a regex pattern
* @param patterns regex patterns. NOTE: users need to make sure that patterns that
* don't start with a wildcard use the full registry name starting with
* {@link #REGISTRY_NAME_PREFIX}
* @return set of existing registry names where at least one pattern matched.
*/
public Set<String> registryNames(String... patterns) throws PatternSyntaxException {
if (patterns == null || patterns.length == 0) {
return registryNames();
}
List<Pattern> compiled = new ArrayList<>();
for (String pattern : patterns) {
compiled.add(Pattern.compile(pattern));
}
return registryNames((Pattern[])compiled.toArray(new Pattern[compiled.size()]));
}
public Set<String> registryNames(Pattern... patterns) {
Set<String> allNames = registryNames();
if (patterns == null || patterns.length == 0) {
return allNames;
}
return allNames.stream().filter(s -> {
for (Pattern p : patterns) {
if (p.matcher(s).matches()) {
return true;
}
}
return false;
}).collect(Collectors.toSet());
}
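A short sketch of the new regex-based registry lookup and the two metric filters; the registry and metric names are assumed examples, and imports of java.util.Set and com.codahale.metrics.MetricFilter are presumed.

// Illustrative only.
static void lookupExamples(SolrMetricManager metricManager) {
  // regex lookup over existing registry names (full names, e.g. starting with "solr.")
  Set<String> coreRegistries = metricManager.registryNames("solr\\.core\\..*");
  // match metric names by prefix (logical OR over the given prefixes) ...
  MetricFilter byPrefix = new SolrMetricManager.PrefixFilter("QUERY./select", "UPDATE./update");
  // ... or by regular expression
  MetricFilter byRegex = new SolrMetricManager.RegexFilter("INDEX\\..*", "TLOG\\..*");
}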
/**
@ -209,7 +345,7 @@ public class SolrMetricManager {
*/
public void removeRegistry(String registry) {
// close any reporters for this registry first
closeReporters(registry);
closeReporters(registry, null);
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
if (isSharedRegistry(registry)) {
@ -490,10 +626,12 @@ public class SolrMetricManager {
* the list. If both attributes are present then only "group" attribute will be processed.
* @param pluginInfos plugin configurations
* @param loader resource loader
* @param tag optional tag for the reporters, to distinguish reporters logically created for different parent
* component instances.
* @param group selected group, not null
* @param registryNames optional child registry name elements
*/
public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, SolrInfoMBean.Group group, String... registryNames) {
public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, String tag, SolrInfoMBean.Group group, String... registryNames) {
if (pluginInfos == null || pluginInfos.length == 0) {
return;
}
@ -533,7 +671,7 @@ public class SolrMetricManager {
}
}
try {
loadReporter(registryName, loader, info);
loadReporter(registryName, loader, info, tag);
} catch (Exception e) {
log.warn("Error loading metrics reporter, plugin info: " + info, e);
}
@ -545,9 +683,12 @@ public class SolrMetricManager {
* @param registry reporter is associated with this registry
* @param loader loader to use when creating an instance of the reporter
* @param pluginInfo plugin configuration. Plugin "name" and "class" attributes are required.
* @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
* component instances.
* @return instance of newly created and registered reporter
* @throws Exception if any argument is missing or invalid
*/
public void loadReporter(String registry, SolrResourceLoader loader, PluginInfo pluginInfo) throws Exception {
public SolrMetricReporter loadReporter(String registry, SolrResourceLoader loader, PluginInfo pluginInfo, String tag) throws Exception {
if (registry == null || pluginInfo == null || pluginInfo.name == null || pluginInfo.className == null) {
throw new IllegalArgumentException("loadReporter called with missing arguments: " +
"registry=" + registry + ", loader=" + loader + ", pluginInfo=" + pluginInfo);
@ -558,14 +699,19 @@ public class SolrMetricManager {
pluginInfo.className,
SolrMetricReporter.class,
new String[0],
new Class[] { SolrMetricManager.class, String.class },
new Object[] { this, registry }
new Class[]{SolrMetricManager.class, String.class},
new Object[]{this, registry}
);
try {
reporter.init(pluginInfo);
} catch (IllegalStateException e) {
throw new IllegalArgumentException("reporter init failed: " + pluginInfo, e);
}
registerReporter(registry, pluginInfo.name, tag, reporter);
return reporter;
}
private void registerReporter(String registry, String name, String tag, SolrMetricReporter reporter) throws Exception {
try {
if (!reportersLock.tryLock(10, TimeUnit.SECONDS)) {
throw new Exception("Could not obtain lock to modify reporters registry: " + registry);
@ -579,12 +725,15 @@ public class SolrMetricManager {
perRegistry = new HashMap<>();
reporters.put(registry, perRegistry);
}
SolrMetricReporter oldReporter = perRegistry.get(pluginInfo.name);
if (tag != null && !tag.isEmpty()) {
name = name + "@" + tag;
}
SolrMetricReporter oldReporter = perRegistry.get(name);
if (oldReporter != null) { // close it
log.info("Replacing existing reporter '" + pluginInfo.name + "' in registry '" + registry + "': " + oldReporter.toString());
log.info("Replacing existing reporter '" + name + "' in registry '" + registry + "': " + oldReporter.toString());
oldReporter.close();
}
perRegistry.put(pluginInfo.name, reporter);
perRegistry.put(name, reporter);
} finally {
reportersLock.unlock();
@ -595,9 +744,11 @@ public class SolrMetricManager {
* Close and unregister a named {@link SolrMetricReporter} for a registry.
* @param registry registry name
* @param name reporter name
* @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
* component instances.
* @return true if a named reporter existed and was closed.
*/
public boolean closeReporter(String registry, String name) {
public boolean closeReporter(String registry, String name, String tag) {
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
try {
@ -614,6 +765,9 @@ public class SolrMetricManager {
if (perRegistry == null) {
return false;
}
if (tag != null && !tag.isEmpty()) {
name = name + "@" + tag;
}
SolrMetricReporter reporter = perRegistry.remove(name);
if (reporter == null) {
return false;
@ -635,6 +789,17 @@ public class SolrMetricManager {
* @return names of closed reporters
*/
public Set<String> closeReporters(String registry) {
return closeReporters(registry, null);
}
/**
* Close and unregister all {@link SolrMetricReporter}-s for a registry.
* @param registry registry name
* @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
* component instances.
* @return names of closed reporters
*/
public Set<String> closeReporters(String registry, String tag) {
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
try {
@ -646,18 +811,28 @@ public class SolrMetricManager {
log.warn("Interrupted while trying to obtain lock to modify reporters registry: " + registry);
return Collections.emptySet();
}
log.info("Closing metric reporters for: " + registry);
log.info("Closing metric reporters for registry=" + registry + ", tag=" + tag);
try {
Map<String, SolrMetricReporter> perRegistry = reporters.remove(registry);
Map<String, SolrMetricReporter> perRegistry = reporters.get(registry);
if (perRegistry != null) {
for (SolrMetricReporter reporter : perRegistry.values()) {
Set<String> names = new HashSet<>(perRegistry.keySet());
Set<String> removed = new HashSet<>();
names.forEach(name -> {
if (tag != null && !tag.isEmpty() && !name.endsWith("@" + tag)) {
return;
}
SolrMetricReporter reporter = perRegistry.remove(name);
try {
reporter.close();
} catch (IOException ioe) {
log.warn("Exception closing reporter " + reporter, ioe);
}
removed.add(name);
});
if (removed.size() == names.size()) {
reporters.remove(registry);
}
return perRegistry.keySet();
return removed;
} else {
return Collections.emptySet();
}
@ -695,4 +870,114 @@ public class SolrMetricManager {
reportersLock.unlock();
}
}
private List<PluginInfo> prepareCloudPlugins(PluginInfo[] pluginInfos, String group, String className,
Map<String, String> defaultAttributes,
Map<String, Object> defaultInitArgs,
PluginInfo defaultPlugin) {
List<PluginInfo> result = new ArrayList<>();
if (pluginInfos == null) {
pluginInfos = new PluginInfo[0];
}
for (PluginInfo info : pluginInfos) {
String groupAttr = info.attributes.get("group");
if (!group.equals(groupAttr)) {
continue;
}
info = preparePlugin(info, className, defaultAttributes, defaultInitArgs);
if (info != null) {
result.add(info);
}
}
if (result.isEmpty() && defaultPlugin != null) {
defaultPlugin = preparePlugin(defaultPlugin, className, defaultAttributes, defaultInitArgs);
if (defaultPlugin != null) {
result.add(defaultPlugin);
}
}
return result;
}
private PluginInfo preparePlugin(PluginInfo info, String className, Map<String, String> defaultAttributes,
Map<String, Object> defaultInitArgs) {
if (info == null) {
return null;
}
String classNameAttr = info.attributes.get("class");
if (className != null) {
if (classNameAttr != null && !className.equals(classNameAttr)) {
log.warn("Conflicting class name attributes, expected " + className + " but was " + classNameAttr + ", skipping " + info);
return null;
}
}
Map<String, String> attrs = new HashMap<>(info.attributes);
defaultAttributes.forEach((k, v) -> {
if (!attrs.containsKey(k)) {
attrs.put(k, v);
}
});
attrs.put("class", className);
Map<String, Object> initArgs = new HashMap<>();
if (info.initArgs != null) {
initArgs.putAll(info.initArgs.asMap(10));
}
defaultInitArgs.forEach((k, v) -> {
if (!initArgs.containsKey(k)) {
initArgs.put(k, v);
}
});
return new PluginInfo(info.type, attrs, new NamedList(initArgs), null);
}
public void loadShardReporters(PluginInfo[] pluginInfos, SolrCore core) {
// don't load for non-cloud cores
if (core.getCoreDescriptor().getCloudDescriptor() == null) {
return;
}
// prepare default plugin if none present in the config
Map<String, String> attrs = new HashMap<>();
attrs.put("name", "shardDefault");
attrs.put("group", SolrInfoMBean.Group.shard.toString());
Map<String, Object> initArgs = new HashMap<>();
initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
String registryName = core.getCoreMetricManager().getRegistryName();
// collect infos and normalize
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.shard.toString(), SolrShardReporter.class.getName(),
attrs, initArgs, null);
for (PluginInfo info : infos) {
try {
SolrMetricReporter reporter = loadReporter(registryName, core.getResourceLoader(), info,
String.valueOf(core.hashCode()));
((SolrShardReporter)reporter).setCore(core);
} catch (Exception e) {
log.warn("Could not load shard reporter, pluginInfo=" + info, e);
}
}
}
public void loadClusterReporters(PluginInfo[] pluginInfos, CoreContainer cc) {
// don't load for non-cloud instances
if (!cc.isZooKeeperAware()) {
return;
}
Map<String, String> attrs = new HashMap<>();
attrs.put("name", "clusterDefault");
attrs.put("group", SolrInfoMBean.Group.cluster.toString());
Map<String, Object> initArgs = new HashMap<>();
initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.cluster.toString(), SolrClusterReporter.class.getName(),
attrs, initArgs, null);
String registryName = getRegistryName(SolrInfoMBean.Group.cluster);
for (PluginInfo info : infos) {
try {
SolrMetricReporter reporter = loadReporter(registryName, cc.getResourceLoader(), info, null);
((SolrClusterReporter)reporter).setCoreContainer(cc);
} catch (Exception e) {
log.warn("Could not load node reporter, pluginInfo=" + info, e);
}
}
}
}
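A sketch of the tag-scoped reporter lifecycle introduced above; the method and its arguments are hypothetical, but the calls mirror how SolrCoreMetricManager uses the new tag parameter.

// Illustrative only; core, metricManager and info are assumed to be supplied by the caller.
static void reloadCoreReporter(SolrMetricManager metricManager, SolrCore core, PluginInfo info) throws Exception {
  String registry = core.getCoreMetricManager().getRegistryName();
  String tag = String.valueOf(core.hashCode()); // per-core tag, same convention as loadShardReporters above
  metricManager.loadReporter(registry, core.getResourceLoader(), info, tag);
  // ... later, e.g. when the core is closed or renamed, closing with the same tag removes only
  // the reporters registered as "<name>@<tag>" and leaves other reporters on this registry alone:
  metricManager.closeReporters(registry, tag);
}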

View File

@ -41,9 +41,9 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
* @param additionalProperties additional properties as key, value pairs.
*/
public JmxObjectNameFactory(String reporterName, String domain, String... additionalProperties) {
this.reporterName = reporterName;
this.reporterName = reporterName.replaceAll(":", "_");
this.domain = domain;
this.subdomains = domain.split("\\.");
this.subdomains = domain.replaceAll(":", "_").split("\\.");
if (additionalProperties != null && (additionalProperties.length % 2) != 0) {
throw new IllegalArgumentException("additionalProperties length must be even: " + Arrays.toString(additionalProperties));
}
@ -83,7 +83,7 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
}
sb.append(','); // separate from other properties
} else {
sb.append(currentDomain);
sb.append(currentDomain.replaceAll(":", "_"));
sb.append(':');
}
} else {

View File

@ -0,0 +1,277 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics.reporters.solr;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import org.apache.http.client.HttpClient;
import org.apache.solr.cloud.Overseer;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.handler.admin.MetricsCollectorHandler;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricReporter;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This reporter sends selected metrics from local registries to {@link Overseer}.
* <p>The following configuration properties are supported:</p>
* <ul>
* <li>handler - (optional str) handler path where reports are sent. Default is
* {@link MetricsCollectorHandler#HANDLER_PATH}.</li>
* <li>period - (optional int) how often reports are sent, in seconds. Default is 60. Setting this
* to 0 disables the reporter.</li>
* <li>report - (optional multiple lst) report configuration(s), see below.</li>
* </ul>
* Each report configuration consists of the following properties:
* <ul>
* <li>registry - (required str) regex pattern matching source registries (see {@link SolrMetricManager#registryNames(String...)}),
* may contain capture groups.</li>
* <li>group - (required str) target registry name where metrics will be grouped. This can be a regex pattern that
* contains back-references to capture groups collected by <code>registry</code> pattern</li>
* <li>label - (optional str) optional prefix to prepend to metric names, may contain back-references to
* capture groups collected by <code>registry</code> pattern</li>
* <li>filter - (optional multiple str) regex expression(s) matching selected metrics to be reported.</li>
* </ul>
* NOTE: this reporter uses the predefined "cluster" group, and it is always created even if explicit configuration
* is missing. The default configuration uses the report specifications from {@link #DEFAULT_REPORTS}.
* <p>Example configuration:</p>
* <pre>
* &lt;reporter name="test" group="cluster"&gt;
* &lt;str name="handler"&gt;/admin/metrics/collector&lt;/str&gt;
* &lt;int name="period"&gt;11&lt;/int&gt;
* &lt;lst name="report"&gt;
* &lt;str name="group"&gt;overseer&lt;/str&gt;
* &lt;str name="label"&gt;jvm&lt;/str&gt;
* &lt;str name="registry"&gt;solr\.jvm&lt;/str&gt;
* &lt;str name="filter"&gt;memory\.total\..*&lt;/str&gt;
* &lt;str name="filter"&gt;memory\.heap\..*&lt;/str&gt;
* &lt;str name="filter"&gt;os\.SystemLoadAverage&lt;/str&gt;
* &lt;str name="filter"&gt;threads\.count&lt;/str&gt;
* &lt;/lst&gt;
* &lt;lst name="report"&gt;
* &lt;str name="group"&gt;overseer&lt;/str&gt;
* &lt;str name="label"&gt;leader.$1&lt;/str&gt;
* &lt;str name="registry"&gt;solr\.core\.(.*)\.leader&lt;/str&gt;
* &lt;str name="filter"&gt;UPDATE\./update/.*&lt;/str&gt;
* &lt;/lst&gt;
* &lt;/reporter&gt;
* </pre>
*
*/
public class SolrClusterReporter extends SolrMetricReporter {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final String CLUSTER_GROUP = SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.cluster.toString());
public static final List<SolrReporter.Report> DEFAULT_REPORTS = new ArrayList<SolrReporter.Report>() {{
add(new SolrReporter.Report(CLUSTER_GROUP, "jetty",
SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jetty.toString()),
Collections.emptySet())); // all metrics
add(new SolrReporter.Report(CLUSTER_GROUP, "jvm",
SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jvm.toString()),
new HashSet<String>() {{
add("memory\\.total\\..*");
add("memory\\.heap\\..*");
add("os\\.SystemLoadAverage");
add("os\\.FreePhysicalMemorySize");
add("os\\.FreeSwapSpaceSize");
add("os\\.OpenFileDescriptorCount");
add("threads\\.count");
}})); // selected JVM memory, OS and thread metrics
// XXX anything interesting here?
//add(new SolrReporter.Specification(OVERSEER_GROUP, "node", SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.node.toString()),
// Collections.emptySet())); // all metrics
add(new SolrReporter.Report(CLUSTER_GROUP, "leader.$1", "solr\\.collection\\.(.*)\\.leader",
new HashSet<String>(){{
add("UPDATE\\./update/.*");
add("QUERY\\./select.*");
add("INDEX\\..*");
add("TLOG\\..*");
}}));
}};
private String handler = MetricsCollectorHandler.HANDLER_PATH;
private int period = SolrMetricManager.DEFAULT_CLOUD_REPORTER_PERIOD;
private List<SolrReporter.Report> reports = new ArrayList<>();
private SolrReporter reporter;
/**
* Create a reporter for metrics managed in a named registry.
*
* @param metricManager metric manager
* @param registryName this is ignored
*/
public SolrClusterReporter(SolrMetricManager metricManager, String registryName) {
super(metricManager, registryName);
}
public void setHandler(String handler) {
this.handler = handler;
}
public void setPeriod(int period) {
this.period = period;
}
public void setReport(List<Map> reportConfig) {
if (reportConfig == null || reportConfig.isEmpty()) {
return;
}
reportConfig.forEach(map -> {
SolrReporter.Report r = SolrReporter.Report.fromMap(map);
if (r != null) {
reports.add(r);
}
});
}
// for unit tests
int getPeriod() {
return period;
}
List<SolrReporter.Report> getReports() {
return reports;
}
@Override
protected void validate() throws IllegalStateException {
if (period < 1) {
log.info("Turning off node reporter, period=" + period);
}
if (reports.isEmpty()) { // set defaults
reports = DEFAULT_REPORTS;
}
}
@Override
public void close() throws IOException {
if (reporter != null) {
reporter.close();
}
}
public void setCoreContainer(CoreContainer cc) {
if (reporter != null) {
reporter.close();
}
// start reporter only in cloud mode
if (!cc.isZooKeeperAware()) {
log.warn("Not ZK-aware, not starting...");
return;
}
if (period < 1) { // don't start it
return;
}
HttpClient httpClient = cc.getUpdateShardHandler().getHttpClient();
ZkController zk = cc.getZkController();
String reporterId = zk.getNodeName();
reporter = SolrReporter.Builder.forReports(metricManager, reports)
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.withHandler(handler)
.withReporterId(reporterId)
.cloudClient(false) // we want to send reports specifically to a selected leader instance
.skipAggregateValues(true) // we don't want to transport details of aggregates
.skipHistograms(true) // we don't want to transport histograms
.build(httpClient, new OverseerUrlSupplier(zk));
reporter.start(period, TimeUnit.SECONDS);
}
// TODO: fix this when there is an elegant way to retrieve URL of a node that runs Overseer leader.
// package visibility for unit tests
static class OverseerUrlSupplier implements Supplier<String> {
private static final long DEFAULT_INTERVAL = TimeUnit.SECONDS.toNanos(30); // 30s, in nanoseconds to match System.nanoTime()
private ZkController zk;
private String lastKnownUrl = null;
private long lastCheckTime = 0;
private long interval = DEFAULT_INTERVAL;
OverseerUrlSupplier(ZkController zk) {
this.zk = zk;
}
@Override
public String get() {
if (zk == null) {
return null;
}
// primitive caching for lastKnownUrl
long now = System.nanoTime();
if (lastKnownUrl != null && (now - lastCheckTime) < interval) {
return lastKnownUrl;
}
if (!zk.isConnected()) {
return lastKnownUrl;
}
lastCheckTime = now;
SolrZkClient zkClient = zk.getZkClient();
ZkNodeProps props;
try {
props = ZkNodeProps.load(zkClient.getData(
Overseer.OVERSEER_ELECT + "/leader", null, null, true));
} catch (KeeperException e) {
log.warn("Could not obtain overseer's address, skipping.", e);
return lastKnownUrl;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return lastKnownUrl;
}
if (props == null) {
return lastKnownUrl;
}
String oid = props.getStr("id");
if (oid == null) {
return lastKnownUrl;
}
String[] ids = oid.split("-");
if (ids.length != 3) { // unknown format
log.warn("Unknown format of leader id, skipping: " + oid);
return lastKnownUrl;
}
// convert nodeName back to URL
String url = zk.getZkStateReader().getBaseUrlForNodeName(ids[1]);
// check that it's parseable
try {
new java.net.URL(url);
} catch (MalformedURLException mue) {
log.warn("Malformed Overseer's leader URL: url", mue);
return lastKnownUrl;
}
lastKnownUrl = url;
return url;
}
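// Example (sketch, illustrative values only): a leader id such as
// "98765432109876543-127.0.0.1:8983_solr-n_0000000000" splits on "-" into
// [zk session id, node name, sequence]; the node name part is then converted back
// into a base URL like "http://127.0.0.1:8983/solr" by getBaseUrlForNodeName().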
}
}

View File

@ -0,0 +1,392 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics.reporters.solr;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.ScheduledReporter;
import com.codahale.metrics.Timer;
import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.handler.admin.MetricsCollectorHandler;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.util.stats.MetricUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implementation of {@link ScheduledReporter} that reports metrics from selected registries and sends
* them periodically as update requests to a selected Solr collection and to a configured handler.
*/
public class SolrReporter extends ScheduledReporter {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final String REGISTRY_ID = "_registry_";
public static final String REPORTER_ID = "_reporter_";
public static final String GROUP_ID = "_group_";
public static final String LABEL_ID = "_label_";
/**
* Specification of what registries and what metrics to send.
*/
public static final class Report {
public String groupPattern;
public String labelPattern;
public String registryPattern;
public Set<String> metricFilters = new HashSet<>();
/**
* Create a report specification
* @param groupPattern logical group for these metrics. This is used in {@link MetricsCollectorHandler}
* to select the target registry for metrics to aggregate. Must not be null or empty.
* It may contain back-references to capture groups from {@code registryPattern}
* @param labelPattern name of this group of metrics. This is used in {@link MetricsCollectorHandler}
* to prefix metric names. May be null or empty. It may contain back-references
* to capture groups from {@code registryPattern}.
* @param registryPattern pattern for selecting matching registries, see {@link SolrMetricManager#registryNames(String...)}
* @param metricFilters patterns for selecting matching metrics, see {@link org.apache.solr.metrics.SolrMetricManager.RegexFilter}
*/
public Report(String groupPattern, String labelPattern, String registryPattern, Collection<String> metricFilters) {
this.groupPattern = groupPattern;
this.labelPattern = labelPattern;
this.registryPattern = registryPattern;
if (metricFilters != null) {
this.metricFilters.addAll(metricFilters);
}
}
public static Report fromMap(Map<?, ?> map) {
String groupPattern = (String)map.get("group");
String labelPattern = (String)map.get("label");
String registryPattern = (String)map.get("registry");
Object oFilters = map.get("filter");
Collection<String> metricFilters = Collections.emptyList();
if (oFilters != null) {
if (oFilters instanceof String) {
metricFilters = Collections.singletonList((String)oFilters);
} else if (oFilters instanceof Collection) {
metricFilters = (Collection<String>)oFilters;
} else {
log.warn("Invalid report filters, ignoring: " + oFilters);
}
}
if (groupPattern == null || registryPattern == null) {
log.warn("Invalid report configuration, group and registry required!: " + map);
return null;
}
return new Report(groupPattern, labelPattern, registryPattern, metricFilters);
}
}
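// A sketch (not part of the API surface) of building a Report programmatically, equivalent to a
// <lst name="report"> configuration element: the "$1" back-reference in the label is filled from
// the capture group of the registry pattern when reports are generated.
//
//   SolrReporter.Report leaders = new SolrReporter.Report(
//       "cluster",                                        // groupPattern
//       "leader.$1",                                      // labelPattern
//       "solr\\.collection\\.(.*)\\.leader",              // registryPattern
//       Collections.singleton("UPDATE\\./update/.*"));    // metricFilters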
public static class Builder {
private final SolrMetricManager metricManager;
private final List<Report> reports;
private String reporterId;
private TimeUnit rateUnit;
private TimeUnit durationUnit;
private String handler;
private boolean skipHistograms;
private boolean skipAggregateValues;
private boolean cloudClient;
private SolrParams params;
/**
* Create a builder for SolrReporter.
* @param metricManager metric manager that is the source of metrics
* @param reports report definitions
* @return builder
*/
public static Builder forReports(SolrMetricManager metricManager, List<Report> reports) {
return new Builder(metricManager, reports);
}
private Builder(SolrMetricManager metricManager, List<Report> reports) {
this.metricManager = metricManager;
this.reports = reports;
this.rateUnit = TimeUnit.SECONDS;
this.durationUnit = TimeUnit.MILLISECONDS;
this.skipHistograms = false;
this.skipAggregateValues = false;
this.cloudClient = false;
this.params = null;
}
/**
* Additional {@link SolrParams} to add to every request.
* @param params additional params
* @return {@code this}
*/
public Builder withSolrParams(SolrParams params) {
this.params = params;
return this;
}
/**
* If true then use {@link org.apache.solr.client.solrj.impl.CloudSolrClient} for communication.
* Default is false.
* @param cloudClient use CloudSolrClient when true, {@link org.apache.solr.client.solrj.impl.HttpSolrClient} otherwise.
* @return {@code this}
*/
public Builder cloudClient(boolean cloudClient) {
this.cloudClient = cloudClient;
return this;
}
/**
     * Histograms are difficult or impossible to aggregate, so it may not be
     * worth reporting them.
* @param skipHistograms when true then skip histograms from reports
* @return {@code this}
*/
public Builder skipHistograms(boolean skipHistograms) {
this.skipHistograms = skipHistograms;
return this;
}
/**
     * Individual values from {@link org.apache.solr.metrics.AggregateMetric} may not be worth reporting.
     * @param skipAggregateValues when true then skip reporting individual values from the metric
* @return {@code this}
*/
public Builder skipAggregateValues(boolean skipAggregateValues) {
this.skipAggregateValues = skipAggregateValues;
return this;
}
/**
* Handler name to use at the remote end.
*
     * @param handler handler name, e.g. "/admin/metrics/collector"
* @return {@code this}
*/
public Builder withHandler(String handler) {
this.handler = handler;
return this;
}
/**
* Use this id to identify metrics from this instance.
*
* @param reporterId reporter id
* @return {@code this}
*/
public Builder withReporterId(String reporterId) {
this.reporterId = reporterId;
return this;
}
/**
* Convert rates to the given time unit.
*
* @param rateUnit a unit of time
* @return {@code this}
*/
public Builder convertRatesTo(TimeUnit rateUnit) {
this.rateUnit = rateUnit;
return this;
}
/**
* Convert durations to the given time unit.
*
* @param durationUnit a unit of time
* @return {@code this}
*/
public Builder convertDurationsTo(TimeUnit durationUnit) {
this.durationUnit = durationUnit;
return this;
}
/**
* Build it.
* @param client an instance of {@link HttpClient} to be used for making calls.
     * @param urlProvider function that returns the base URL of the Solr instance to target. May return
     *                    null to indicate that reporting should be skipped. Note: this
     *                    function is called each time just before a report is sent.
* @return configured instance of reporter
*/
public SolrReporter build(HttpClient client, Supplier<String> urlProvider) {
return new SolrReporter(client, urlProvider, metricManager, reports, handler, reporterId, rateUnit, durationUnit,
params, skipHistograms, skipAggregateValues, cloudClient);
}
}
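// Usage sketch (assumes a SolrMetricManager "metricManager" and HttpClient "httpClient" are
// available; the fixed URL and reporter id below are hypothetical). SolrClusterReporter and
// SolrShardReporter build their reporters the same way, but supply the overseer/leader URL
// dynamically through their own Supplier implementations.
//
//   SolrReporter reporter = SolrReporter.Builder
//       .forReports(metricManager, Collections.singletonList(
//           new SolrReporter.Report("cluster", "jvm", "solr\\.jvm", Collections.emptySet())))
//       .withHandler(MetricsCollectorHandler.HANDLER_PATH)
//       .withReporterId("127.0.0.1:8983_solr")
//       .convertRatesTo(TimeUnit.SECONDS)
//       .convertDurationsTo(TimeUnit.MILLISECONDS)
//       .build(httpClient, () -> "http://127.0.0.1:8983/solr");
//   reporter.start(60, TimeUnit.SECONDS);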
private String reporterId;
private String handler;
private Supplier<String> urlProvider;
private SolrClientCache clientCache;
private List<CompiledReport> compiledReports;
private SolrMetricManager metricManager;
private boolean skipHistograms;
private boolean skipAggregateValues;
private boolean cloudClient;
private ModifiableSolrParams params;
private Map<String, Object> metadata;
private static final class CompiledReport {
String group;
String label;
Pattern registryPattern;
MetricFilter filter;
CompiledReport(Report report) throws PatternSyntaxException {
this.group = report.groupPattern;
this.label = report.labelPattern;
this.registryPattern = Pattern.compile(report.registryPattern);
this.filter = new SolrMetricManager.RegexFilter(report.metricFilters);
}
@Override
public String toString() {
return "CompiledReport{" +
"group='" + group + '\'' +
", label='" + label + '\'' +
", registryPattern=" + registryPattern +
", filter=" + filter +
'}';
}
}
public SolrReporter(HttpClient httpClient, Supplier<String> urlProvider, SolrMetricManager metricManager,
List<Report> metrics, String handler,
String reporterId, TimeUnit rateUnit, TimeUnit durationUnit,
SolrParams params, boolean skipHistograms, boolean skipAggregateValues, boolean cloudClient) {
super(null, "solr-reporter", MetricFilter.ALL, rateUnit, durationUnit);
this.metricManager = metricManager;
this.urlProvider = urlProvider;
this.reporterId = reporterId;
if (handler == null) {
handler = MetricsCollectorHandler.HANDLER_PATH;
}
this.handler = handler;
this.clientCache = new SolrClientCache(httpClient);
this.compiledReports = new ArrayList<>();
metrics.forEach(report -> {
try {
CompiledReport cs = new CompiledReport(report);
compiledReports.add(cs);
} catch (PatternSyntaxException e) {
log.warn("Skipping report with invalid registryPattern: " + report.registryPattern, e);
}
});
this.skipHistograms = skipHistograms;
this.skipAggregateValues = skipAggregateValues;
this.cloudClient = cloudClient;
this.params = new ModifiableSolrParams();
this.params.set(REPORTER_ID, reporterId);
// allow overrides to take precedence
if (params != null) {
this.params.add(params);
}
metadata = new HashMap<>();
metadata.put(REPORTER_ID, reporterId);
}
@Override
public void close() {
clientCache.close();
super.close();
}
@Override
public void report() {
String url = urlProvider.get();
// if null then suppress reporting
if (url == null) {
return;
}
SolrClient solr;
if (cloudClient) {
solr = clientCache.getCloudSolrClient(url);
} else {
solr = clientCache.getHttpSolrClient(url);
}
UpdateRequest req = new UpdateRequest(handler);
req.setParams(params);
compiledReports.forEach(report -> {
Set<String> registryNames = metricManager.registryNames(report.registryPattern);
registryNames.forEach(registryName -> {
String label = report.label;
if (label != null && label.indexOf('$') != -1) {
// label with back-references
Matcher m = report.registryPattern.matcher(registryName);
label = m.replaceFirst(label);
}
final String effectiveLabel = label;
String group = report.group;
if (group.indexOf('$') != -1) {
// group with back-references
Matcher m = report.registryPattern.matcher(registryName);
group = m.replaceFirst(group);
}
final String effectiveGroup = group;
MetricUtils.toSolrInputDocuments(metricManager.registry(registryName), Collections.singletonList(report.filter), MetricFilter.ALL,
skipHistograms, skipAggregateValues, metadata, doc -> {
doc.setField(REGISTRY_ID, registryName);
doc.setField(GROUP_ID, effectiveGroup);
if (effectiveLabel != null) {
doc.setField(LABEL_ID, effectiveLabel);
}
req.add(doc);
});
});
});
// if no docs added then don't send a report
if (req.getDocuments() == null || req.getDocuments().isEmpty()) {
return;
}
try {
//log.info("%%% sending to " + url + ": " + req.getParams());
solr.request(req);
} catch (Exception e) {
log.debug("Error sending metric report", e.toString());
}
}
@Override
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) {
// no-op - we do all the work in report()
}
}

View File

@ -0,0 +1,188 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics.reporters.solr;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.admin.MetricsCollectorHandler;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class reports selected metrics from replicas to shard leader.
* <p>The following configuration properties are supported:</p>
* <ul>
* <li>handler - (optional str) handler path where reports are sent. Default is
* {@link MetricsCollectorHandler#HANDLER_PATH}.</li>
* <li>period - (optional int) how often reports are sent, in seconds. Default is 60. Setting this
* to 0 disables the reporter.</li>
* <li>filter - (optional multiple str) regex expression(s) matching selected metrics to be reported.</li>
* </ul>
 * NOTE: this reporter uses the predefined "shard" group, and it's always created even if explicit configuration
* is missing. Default configuration uses filters defined in {@link #DEFAULT_FILTERS}.
* <p>Example configuration:</p>
* <pre>
 * &lt;reporter name="test" group="shard"&gt;
* &lt;int name="period"&gt;11&lt;/int&gt;
* &lt;str name="filter"&gt;UPDATE\./update/.*requests&lt;/str&gt;
* &lt;str name="filter"&gt;QUERY\./select.*requests&lt;/str&gt;
* &lt;/reporter&gt;
* </pre>
*/
public class SolrShardReporter extends SolrMetricReporter {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final List<String> DEFAULT_FILTERS = new ArrayList<String>() {{
add("TLOG.*");
add("REPLICATION.*");
add("INDEX.flush.*");
add("INDEX.merge.major.*");
add("UPDATE\\./update/.*requests");
add("QUERY\\./select.*requests");
}};
private String handler = MetricsCollectorHandler.HANDLER_PATH;
private int period = SolrMetricManager.DEFAULT_CLOUD_REPORTER_PERIOD;
private List<String> filters = new ArrayList<>();
private SolrReporter reporter;
/**
* Create a reporter for metrics managed in a named registry.
*
* @param metricManager metric manager
* @param registryName registry to use, one of registries managed by
* {@link SolrMetricManager}
*/
public SolrShardReporter(SolrMetricManager metricManager, String registryName) {
super(metricManager, registryName);
}
public void setHandler(String handler) {
this.handler = handler;
}
public void setPeriod(int period) {
this.period = period;
}
public void setFilter(List<String> filterConfig) {
if (filterConfig == null || filterConfig.isEmpty()) {
return;
}
filters = filterConfig;
}
// for unit tests
int getPeriod() {
return period;
}
@Override
protected void validate() throws IllegalStateException {
if (period < 1) {
log.info("Turning off shard reporter, period=" + period);
}
if (filters.isEmpty()) {
filters = DEFAULT_FILTERS;
}
// start in inform(...) only when core is available
}
@Override
public void close() throws IOException {
if (reporter != null) {
reporter.close();
}
}
public void setCore(SolrCore core) {
if (reporter != null) {
reporter.close();
}
if (core.getCoreDescriptor().getCloudDescriptor() == null) {
// not a cloud core
log.warn("Not initializing shard reporter for non-cloud core " + core.getName());
return;
}
if (period < 1) { // don't start it
log.warn("Not starting shard reporter ");
return;
}
// our id is coreNodeName
String id = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName();
// target registry is the leaderRegistryName
String groupId = core.getCoreMetricManager().getLeaderRegistryName();
if (groupId == null) {
log.warn("No leaderRegistryName for core " + core + ", not starting the reporter...");
return;
}
SolrReporter.Report spec = new SolrReporter.Report(groupId, null, registryName, filters);
reporter = SolrReporter.Builder.forReports(metricManager, Collections.singletonList(spec))
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.withHandler(handler)
.withReporterId(id)
.cloudClient(false) // we want to send reports specifically to a selected leader instance
.skipAggregateValues(true) // we don't want to transport details of aggregates
.skipHistograms(true) // we don't want to transport histograms
.build(core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getHttpClient(), new LeaderUrlSupplier(core));
reporter.start(period, TimeUnit.SECONDS);
}
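// Example (sketch, names are illustrative): for a core named "test_collection_shard1_replica1"
// the source registryName is "solr.core.test_collection.shard1.replica1" and the target groupId
// (the leader registry that MetricsCollectorHandler aggregates into) is
// "solr.collection.test_collection.shard1.leader".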
private static class LeaderUrlSupplier implements Supplier<String> {
private SolrCore core;
LeaderUrlSupplier(SolrCore core) {
this.core = core;
}
@Override
public String get() {
CloudDescriptor cd = core.getCoreDescriptor().getCloudDescriptor();
if (cd == null) {
return null;
}
ClusterState state = core.getCoreDescriptor().getCoreContainer().getZkController().getClusterState();
DocCollection collection = state.getCollection(core.getCoreDescriptor().getCollectionName());
Replica replica = collection.getLeader(core.getCoreDescriptor().getCloudDescriptor().getShardId());
if (replica == null) {
log.warn("No leader for " + collection.getName() + "/" + core.getCoreDescriptor().getCloudDescriptor().getShardId());
return null;
}
String baseUrl = replica.getStr("base_url");
if (baseUrl == null) {
log.warn("No base_url for replica " + replica);
}
return baseUrl;
}
}
}

View File

@ -0,0 +1,22 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This package contains {@link org.apache.solr.metrics.SolrMetricReporter} implementations
* specific to SolrCloud reporting.
*/
package org.apache.solr.metrics.reporters.solr;

View File

@ -161,11 +161,13 @@ public class PeerSync implements SolrMetricProducer {
core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.REPLICATION.toString(), this);
}
public static final String METRIC_SCOPE = "peerSync";
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
syncTime = manager.timer(registry, "time", scope);
syncErrors = manager.counter(registry, "errors", scope);
syncSkipped = manager.counter(registry, "skipped", scope);
syncTime = manager.timer(registry, "time", scope, METRIC_SCOPE);
syncErrors = manager.counter(registry, "errors", scope, METRIC_SCOPE);
syncSkipped = manager.counter(registry, "skipped", scope, METRIC_SCOPE);
}
/** optional list of updates we had before possibly receiving new updates */

View File

@ -16,11 +16,15 @@
*/
package org.apache.solr.util.stats;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
@ -32,13 +36,40 @@ import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.metrics.AggregateMetric;
/**
* Metrics specific utility functions.
*/
public class MetricUtils {
public static final String METRIC_NAME = "metric";
public static final String VALUES = "values";
static final String MS = "_ms";
static final String MIN = "min";
static final String MIN_MS = MIN + MS;
static final String MAX = "max";
static final String MAX_MS = MAX + MS;
static final String MEAN = "mean";
static final String MEAN_MS = MEAN + MS;
static final String MEDIAN = "median";
static final String MEDIAN_MS = MEDIAN + MS;
static final String STDDEV = "stddev";
static final String STDDEV_MS = STDDEV + MS;
static final String SUM = "sum";
static final String P75 = "p75";
static final String P75_MS = P75 + MS;
static final String P95 = "p95";
static final String P95_MS = P95 + MS;
static final String P99 = "p99";
static final String P99_MS = P99 + MS;
static final String P999 = "p999";
static final String P999_MS = P999 + MS;
/**
* Adds metrics from a Timer to a NamedList, using well-known back-compat names.
* @param lst The NamedList to add the metrics data to
@ -77,41 +108,138 @@ public class MetricUtils {
* included in the output
* @param mustMatchFilter a {@link MetricFilter}.
* A metric <em>must</em> match this filter to be included in the output.
* @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
* @param metadata optional metadata. If not null and not empty then this map will be added under a
* {@code _metadata_} key.
* @return a {@link NamedList}
*/
public static NamedList toNamedList(MetricRegistry registry, List<MetricFilter> shouldMatchFilters, MetricFilter mustMatchFilter) {
NamedList response = new NamedList();
public static NamedList toNamedList(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
MetricFilter mustMatchFilter, boolean skipHistograms,
boolean skipAggregateValues,
Map<String, Object> metadata) {
NamedList result = new NamedList();
toNamedMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, (k, v) -> {
result.add(k, new NamedList(v));
});
if (metadata != null && !metadata.isEmpty()) {
result.add("_metadata_", new NamedList(metadata));
}
return result;
}
/**
   * Returns a representation of the given metric registry as a list of {@link SolrInputDocument}-s.
   * Only those metrics are converted which match at least one of the given MetricFilter instances.
*
   * @param registry the {@link MetricRegistry} to be converted
* @param shouldMatchFilters a list of {@link MetricFilter} instances.
* A metric must match <em>any one</em> of the filters from this list to be
* included in the output
* @param mustMatchFilter a {@link MetricFilter}.
* A metric <em>must</em> match this filter to be included in the output.
* @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
* @param metadata optional metadata. If not null and not empty then this map will be added under a
* {@code _metadata_} key.
* @return a list of {@link SolrInputDocument}-s
*/
public static List<SolrInputDocument> toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
MetricFilter mustMatchFilter, boolean skipHistograms,
boolean skipAggregateValues,
Map<String, Object> metadata) {
List<SolrInputDocument> result = new LinkedList<>();
toSolrInputDocuments(registry, shouldMatchFilters, mustMatchFilter, skipHistograms,
skipAggregateValues, metadata, doc -> {
result.add(doc);
});
return result;
}
public static void toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
MetricFilter mustMatchFilter, boolean skipHistograms,
boolean skipAggregateValues,
Map<String, Object> metadata, Consumer<SolrInputDocument> consumer) {
boolean addMetadata = metadata != null && !metadata.isEmpty();
toNamedMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, (k, v) -> {
SolrInputDocument doc = new SolrInputDocument();
doc.setField(METRIC_NAME, k);
toSolrInputDocument(null, doc, v);
if (addMetadata) {
toSolrInputDocument(null, doc, metadata);
}
consumer.accept(doc);
});
}
public static void toSolrInputDocument(String prefix, SolrInputDocument doc, Map<String, Object> map) {
for (Map.Entry<String, Object> entry : map.entrySet()) {
if (entry.getValue() instanceof Map) { // flatten recursively
toSolrInputDocument(entry.getKey(), doc, (Map<String, Object>)entry.getValue());
} else {
String key = prefix != null ? prefix + "." + entry.getKey() : entry.getKey();
doc.addField(key, entry.getValue());
}
}
}
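// Example (illustrative values): a nested map such as {"requests": {"count": 10, "meanRate": 1.5}}
// is flattened into dotted field names on the document, i.e. "requests.count" = 10 and
// "requests.meanRate" = 1.5; toSolrInputDocuments additionally sets the "metric" (METRIC_NAME)
// field to the metric's name.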
public static void toNamedMaps(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
MetricFilter mustMatchFilter, boolean skipHistograms, boolean skipAggregateValues,
BiConsumer<String, Map<String, Object>> consumer) {
Map<String, Metric> metrics = registry.getMetrics();
SortedSet<String> names = registry.getNames();
names.stream()
.filter(s -> shouldMatchFilters.stream().anyMatch(metricFilter -> metricFilter.matches(s, metrics.get(s))))
.filter(s -> mustMatchFilter.matches(s, metrics.get(s)))
.forEach(n -> {
Metric metric = metrics.get(n);
if (metric instanceof Counter) {
Counter counter = (Counter) metric;
response.add(n, counterToNamedList(counter));
} else if (metric instanceof Gauge) {
Gauge gauge = (Gauge) metric;
response.add(n, gaugeToNamedList(gauge));
} else if (metric instanceof Meter) {
Meter meter = (Meter) metric;
response.add(n, meterToNamedList(meter));
} else if (metric instanceof Timer) {
Timer timer = (Timer) metric;
response.add(n, timerToNamedList(timer));
} else if (metric instanceof Histogram) {
Histogram histogram = (Histogram) metric;
response.add(n, histogramToNamedList(histogram));
}
});
Metric metric = metrics.get(n);
if (metric instanceof Counter) {
Counter counter = (Counter) metric;
consumer.accept(n, counterToMap(counter));
} else if (metric instanceof Gauge) {
Gauge gauge = (Gauge) metric;
consumer.accept(n, gaugeToMap(gauge));
} else if (metric instanceof Meter) {
Meter meter = (Meter) metric;
consumer.accept(n, meterToMap(meter));
} else if (metric instanceof Timer) {
Timer timer = (Timer) metric;
consumer.accept(n, timerToMap(timer, skipHistograms));
} else if (metric instanceof Histogram) {
if (!skipHistograms) {
Histogram histogram = (Histogram) metric;
consumer.accept(n, histogramToMap(histogram));
}
} else if (metric instanceof AggregateMetric) {
consumer.accept(n, aggregateMetricToMap((AggregateMetric)metric, skipAggregateValues));
}
});
}
static Map<String, Object> aggregateMetricToMap(AggregateMetric metric, boolean skipAggregateValues) {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", metric.size());
response.put(MAX, metric.getMax());
response.put(MIN, metric.getMin());
response.put(MEAN, metric.getMean());
response.put(STDDEV, metric.getStdDev());
response.put(SUM, metric.getSum());
if (!(metric.isEmpty() || skipAggregateValues)) {
Map<String, Object> values = new LinkedHashMap<>();
response.put(VALUES, values);
metric.getValues().forEach((k, v) -> {
Map<String, Object> map = new LinkedHashMap<>();
map.put("value", v.value);
map.put("updateCount", v.updateCount.get());
values.put(k, map);
});
}
return response;
}
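// A sketch of the resulting structure (keys follow the constants above): for an AggregateMetric
// updated by two reporters the map looks roughly like
// {count=2, max=..., min=..., mean=..., stddev=..., sum=...,
//  values={reporterA={value=..., updateCount=...}, reporterB={...}}},
// with the "values" entry omitted when skipAggregateValues is true or the metric is empty.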
static NamedList histogramToNamedList(Histogram histogram) {
NamedList response = new NamedList();
static Map<String, Object> histogramToMap(Histogram histogram) {
Map<String, Object> response = new LinkedHashMap<>();
Snapshot snapshot = histogram.getSnapshot();
response.add("count", histogram.getCount());
response.put("count", histogram.getCount());
// non-time based values
addSnapshot(response, snapshot, false);
return response;
@ -126,71 +254,52 @@ public class MetricUtils {
}
}
static final String MS = "_ms";
static final String MIN = "min";
static final String MIN_MS = MIN + MS;
static final String MAX = "max";
static final String MAX_MS = MAX + MS;
static final String MEAN = "mean";
static final String MEAN_MS = MEAN + MS;
static final String MEDIAN = "median";
static final String MEDIAN_MS = MEDIAN + MS;
static final String STDDEV = "stddev";
static final String STDDEV_MS = STDDEV + MS;
static final String P75 = "p75";
static final String P75_MS = P75 + MS;
static final String P95 = "p95";
static final String P95_MS = P95 + MS;
static final String P99 = "p99";
static final String P99_MS = P99 + MS;
static final String P999 = "p999";
static final String P999_MS = P999 + MS;
// some snapshots represent time in ns, other snapshots represent raw values (eg. chunk size)
static void addSnapshot(NamedList response, Snapshot snapshot, boolean ms) {
response.add((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
response.add((ms ? MAX_MS: MAX), nsToMs(ms, snapshot.getMax()));
response.add((ms ? MEAN_MS : MEAN), nsToMs(ms, snapshot.getMean()));
response.add((ms ? MEDIAN_MS: MEDIAN), nsToMs(ms, snapshot.getMedian()));
response.add((ms ? STDDEV_MS: STDDEV), nsToMs(ms, snapshot.getStdDev()));
response.add((ms ? P75_MS: P75), nsToMs(ms, snapshot.get75thPercentile()));
response.add((ms ? P95_MS: P95), nsToMs(ms, snapshot.get95thPercentile()));
response.add((ms ? P99_MS: P99), nsToMs(ms, snapshot.get99thPercentile()));
response.add((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
static void addSnapshot(Map<String, Object> response, Snapshot snapshot, boolean ms) {
response.put((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
response.put((ms ? MAX_MS: MAX), nsToMs(ms, snapshot.getMax()));
response.put((ms ? MEAN_MS : MEAN), nsToMs(ms, snapshot.getMean()));
response.put((ms ? MEDIAN_MS: MEDIAN), nsToMs(ms, snapshot.getMedian()));
response.put((ms ? STDDEV_MS: STDDEV), nsToMs(ms, snapshot.getStdDev()));
response.put((ms ? P75_MS: P75), nsToMs(ms, snapshot.get75thPercentile()));
response.put((ms ? P95_MS: P95), nsToMs(ms, snapshot.get95thPercentile()));
response.put((ms ? P99_MS: P99), nsToMs(ms, snapshot.get99thPercentile()));
response.put((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
}
static NamedList timerToNamedList(Timer timer) {
NamedList response = new NamedList();
response.add("count", timer.getCount());
response.add("meanRate", timer.getMeanRate());
response.add("1minRate", timer.getOneMinuteRate());
response.add("5minRate", timer.getFiveMinuteRate());
response.add("15minRate", timer.getFifteenMinuteRate());
// time-based values in nanoseconds
addSnapshot(response, timer.getSnapshot(), true);
static Map<String,Object> timerToMap(Timer timer, boolean skipHistograms) {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", timer.getCount());
response.put("meanRate", timer.getMeanRate());
response.put("1minRate", timer.getOneMinuteRate());
response.put("5minRate", timer.getFiveMinuteRate());
response.put("15minRate", timer.getFifteenMinuteRate());
if (!skipHistograms) {
// time-based values in nanoseconds
addSnapshot(response, timer.getSnapshot(), true);
}
return response;
}
static NamedList meterToNamedList(Meter meter) {
NamedList response = new NamedList();
response.add("count", meter.getCount());
response.add("meanRate", meter.getMeanRate());
response.add("1minRate", meter.getOneMinuteRate());
response.add("5minRate", meter.getFiveMinuteRate());
response.add("15minRate", meter.getFifteenMinuteRate());
static Map<String, Object> meterToMap(Meter meter) {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", meter.getCount());
response.put("meanRate", meter.getMeanRate());
response.put("1minRate", meter.getOneMinuteRate());
response.put("5minRate", meter.getFiveMinuteRate());
response.put("15minRate", meter.getFifteenMinuteRate());
return response;
}
static NamedList gaugeToNamedList(Gauge gauge) {
NamedList response = new NamedList();
response.add("value", gauge.getValue());
static Map<String, Object> gaugeToMap(Gauge gauge) {
Map<String, Object> response = new LinkedHashMap<>();
response.put("value", gauge.getValue());
return response;
}
static NamedList counterToNamedList(Counter counter) {
NamedList response = new NamedList();
response.add("count", counter.getCount());
static Map<String, Object> counterToMap(Counter counter) {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", counter.getCount());
return response;
}

View File

@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<solr>
<shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
<str name="urlScheme">${urlScheme:}</str>
<int name="socketTimeout">${socketTimeout:90000}</int>
<int name="connTimeout">${connTimeout:15000}</int>
</shardHandlerFactory>
<solrcloud>
<str name="host">127.0.0.1</str>
<int name="hostPort">${hostPort:8983}</int>
<str name="hostContext">${hostContext:solr}</str>
<int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
<bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
<int name="leaderVoteWait">${leaderVoteWait:10000}</int>
<int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
<int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
<int name="autoReplicaFailoverWaitAfterExpiration">${autoReplicaFailoverWaitAfterExpiration:10000}</int>
<int name="autoReplicaFailoverWorkLoopDelay">${autoReplicaFailoverWorkLoopDelay:10000}</int>
<int name="autoReplicaFailoverBadNodeExpiration">${autoReplicaFailoverBadNodeExpiration:60000}</int>
</solrcloud>
<metrics>
<reporter name="test" group="shard">
<int name="period">5</int>
<str name="filter">UPDATE\./update/.*requests</str>
<str name="filter">QUERY\./select.*requests</str>
</reporter>
<reporter name="test" group="cluster">
<str name="handler">/admin/metrics/collector</str>
<int name="period">5</int>
<lst name="report">
<str name="group">cluster</str>
<str name="label">jvm</str>
<str name="registry">solr\.jvm</str>
<str name="filter">memory\.total\..*</str>
<str name="filter">memory\.heap\..*</str>
<str name="filter">os\.SystemLoadAverage</str>
<str name="filter">threads\.count</str>
</lst>
<lst name="report">
<str name="group">cluster</str>
<str name="label">leader.$1</str>
<str name="registry">solr\.collection\.(.*)\.leader</str>
<str name="filter">UPDATE\./update/.*</str>
</lst>
</reporter>
</metrics>
</solr>

View File

@ -119,9 +119,9 @@ public class TestCloudRecovery extends SolrCloudTestCase {
.filter(s -> s.startsWith("solr.core.")).collect(Collectors.toList());
for (String registry : registryNames) {
Map<String, Metric> metrics = manager.registry(registry).getMetrics();
Timer timer = (Timer)metrics.get("REPLICATION.time");
Counter counter = (Counter)metrics.get("REPLICATION.errors");
Counter skipped = (Counter)metrics.get("REPLICATION.skipped");
Timer timer = (Timer)metrics.get("REPLICATION.peerSync.time");
Counter counter = (Counter)metrics.get("REPLICATION.peerSync.errors");
Counter skipped = (Counter)metrics.get("REPLICATION.peerSync.skipped");
replicationCount += timer.getCount();
errorsCount += counter.getCount();
skippedCount += skipped.getCount();

View File

@ -85,7 +85,7 @@ public class TestJmxMonitoredMap extends LuceneTestCase {
log.info("Using port: " + port);
String url = "service:jmx:rmi:///jndi/rmi://127.0.0.1:"+port+"/solrjmx";
JmxConfiguration config = new JmxConfiguration(true, null, url, null);
monitoredMap = new JmxMonitoredMap<>("", "", config);
monitoredMap = new JmxMonitoredMap<>("", "", "", config);
JMXServiceURL u = new JMXServiceURL(url);
connector = JMXConnectorFactory.connect(u);
mbeanServer = connector.getMBeanServerConnection();

View File

@ -103,6 +103,7 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
String className = MockMetricReporter.class.getName();
String reporterName = TestUtil.randomUnicodeString(random);
String taggedName = reporterName + "@" + coreMetricManager.getTag();
Map<String, Object> attrs = new HashMap<>();
attrs.put(FieldType.CLASS_NAME, className);
@ -116,15 +117,16 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
PluginInfo pluginInfo = shouldDefinePlugin ? new PluginInfo(TestUtil.randomUnicodeString(random), attrs) : null;
try {
metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
pluginInfo, String.valueOf(coreMetricManager.getCore().hashCode()));
assertNotNull(pluginInfo);
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName());
assertTrue("reporters.size should be > 0, but was + " + reporters.size(), reporters.size() > 0);
assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(reporterName));
assertTrue("wrong reporter class: " + reporters.get(reporterName), reporters.get(reporterName) instanceof MockMetricReporter);
assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(taggedName));
assertTrue("wrong reporter class: " + reporters.get(taggedName), reporters.get(taggedName) instanceof MockMetricReporter);
} catch (IllegalArgumentException e) {
assertTrue(pluginInfo == null || attrs.get("configurable") == null);
assertNull(metricManager.getReporters(coreMetricManager.getRegistryName()).get(reporterName));
assertNull(metricManager.getReporters(coreMetricManager.getRegistryName()).get(taggedName));
}
}
@ -152,20 +154,11 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
}
@Test
public void testRegistryName() throws Exception {
String collectionName = "my_collection_";
String cloudCoreName = "my_collection__shard1_0_replica0";
String simpleCoreName = "collection_1_replica0";
String simpleRegistryName = "solr.core." + simpleCoreName;
String cloudRegistryName = "solr.core." + cloudCoreName;
String nestedRegistryName = "solr.core.my_collection_.shard1_0.replica0";
// pass through
assertEquals(cloudRegistryName, coreMetricManager.createRegistryName(null, cloudCoreName));
assertEquals(simpleRegistryName, coreMetricManager.createRegistryName(null, simpleCoreName));
// unknown naming scheme -> pass through
assertEquals(simpleRegistryName, coreMetricManager.createRegistryName(collectionName, simpleCoreName));
// cloud collection
assertEquals(nestedRegistryName, coreMetricManager.createRegistryName(collectionName, cloudCoreName));
public void testNonCloudRegistryName() throws Exception {
String registryName = h.getCore().getCoreMetricManager().getRegistryName();
String leaderRegistryName = h.getCore().getCoreMetricManager().getLeaderRegistryName();
assertNotNull(registryName);
assertEquals("solr.core.collection1", registryName);
assertNull(leaderRegistryName);
}
}

View File

@ -205,32 +205,32 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
createPluginInfo("node_foo", "node", null),
createPluginInfo("core_foo", "core", null)
};
metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.node);
String tag = "xyz";
metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.node);
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
assertEquals(4, reporters.size());
assertTrue(reporters.containsKey("universal_foo"));
assertTrue(reporters.containsKey("multigroup_foo"));
assertTrue(reporters.containsKey("node_foo"));
assertTrue(reporters.containsKey("multiregistry_foo"));
assertTrue(reporters.containsKey("universal_foo@" + tag));
assertTrue(reporters.containsKey("multigroup_foo@" + tag));
assertTrue(reporters.containsKey("node_foo@" + tag));
assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.core, "collection1");
metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.core, "collection1");
reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, "collection1"));
assertEquals(5, reporters.size());
assertTrue(reporters.containsKey("universal_foo"));
assertTrue(reporters.containsKey("multigroup_foo"));
assertTrue(reporters.containsKey("specific_foo"));
assertTrue(reporters.containsKey("core_foo"));
assertTrue(reporters.containsKey("multiregistry_foo"));
assertTrue(reporters.containsKey("universal_foo@" + tag));
assertTrue(reporters.containsKey("multigroup_foo@" + tag));
assertTrue(reporters.containsKey("specific_foo@" + tag));
assertTrue(reporters.containsKey("core_foo@" + tag));
assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.jvm);
metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.jvm);
reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.jvm));
assertEquals(2, reporters.size());
assertTrue(reporters.containsKey("universal_foo"));
assertTrue(reporters.containsKey("multigroup_foo"));
assertTrue(reporters.containsKey("universal_foo@" + tag));
assertTrue(reporters.containsKey("multigroup_foo@" + tag));
metricManager.removeRegistry("solr.jvm");
reporters = metricManager.getReporters(

View File

@ -19,7 +19,6 @@ package org.apache.solr.metrics;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Map;
import java.util.Random;
@ -55,6 +54,11 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
private CoreContainer cc;
private SolrMetricManager metricManager;
private String tag;
private void assertTagged(Map<String, SolrMetricReporter> reporters, String name) {
assertTrue("Reporter '" + name + "' missing in " + reporters, reporters.containsKey(name + "@" + tag));
}
@Before
public void beforeTest() throws Exception {
@ -68,10 +72,13 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
new TestHarness.TestCoresLocator(DEFAULT_TEST_CORENAME, initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml"));
h.coreName = DEFAULT_TEST_CORENAME;
metricManager = cc.getMetricManager();
tag = h.getCore().getCoreMetricManager().getTag();
// initially there are more reporters, because two of them are added via a matching collection name
Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.core." + DEFAULT_TEST_CORENAME);
assertEquals(INITIAL_REPORTERS.length, reporters.size());
assertTrue(reporters.keySet().containsAll(Arrays.asList(INITIAL_REPORTERS)));
for (String r : INITIAL_REPORTERS) {
assertTagged(reporters, r);
}
// test rename operation
cc.rename(DEFAULT_TEST_CORENAME, CORE_NAME);
h.coreName = CORE_NAME;
@ -101,7 +108,7 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
deleteCore();
for (String reporterName : RENAMED_REPORTERS) {
SolrMetricReporter reporter = reporters.get(reporterName);
SolrMetricReporter reporter = reporters.get(reporterName + "@" + tag);
MockMetricReporter mockReporter = (MockMetricReporter) reporter;
assertTrue("Reporter " + reporterName + " was not closed: " + mockReporter, mockReporter.didClose);
}
@ -130,7 +137,7 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
// SPECIFIC and MULTIREGISTRY were skipped because they were
// specific to collection1
for (String reporterName : RENAMED_REPORTERS) {
SolrMetricReporter reporter = reporters.get(reporterName);
SolrMetricReporter reporter = reporters.get(reporterName + "@" + tag);
assertNotNull("Reporter " + reporterName + " was not found.", reporter);
assertTrue(reporter instanceof MockMetricReporter);

View File

@ -64,15 +64,17 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
coreMetricManager = core.getCoreMetricManager();
metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
PluginInfo pluginInfo = createReporterPluginInfo();
metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
pluginInfo, coreMetricManager.getTag());
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName());
assertTrue("reporters.size should be > 0, but was + " + reporters.size(), reporters.size() > 0);
reporterName = pluginInfo.name;
assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(reporterName));
assertTrue("wrong reporter class: " + reporters.get(reporterName), reporters.get(reporterName) instanceof SolrJmxReporter);
String taggedName = reporterName + "@" + coreMetricManager.getTag();
assertNotNull("reporter " + taggedName + " not present among " + reporters, reporters.get(taggedName));
assertTrue("wrong reporter class: " + reporters.get(taggedName), reporters.get(taggedName) instanceof SolrJmxReporter);
reporter = (SolrJmxReporter) reporters.get(reporterName);
reporter = (SolrJmxReporter) reporters.get(taggedName);
mBeanServer = reporter.getMBeanServer();
assertNotNull("MBean server not found.", mBeanServer);
}
@ -144,7 +146,8 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
h.getCoreContainer().reload(h.getCore().getName());
PluginInfo pluginInfo = createReporterPluginInfo();
metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
pluginInfo, String.valueOf(coreMetricManager.getCore().hashCode()));
coreMetricManager.registerMetricProducer(scope, producer);
objects = mBeanServer.queryMBeans(null, null);

View File

@ -0,0 +1,163 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics.reporters.solr;
import java.nio.file.Paths;
import java.util.Map;
import com.codahale.metrics.Metric;
import org.apache.commons.io.IOUtils;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.metrics.AggregateMetric;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricReporter;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
*/
public class SolrCloudReportersTest extends SolrCloudTestCase {
int leaderRegistries;
int clusterRegistries;
@BeforeClass
public static void configureDummyCluster() throws Exception {
configureCluster(0).configure();
}
@Before
public void closePreviousCluster() throws Exception {
shutdownCluster();
leaderRegistries = 0;
clusterRegistries = 0;
}
@Test
public void testExplicitConfiguration() throws Exception {
String solrXml = IOUtils.toString(SolrCloudReportersTest.class.getResourceAsStream("/solr/solr-solrreporter.xml"), "UTF-8");
configureCluster(2)
.withSolrXml(solrXml).configure();
cluster.uploadConfigSet(Paths.get(TEST_PATH().toString(), "configsets", "minimal", "conf"), "test");
System.out.println("ZK: " + cluster.getZkServer().getZkAddress());
CollectionAdminRequest.createCollection("test_collection", "test", 2, 2)
.setMaxShardsPerNode(4)
.process(cluster.getSolrClient());
waitForState("Expected test_collection with 2 shards and 2 replicas", "test_collection", clusterShape(2, 2));
Thread.sleep(15000);
cluster.getJettySolrRunners().forEach(jetty -> {
CoreContainer cc = jetty.getCoreContainer();
// verify registry names
for (String name : cc.getCoreNames()) {
SolrCore core = cc.getCore(name);
try {
String registryName = core.getCoreMetricManager().getRegistryName();
String leaderRegistryName = core.getCoreMetricManager().getLeaderRegistryName();
String coreName = core.getName();
String collectionName = core.getCoreDescriptor().getCollectionName();
String coreNodeName = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName();
String replicaName = coreName.split("_")[3];
String shardId = core.getCoreDescriptor().getCloudDescriptor().getShardId();
assertEquals("solr.core." + collectionName + "." + shardId + "." + replicaName, registryName);
assertEquals("solr.collection." + collectionName + "." + shardId + ".leader", leaderRegistryName);
} finally {
if (core != null) {
core.close();
}
}
}
SolrMetricManager metricManager = cc.getMetricManager();
Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.cluster");
assertEquals(reporters.toString(), 1, reporters.size());
SolrMetricReporter reporter = reporters.get("test");
assertNotNull(reporter);
assertTrue(reporter.toString(), reporter instanceof SolrClusterReporter);
SolrClusterReporter sor = (SolrClusterReporter)reporter;
assertEquals(5, sor.getPeriod());
for (String registryName : metricManager.registryNames(".*\\.shard[0-9]\\.replica.*")) {
reporters = metricManager.getReporters(registryName);
assertEquals(reporters.toString(), 1, reporters.size());
reporter = null;
for (String name : reporters.keySet()) {
if (name.startsWith("test")) {
reporter = reporters.get(name);
}
}
assertNotNull(reporter);
assertTrue(reporter.toString(), reporter instanceof SolrShardReporter);
SolrShardReporter srr = (SolrShardReporter)reporter;
assertEquals(5, srr.getPeriod());
}
for (String registryName : metricManager.registryNames(".*\\.leader")) {
leaderRegistries++;
reporters = metricManager.getReporters(registryName);
// no reporters registered for leader registry
assertEquals(reporters.toString(), 0, reporters.size());
// verify specific metrics
Map<String, Metric> metrics = metricManager.registry(registryName).getMetrics();
String key = "QUERY./select.requests.count";
assertTrue(key, metrics.containsKey(key));
assertTrue(key, metrics.get(key) instanceof AggregateMetric);
key = "UPDATE./update/json.requests.count";
assertTrue(key, metrics.containsKey(key));
assertTrue(key, metrics.get(key) instanceof AggregateMetric);
}
if (metricManager.registryNames().contains("solr.cluster")) {
clusterRegistries++;
Map<String,Metric> metrics = metricManager.registry("solr.cluster").getMetrics();
String key = "jvm.memory.heap.init.value";
assertTrue(key, metrics.containsKey(key));
assertTrue(key, metrics.get(key) instanceof AggregateMetric);
key = "leader.test_collection.shard1.UPDATE./update/json.requests.count.max";
assertTrue(key, metrics.containsKey(key));
assertTrue(key, metrics.get(key) instanceof AggregateMetric);
}
});
assertEquals("leaderRegistries", 2, leaderRegistries);
assertEquals("clusterRegistries", 1, clusterRegistries);
}
@Test
public void testDefaultPlugins() throws Exception {
String solrXml = IOUtils.toString(SolrCloudReportersTest.class.getResourceAsStream("/solr/solr.xml"), "UTF-8");
configureCluster(2)
.withSolrXml(solrXml).configure();
cluster.uploadConfigSet(Paths.get(TEST_PATH().toString(), "configsets", "minimal", "conf"), "test");
System.out.println("ZK: " + cluster.getZkServer().getZkAddress());
CollectionAdminRequest.createCollection("test_collection", "test", 2, 2)
.setMaxShardsPerNode(4)
.process(cluster.getSolrClient());
waitForState("Expected test_collection with 2 shards and 2 replicas", "test_collection", clusterShape(2, 2));
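    // the default solr.xml configures no cluster or shard reporters, so none should be registered anywhere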
cluster.getJettySolrRunners().forEach(jetty -> {
CoreContainer cc = jetty.getCoreContainer();
SolrMetricManager metricManager = cc.getMetricManager();
Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.cluster");
assertEquals(reporters.toString(), 0, reporters.size());
for (String registryName : metricManager.registryNames(".*\\.shard[0-9]\\.replica.*")) {
reporters = metricManager.getReporters(registryName);
assertEquals(reporters.toString(), 0, reporters.size());
}
});
}
}

View File

@ -0,0 +1,117 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics.reporters.solr;
import java.lang.invoke.MethodHandles;
import java.util.Map;
import com.codahale.metrics.Metric;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.metrics.AggregateMetric;
import org.apache.solr.metrics.SolrCoreMetricManager;
import org.apache.solr.metrics.SolrMetricManager;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Test that per-replica metrics reported by {@link SolrShardReporter} are aggregated in the shard leader's registry.
 */
public class SolrShardReporterTest extends AbstractFullDistribZkTestBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public SolrShardReporterTest() {
schemaString = "schema15.xml"; // we need a string id
}
@Override
public String getSolrXml() {
return "solr-solrreporter.xml";
}
@Test
public void test() throws Exception {
waitForRecoveriesToFinish("control_collection",
jettys.get(0).getCoreContainer().getZkController().getZkStateReader(), false);
waitForRecoveriesToFinish("collection1",
jettys.get(0).getCoreContainer().getZkController().getZkStateReader(), false);
printLayout();
// wait for at least two reports
Thread.sleep(10000);
ClusterState state = jettys.get(0).getCoreContainer().getZkController().getClusterState();
for (JettySolrRunner jetty : jettys) {
CoreContainer cc = jetty.getCoreContainer();
SolrMetricManager metricManager = cc.getMetricManager();
for (final String coreName : cc.getCoreNames()) {
CoreDescriptor cd = cc.getCoreDescriptor(coreName);
if (cd.getCloudDescriptor() == null) { // not a cloud collection
continue;
}
CloudDescriptor cloudDesc = cd.getCloudDescriptor();
DocCollection docCollection = state.getCollection(cloudDesc.getCollectionName());
String replicaName = SolrCoreMetricManager.parseReplicaName(cloudDesc.getCollectionName(), coreName);
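        // fall back to the coreNodeName when the core name cannot be parsed into a replica name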
if (replicaName == null) {
replicaName = cloudDesc.getCoreNodeName();
}
String registryName = SolrCoreMetricManager.createRegistryName(true,
cloudDesc.getCollectionName(), cloudDesc.getShardId(), replicaName, null);
String leaderRegistryName = SolrCoreMetricManager.createLeaderRegistryName(true,
cloudDesc.getCollectionName(), cloudDesc.getShardId());
boolean leader = cloudDesc.isLeader();
Slice slice = docCollection.getSlice(cloudDesc.getShardId());
int numReplicas = slice.getReplicas().size();
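        // on the shard leader each aggregated metric should contain one value per replica; non-leaders must not have the leader registry at all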
if (leader) {
assertTrue(metricManager.registryNames() + " doesn't contain " + leaderRegistryName,
metricManager.registryNames().contains(leaderRegistryName));
Map<String, Metric> metrics = metricManager.registry(leaderRegistryName).getMetrics();
metrics.forEach((k, v) -> {
assertTrue("Unexpected type of " + k + ": " + v.getClass().getName() + ", " + v,
v instanceof AggregateMetric);
AggregateMetric am = (AggregateMetric)v;
if (!k.startsWith("REPLICATION.peerSync")) {
assertEquals(coreName + "::" + registryName + "::" + k + ": " + am.toString(), numReplicas, am.size());
}
});
} else {
assertFalse(metricManager.registryNames() + " contains " + leaderRegistryName +
" but it's not a leader!",
metricManager.registryNames().contains(leaderRegistryName));
Map<String, Metric> metrics = metricManager.registry(leaderRegistryName).getMetrics();
metrics.forEach((k, v) -> {
assertTrue("Unexpected type of " + k + ": " + v.getClass().getName() + ", " + v,
v instanceof AggregateMetric);
AggregateMetric am = (AggregateMetric)v;
if (!k.startsWith("REPLICATION.peerSync")) {
assertEquals(coreName + "::" + registryName + "::" + k + ": " + am.toString(), 1, am.size());
}
});
}
assertTrue(metricManager.registryNames() + " doesn't contain " + registryName,
metricManager.registryNames().contains(registryName));
}
}
SolrMetricManager metricManager = controlJetty.getCoreContainer().getMetricManager();
assertTrue(metricManager.registryNames().contains("solr.cluster"));
}
}

View File

@ -17,12 +17,20 @@
package org.apache.solr.util.stats;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.metrics.AggregateMetric;
import org.junit.Test;
public class MetricUtilsTest extends SolrTestCaseJ4 {
@ -36,7 +44,7 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
timer.update(Math.abs(random().nextInt()) + 1, TimeUnit.NANOSECONDS);
}
// obtain timer metrics
NamedList lst = MetricUtils.timerToNamedList(timer);
NamedList lst = new NamedList(MetricUtils.timerToMap(timer, false));
// check that expected metrics were obtained
assertEquals(14, lst.size());
final Snapshot snapshot = timer.getSnapshot();
@ -52,5 +60,49 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
assertEquals(MetricUtils.nsToMs(snapshot.get999thPercentile()), lst.get("p999_ms"));
}
@Test
public void testMetrics() throws Exception {
MetricRegistry registry = new MetricRegistry();
Counter counter = registry.counter("counter");
counter.inc();
Timer timer = registry.timer("timer");
Timer.Context ctx = timer.time();
Thread.sleep(150);
ctx.stop();
Meter meter = registry.meter("meter");
meter.mark();
Histogram histogram = registry.histogram("histogram");
histogram.update(10);
AggregateMetric am = new AggregateMetric();
registry.register("aggregate", am);
am.set("foo", 10);
am.set("bar", 1);
am.set("bar", 2);
MetricUtils.toNamedMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL,
false, false, (k, v) -> {
if (k.startsWith("counter")) {
assertEquals(1L, v.get("count"));
} else if (k.startsWith("timer")) {
assertEquals(1L, v.get("count"));
assertTrue(((Number)v.get("min_ms")).intValue() > 100);
} else if (k.startsWith("meter")) {
assertEquals(1L, v.get("count"));
} else if (k.startsWith("histogram")) {
assertEquals(1L, v.get("count"));
} else if (k.startsWith("aggregate")) {
assertEquals(2, v.get("count"));
Map<String, Object> values = (Map<String, Object>)v.get("values");
assertNotNull(values);
assertEquals(2, values.size());
Map<String, Object> update = (Map<String, Object>)values.get("foo");
assertEquals(10, update.get("value"));
assertEquals(1, update.get("updateCount"));
update = (Map<String, Object>)values.get("bar");
assertEquals(2, update.get("value"));
assertEquals(2, update.get("updateCount"));
}
});
}
}

View File

@ -112,8 +112,8 @@ public class BinaryRequestWriter extends RequestWriter {
/*
* A hack to get access to the protected internal buffer and avoid an additional copy
*/
class BAOS extends ByteArrayOutputStream {
byte[] getbuf() {
public static class BAOS extends ByteArrayOutputStream {
public byte[] getbuf() {
return super.buf;
}
}

View File

@ -22,6 +22,7 @@ import java.lang.invoke.MethodHandles;
import java.util.Map;
import java.util.HashMap;
import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
@ -38,15 +39,27 @@ public class SolrClientCache implements Serializable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final Map<String, SolrClient> solrClients = new HashMap<>();
private final HttpClient httpClient;
public SolrClientCache() {
httpClient = null;
}
public SolrClientCache(HttpClient httpClient) {
this.httpClient = httpClient;
}
public synchronized CloudSolrClient getCloudSolrClient(String zkHost) {
CloudSolrClient client;
if (solrClients.containsKey(zkHost)) {
client = (CloudSolrClient) solrClients.get(zkHost);
} else {
client = new CloudSolrClient.Builder()
.withZkHost(zkHost)
.build();
CloudSolrClient.Builder builder = new CloudSolrClient.Builder()
.withZkHost(zkHost);
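      // reuse the shared HttpClient when one was supplied to this cache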
if (httpClient != null) {
builder = builder.withHttpClient(httpClient);
}
client = builder.build();
client.connect();
solrClients.put(zkHost, client);
}
@ -59,8 +72,11 @@ public class SolrClientCache implements Serializable {
if (solrClients.containsKey(host)) {
client = (HttpSolrClient) solrClients.get(host);
} else {
client = new HttpSolrClient.Builder(host)
.build();
HttpSolrClient.Builder builder = new HttpSolrClient.Builder(host);
if (httpClient != null) {
builder = builder.withHttpClient(httpClient);
}
client = builder.build();
solrClients.put(host, client);
}
return client;

View File

@ -251,8 +251,8 @@ public class TestCoreAdmin extends AbstractEmbeddedSolrServerTestCase {
// assert initial metrics
SolrMetricManager metricManager = cores.getMetricManager();
String core0RegistryName = SolrCoreMetricManager.createRegistryName(null, "core0");
String core1RegistryName = SolrCoreMetricManager.createRegistryName(null, "core1");
String core0RegistryName = SolrCoreMetricManager.createRegistryName(false, null, null, null, "core0");
String core1RegistryName = SolrCoreMetricManager.createRegistryName(false, null, null, null, "core1");
MetricRegistry core0Registry = metricManager.registry(core0RegistryName);
MetricRegistry core1Registry = metricManager.registry(core1RegistryName);