SOLR-9959: SolrInfoMBean-s category and hierarchy cleanup.

This commit is contained in:
Andrzej Bialecki 2017-04-11 19:22:23 +02:00
parent e386ec973b
commit e30cc70fdd
155 changed files with 3049 additions and 2888 deletions

View File

@ -0,0 +1,10 @@
<!-- IntelliJ IDEA library definition for the Solr DataImportHandler (DIH)
     core contrib: every jar directly under solr/contrib/dataimporthandler/lib
     is placed on the classpath (recursive="false" - subdirectories are not
     scanned). No javadoc or source attachments are configured. -->
<component name="libraryTable">
<library name="Solr DIH core library">
<CLASSES>
<root url="file://$PROJECT_DIR$/solr/contrib/dataimporthandler/lib" />
</CLASSES>
<JAVADOC />
<SOURCES />
<jarDirectory url="file://$PROJECT_DIR$/solr/contrib/dataimporthandler/lib" recursive="false" />
</library>
</component>

View File

@ -16,9 +16,10 @@
<orderEntry type="library" scope="TEST" name="HSQLDB" level="project" />
<orderEntry type="library" scope="TEST" name="Derby" level="project" />
<orderEntry type="library" scope="TEST" name="Solr DIH test library" level="project" />
<orderEntry type="library" scope="TEST" name="Solr example library" level="project" />
<orderEntry type="library" name="Solr example library" level="project" />
<orderEntry type="library" name="Solr core library" level="project" />
<orderEntry type="library" name="Solrj library" level="project" />
<orderEntry type="library" name="Solr DIH core library" level="project" />
<orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
<orderEntry type="module" scope="TEST" module-name="solr-test-framework" />
<orderEntry type="module" module-name="solr-core" />

View File

@ -813,7 +813,7 @@ org.apache.solr.EchoParamsTest=136,170,349,124,140,142,284
org.apache.solr.MinimalSchemaTest=304,316,467,304,297,755,309
org.apache.solr.OutputWriterTest=302,276,265,314,244,211,268
org.apache.solr.SampleTest=339,290,266,243,333,414,355
org.apache.solr.SolrInfoMBeanTest=1090,1132,644,629,637,1023,735
org.apache.solr.SolrInfoBeanTest=1090,1132,644,629,637,1023,735
org.apache.solr.TestDistributedGrouping=13095,9478,8420,9633,10692,9265,10893
org.apache.solr.TestDistributedSearch=11199,9886,16211,11367,11325,10717,10392
org.apache.solr.TestDocumentBuilder=10,10,9,13,10,9,10

View File

@ -52,6 +52,19 @@ Upgrading from Solr 6.x
* Deprecated method getNumericType() has been removed from FieldType. Use getNumberType() instead
* MBean names and attributes now follow hierarchical names used in metrics. This is reflected also in
/admin/mbeans and /admin/plugins output, and can be observed in the UI Plugins tab, because now all these
APIs get their data from the metrics API. The old (mostly flat) JMX view has been removed.
* <jmx> element in solrconfig.xml is no longer supported. Equivalent functionality can be configured in
solr.xml using <metrics><reporter ...> element and SolrJmxReporter implementation. Limited back-compatibility
is offered by automatically adding a default instance of SolrJmxReporter if it's missing, AND when a local
MBean server is found (which can be activated either via ENABLE_REMOTE_JMX_OPTS in solr.in.sh or via system
properties, eg. -Dcom.sun.management.jmxremote). This default instance exports all Solr metrics from all
registries as hierarchical MBeans. This behavior can be also disabled by specifying a SolrJmxReporter
configuration with a boolean init arg "enabled" set to "false". For a more fine-grained control users
should explicitly specify at least one SolrJmxReporter configuration.
New Features
----------------------
* SOLR-9857, SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer. (ab)
@ -96,6 +109,10 @@ Other Changes
* SOLR-10347: Removed index level boost support from "documents" section of the admin UI (Amrit Sarkar via
Tomás Fernández Löbbe)
* SOLR-9959: SolrInfoMBean category and hierarchy cleanup. Per-component statistics are now obtained from
the metrics API, legacy JMX support has been replaced with SolrJmxReporter functionality. Several reporter
improvements (support for multiple prefix filters, "enabled" flag, reuse of service clients). (ab)
----------------------
================== 6.6.0 ==================

View File

@ -16,11 +16,11 @@
*/
package org.apache.solr.analytics.plugin;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import com.codahale.metrics.Timer;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.util.stats.MetricUtils;
public class AnalyticsStatisticsCollector {
@ -85,17 +85,20 @@ public class AnalyticsStatisticsCollector {
currentTimer.stop();
}
public NamedList<Object> getStatistics() {
NamedList<Object> lst = new SimpleOrderedMap<>();
lst.add("requests", numRequests.longValue());
lst.add("analyticsRequests", numAnalyticsRequests.longValue());
lst.add("statsRequests", numStatsRequests.longValue());
lst.add("statsCollected", numCollectedStats.longValue());
lst.add("fieldFacets", numFieldFacets.longValue());
lst.add("rangeFacets", numRangeFacets.longValue());
lst.add("queryFacets", numQueryFacets.longValue());
lst.add("queriesInQueryFacets", numQueries.longValue());
MetricUtils.addMetrics(lst, requestTimes);
return lst;
public Map<String, Object> getStatistics() {
Map<String, Object> map = new HashMap<>();
MetricUtils.convertTimer("", requestTimes, false, false, (k, v) -> {
map.putAll((Map<String, Object>)v);
});
map.put("requests", numRequests.longValue());
map.put("analyticsRequests", numAnalyticsRequests.longValue());
map.put("statsRequests", numStatsRequests.longValue());
map.put("statsCollected", numCollectedStats.longValue());
map.put("fieldFacets", numFieldFacets.longValue());
map.put("rangeFacets", numRangeFacets.longValue());
map.put("queryFacets", numQueryFacets.longValue());
map.put("queriesInQueryFacets", numQueries.longValue());
return map;
}
}

View File

@ -22,9 +22,11 @@ import org.apache.solr.analytics.plugin.AnalyticsStatisticsCollector;
import org.apache.solr.analytics.request.AnalyticsStats;
import org.apache.solr.analytics.util.AnalyticsParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
public class AnalyticsComponent extends SearchComponent {
public class AnalyticsComponent extends SearchComponent implements SolrMetricProducer {
public static final String COMPONENT_NAME = "analytics";
private final AnalyticsStatisticsCollector analyticsCollector = new AnalyticsStatisticsCollector();;
@ -80,12 +82,8 @@ public class AnalyticsComponent extends SearchComponent {
}
@Override
public String getVersion() {
return getClass().getPackage().getSpecificationVersion();
}
@Override
public NamedList getStatistics() {
return analyticsCollector.getStatistics();
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
MetricsMap metrics = new MetricsMap((detailed, map) -> map.putAll(analyticsCollector.getStatistics()));
manager.registerGauge(this, registry, metrics, true, getClass().getSimpleName(), getCategory().toString(), scope);
}
}

View File

@ -26,12 +26,13 @@ import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.response.RawResponseWriter;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
@ -74,6 +75,8 @@ public class DataImportHandler extends RequestHandlerBase implements
private String myName = "dataimport";
private MetricsMap metrics;
private static final String PARAM_WRITER_IMPL = "writerImpl";
private static final String DEFAULT_WRITER_NAME = "SolrWriter";
@ -260,41 +263,33 @@ public class DataImportHandler extends RequestHandlerBase implements
};
}
}
@Override
@SuppressWarnings("unchecked")
public NamedList getStatistics() {
if (importer == null)
return super.getStatistics();
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
super.initializeMetrics(manager, registryName, scope);
metrics = new MetricsMap((detailed, map) -> {
if (importer != null) {
DocBuilder.Statistics cumulative = importer.cumulativeStatistics;
DocBuilder.Statistics cumulative = importer.cumulativeStatistics;
SimpleOrderedMap result = new SimpleOrderedMap();
map.put("Status", importer.getStatus().toString());
result.add("Status", importer.getStatus().toString());
if (importer.docBuilder != null) {
DocBuilder.Statistics running = importer.docBuilder.importStatistics;
map.put("Documents Processed", running.docCount);
map.put("Requests made to DataSource", running.queryCount);
map.put("Rows Fetched", running.rowsCount);
map.put("Documents Deleted", running.deletedDocCount);
map.put("Documents Skipped", running.skipDocCount);
}
if (importer.docBuilder != null) {
DocBuilder.Statistics running = importer.docBuilder.importStatistics;
result.add("Documents Processed", running.docCount);
result.add("Requests made to DataSource", running.queryCount);
result.add("Rows Fetched", running.rowsCount);
result.add("Documents Deleted", running.deletedDocCount);
result.add("Documents Skipped", running.skipDocCount);
}
result.add(DataImporter.MSG.TOTAL_DOC_PROCESSED, cumulative.docCount);
result.add(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, cumulative.queryCount);
result.add(DataImporter.MSG.TOTAL_ROWS_EXECUTED, cumulative.rowsCount);
result.add(DataImporter.MSG.TOTAL_DOCS_DELETED, cumulative.deletedDocCount);
result.add(DataImporter.MSG.TOTAL_DOCS_SKIPPED, cumulative.skipDocCount);
NamedList requestStatistics = super.getStatistics();
if (requestStatistics != null) {
for (int i = 0; i < requestStatistics.size(); i++) {
result.add(requestStatistics.getName(i), requestStatistics.getVal(i));
map.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, cumulative.docCount);
map.put(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, cumulative.queryCount);
map.put(DataImporter.MSG.TOTAL_ROWS_EXECUTED, cumulative.rowsCount);
map.put(DataImporter.MSG.TOTAL_DOCS_DELETED, cumulative.deletedDocCount);
map.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED, cumulative.skipDocCount);
}
}
return result;
});
manager.registerGauge(this, registryName, metrics, true, "importer", getCategory().toString(), scope);
}
// //////////////////////SolrInfoMBeans methods //////////////////////

View File

@ -81,6 +81,7 @@ import org.apache.solr.metrics.SolrCoreMetricManager;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.search.SolrFieldCacheBean;
import org.apache.solr.security.AuthenticationPlugin;
import org.apache.solr.security.AuthorizationPlugin;
import org.apache.solr.security.HttpClientBuilderPlugin;
@ -482,18 +483,18 @@ public class CoreContainer {
metricManager = new SolrMetricManager();
coreContainerWorkExecutor = MetricUtils.instrumentedExecutorService(
coreContainerWorkExecutor,
metricManager.registry(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node)),
SolrMetricManager.mkName("coreContainerWorkExecutor", SolrInfoMBean.Category.CONTAINER.toString(), "threadPool"));
coreContainerWorkExecutor, null,
metricManager.registry(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node)),
SolrMetricManager.mkName("coreContainerWorkExecutor", SolrInfoBean.Category.CONTAINER.toString(), "threadPool"));
shardHandlerFactory = ShardHandlerFactory.newInstance(cfg.getShardHandlerFactoryPluginInfo(), loader);
if (shardHandlerFactory instanceof SolrMetricProducer) {
SolrMetricProducer metricProducer = (SolrMetricProducer) shardHandlerFactory;
metricProducer.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), "httpShardHandler");
metricProducer.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), "httpShardHandler");
}
updateShardHandler = new UpdateShardHandler(cfg.getUpdateShardHandlerConfig());
updateShardHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), "updateShardHandler");
updateShardHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), "updateShardHandler");
transientCoreCache = TransientSolrCoreCacheFactory.newInstance(loader, this);
@ -520,14 +521,14 @@ public class CoreContainer {
// may want to add some configuration here in the future
metricsCollectorHandler.init(null);
containerHandlers.put(AUTHZ_PATH, securityConfHandler);
securityConfHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), AUTHZ_PATH);
securityConfHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), AUTHZ_PATH);
containerHandlers.put(AUTHC_PATH, securityConfHandler);
if(pkiAuthenticationPlugin != null)
containerHandlers.put(PKIAuthenticationPlugin.PATH, pkiAuthenticationPlugin.getRequestHandler());
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.node);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jvm);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jetty);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoBean.Group.node);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoBean.Group.jvm);
metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoBean.Group.jetty);
coreConfigService = ConfigSetService.createConfigSetService(cfg, loader, zkSys.zkController);
@ -535,17 +536,25 @@ public class CoreContainer {
// initialize gauges for reporting the number of cores and disk total/free
String registryName = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node);
metricManager.registerGauge(registryName, () -> solrCores.getCores().size(),
true, "loaded", SolrInfoMBean.Category.CONTAINER.toString(), "cores");
metricManager.registerGauge(registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
true, "lazy",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
metricManager.registerGauge(registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
true, "totalSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
true, "usableSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node);
metricManager.registerGauge(null, registryName, () -> solrCores.getCores().size(),
true, "loaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
metricManager.registerGauge(null, registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
true, "lazy", SolrInfoBean.Category.CONTAINER.toString(), "cores");
metricManager.registerGauge(null, registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
true, "unloaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
// add version information
metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getSpecificationVersion(),
true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version");
metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getImplementationVersion(),
true, "implementation", SolrInfoBean.Category.CONTAINER.toString(), "version");
SolrFieldCacheBean fieldCacheBean = new SolrFieldCacheBean();
fieldCacheBean.initializeMetrics(metricManager, registryName, null);
if (isZooKeeperAware()) {
metricManager.loadClusterReporters(cfg.getMetricReporterPlugins(), this);
@ -555,9 +564,9 @@ public class CoreContainer {
ExecutorService coreLoadExecutor = MetricUtils.instrumentedExecutorService(
ExecutorUtil.newMDCAwareFixedThreadPool(
cfg.getCoreLoadThreadCount(isZooKeeperAware()),
new DefaultSolrThreadFactory("coreLoadExecutor")),
metricManager.registry(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node)),
SolrMetricManager.mkName("coreLoadExecutor",SolrInfoMBean.Category.CONTAINER.toString(), "threadPool"));
new DefaultSolrThreadFactory("coreLoadExecutor")), null,
metricManager.registry(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node)),
SolrMetricManager.mkName("coreLoadExecutor", SolrInfoBean.Category.CONTAINER.toString(), "threadPool"));
final List<Future<SolrCore>> futures = new ArrayList<>();
try {
List<CoreDescriptor> cds = coresLocator.discover(this);
@ -685,14 +694,16 @@ public class CoreContainer {
ExecutorUtil.shutdownAndAwaitTermination(coreContainerWorkExecutor);
if (metricManager != null) {
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node));
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm));
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jetty));
}
if (isZooKeeperAware()) {
cancelCoreRecoveries();
zkSys.zkController.publishNodeAsDown(zkSys.zkController.getNodeName());
if (metricManager != null) {
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.cluster));
metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.cluster));
}
}
@ -1058,7 +1069,7 @@ public class CoreContainer {
private void resetIndexDirectory(CoreDescriptor dcore, ConfigSet coreConfig) {
SolrConfig config = coreConfig.getSolrConfig();
String registryName = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, dcore.getName());
String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, dcore.getName());
DirectoryFactory df = DirectoryFactory.loadDirectoryFactory(config, this, registryName);
String dataDir = SolrCore.findDataDir(df, null, config, dcore);
@ -1376,7 +1387,7 @@ public class CoreContainer {
containerHandlers.put(path, (SolrRequestHandler)handler);
}
if (handler instanceof SolrMetricProducer) {
((SolrMetricProducer)handler).initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), path);
((SolrMetricProducer)handler).initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), path);
}
return handler;
}

View File

@ -24,7 +24,6 @@ import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.file.NoSuchFileException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@ -321,13 +320,6 @@ public abstract class DirectoryFactory implements NamedListInitializedPlugin,
return cd.getInstanceDir().resolve(cd.getDataDir()).toAbsolutePath().toString();
}
/**
* Optionally allow the DirectoryFactory to request registration of some MBeans.
*/
public Collection<SolrInfoMBean> offerMBeans() {
return Collections.emptySet();
}
public void cleanupOldIndexDirectories(final String dataDirPath, final String currentIndexDirPath, boolean afterCoreReload) {
File dataDir = new File(dataDirPath);
if (!dataDir.isDirectory()) {

View File

@ -22,7 +22,6 @@ import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@ -51,6 +50,8 @@ import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.store.blockcache.BlockCache;
import org.apache.solr.store.blockcache.BlockDirectory;
import org.apache.solr.store.blockcache.BlockDirectoryCache;
@ -70,7 +71,7 @@ import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
public class HdfsDirectoryFactory extends CachingDirectoryFactory implements SolrCoreAware {
public class HdfsDirectoryFactory extends CachingDirectoryFactory implements SolrCoreAware, SolrMetricProducer {
private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final String BLOCKCACHE_SLAB_COUNT = "solr.hdfs.blockcache.slab.count";
@ -493,8 +494,9 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
}
@Override
public Collection<SolrInfoMBean> offerMBeans() {
return Arrays.<SolrInfoMBean>asList(MetricsHolder.metrics, LocalityHolder.reporter);
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
MetricsHolder.metrics.initializeMetrics(manager, registry, scope);
LocalityHolder.reporter.initializeMetrics(manager, registry, scope);
}
@Override

View File

@ -1,478 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
import javax.management.InstanceNotFoundException;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.MBeanServer;
import javax.management.MBeanServerFactory;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.Query;
import javax.management.QueryExp;
import javax.management.ReflectionException;
import javax.management.openmbean.OpenMBeanAttributeInfoSupport;
import javax.management.openmbean.OpenType;
import javax.management.openmbean.SimpleType;
import javax.management.remote.JMXConnectorServer;
import javax.management.remote.JMXConnectorServerFactory;
import javax.management.remote.JMXServiceURL;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrConfig.JmxConfiguration;
import org.apache.solr.metrics.reporters.JmxObjectNameFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.ID;
import static org.apache.solr.common.params.CommonParams.NAME;
/**
* <p>
* Responsible for finding (or creating) an MBeanServer from the given configuration
* and registering all SolrInfoMBean objects with JMX.
* </p>
* <p>
* Please see http://wiki.apache.org/solr/SolrJmx for instructions on usage and configuration
* </p>
*
*
* @see org.apache.solr.core.SolrConfig.JmxConfiguration
* @since solr 1.3
*/
public class JmxMonitoredMap<K, V> extends
ConcurrentHashMap<String, SolrInfoMBean> {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private static final String REPORTER_NAME = "_jmx_";
// set to true to use cached statistics NamedLists between getMBeanInfo calls to work
// around over calling getStatistics on MBeanInfos when iterating over all attributes (SOLR-6586)
private final boolean useCachedStatsBetweenGetMBeanInfoCalls = Boolean.getBoolean("useCachedStatsBetweenGetMBeanInfoCalls");
private final MBeanServer server;
private final String jmxRootName;
private final String coreHashCode;
private final JmxObjectNameFactory nameFactory;
private final String registryName;
/**
 * Creates the map and locates (or creates) the MBeanServer that beans will be
 * registered with, driven by the given JmxConfiguration.
 *
 * <p>Resolution order, as implemented below:
 * no serviceUrl and no agentId - first MBeanServer returned by
 * MBeanServerFactory.findMBeanServer(null); agentId set - first server
 * matching that agentId (SolrException if none); serviceUrl set - a brand-new
 * MBeanServer exposed through a JMXConnectorServer at that URL.
 * If no server can be found, {@code server} and {@code nameFactory} are left
 * null and all JMX registration is silently skipped.
 */
public JmxMonitoredMap(String coreName, String coreHashCode, String registryName,
final JmxConfiguration jmxConfig) {
this.coreHashCode = coreHashCode;
this.registryName = registryName;
// Root JMX name: explicit rootName from config, else "solr" or "solr/<core>".
jmxRootName = (null != jmxConfig.rootName ?
jmxConfig.rootName
: ("solr" + (null != coreName ? "/" + coreName : "")));
if (jmxConfig.serviceUrl == null) {
List<MBeanServer> servers = null;
if (jmxConfig.agentId == null) {
// Try to find the first MBeanServer
servers = MBeanServerFactory.findMBeanServer(null);
} else if (jmxConfig.agentId != null) {
// Try to find the first MBean server with the given agentId
servers = MBeanServerFactory.findMBeanServer(jmxConfig.agentId);
// throw Exception if no servers were found with the given agentId
if (servers == null || servers.isEmpty())
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"No JMX Servers found with agentId: " + jmxConfig.agentId);
}
if (servers == null || servers.isEmpty()) {
// No server available: disable JMX entirely (server == null is the
// sentinel checked by put()/remove()/clear()/unregister()).
server = null;
nameFactory = null;
log.debug("No JMX servers found, not exposing Solr information with JMX.");
return;
}
server = servers.get(0);
log.info("JMX monitoring is enabled. Adding Solr mbeans to JMX Server: "
+ server);
} else {
MBeanServer newServer = null;
try {
// Create a new MBeanServer with the given serviceUrl
newServer = MBeanServerFactory.newMBeanServer();
JMXConnectorServer connector = JMXConnectorServerFactory
.newJMXConnectorServer(new JMXServiceURL(jmxConfig.serviceUrl),
null, newServer);
connector.start();
log.info("JMX monitoring is enabled at " + jmxConfig.serviceUrl);
} catch (Exception e) {
// Release the reference
throw new RuntimeException("Could not start JMX monitoring ", e);
}
server = newServer;
}
// Factory producing the metrics-style ObjectNames; the reporter id embeds
// the core hash code so this core's beans can be matched in clear().
nameFactory = new JmxObjectNameFactory(REPORTER_NAME + coreHashCode, registryName);
}
/**
 * Clears the map and unregisters all SolrInfoMBeans in the map from the
 * MBeanServer.
 *
 * <p>Beans belonging to this core are matched either by their
 * {@code coreHashCode} attribute (legacy flat names) or by their
 * {@code reporter} attribute (new metrics-style names) — mirroring the
 * dual registration performed in put().
 */
@Override
public void clear() {
if (server != null) {
QueryExp exp = Query.or(Query.eq(Query.attr("coreHashCode"), Query.value(coreHashCode)),
Query.eq(Query.attr("reporter"), Query.value(REPORTER_NAME + coreHashCode)));
Set<ObjectName> objectNames = null;
try {
objectNames = server.queryNames(null, exp);
} catch (Exception e) {
// Best-effort cleanup: log and fall through so the map is still cleared.
log.warn("Exception querying for mbeans", e);
}
if (objectNames != null) {
for (ObjectName name : objectNames) {
try {
server.unregisterMBean(name);
} catch (InstanceNotFoundException ie) {
// ignore - someone else already deleted this one
} catch (Exception e) {
log.warn("Exception un-registering mbean {}", name, e);
}
}
}
}
super.clear();
}
/**
 * Adds the SolrInfoMBean to the map and registers the given SolrInfoMBean
 * instance with the MBeanServer defined for this core. If a SolrInfoMBean is
 * already registered with the MBeanServer then it is unregistered and then
 * re-registered.
 *
 * <p>Each bean is registered under TWO ObjectNames — the legacy flat name
 * (see getObjectName) and the new hierarchical metrics-style name produced by
 * the JmxObjectNameFactory — both backed by the same SolrDynamicMBean wrapper.
 *
 * @param key the JMX type name for this SolrInfoMBean
 * @param infoBean the SolrInfoMBean instance to be registered
 */
@Override
public SolrInfoMBean put(String key, SolrInfoMBean infoBean) {
if (server != null && infoBean != null) {
try {
// back-compat name
ObjectName name = getObjectName(key, infoBean);
if (server.isRegistered(name))
server.unregisterMBean(name);
SolrDynamicMBean mbean = new SolrDynamicMBean(coreHashCode, infoBean, useCachedStatsBetweenGetMBeanInfoCalls);
server.registerMBean(mbean, name);
// now register it also under new name
String beanName = createBeanName(infoBean, key);
name = nameFactory.createName(null, registryName, beanName);
if (server.isRegistered(name))
server.unregisterMBean(name);
server.registerMBean(mbean, name);
} catch (Exception e) {
// Registration failures are logged, not rethrown: the entry is still
// added to the map below so in-process consumers keep working.
log.warn( "Failed to register info bean: key=" + key + ", infoBean=" + infoBean, e);
}
}
return super.put(key, infoBean);
}
/**
 * Builds the hierarchical "category.key.name" bean name used for the
 * metrics-style JMX registration of the given bean.
 *
 * @throws IllegalArgumentException if the bean's category is null
 */
private String createBeanName(SolrInfoMBean infoBean, String key) {
  if (infoBean.getCategory() == null) {
    throw new IllegalArgumentException("SolrInfoMBean.category must never be null: " + infoBean);
  }
  // Same "category.key.name" result as before, assembled with String.join
  // instead of an explicit StringBuilder.
  return String.join(".", infoBean.getCategory().toString(), key, infoBean.getName());
}
/**
 * Removes the SolrInfoMBean object at the given key and unregisters it from
 * the MBeanServer.
 *
 * @param key the JMX type name for this SolrInfoMBean
 */
@Override
public SolrInfoMBean remove(Object key) {
  SolrInfoMBean bean = get(key);
  // Nothing registered under this key: just delegate to the map removal.
  if (bean == null) {
    return super.remove(key);
  }
  try {
    unregister((String) key, bean);
  } catch (RuntimeException e) {
    // Unregistration is best-effort; the map entry is removed regardless.
    log.warn( "Failed to unregister info bean: " + key, e);
  }
  return super.remove(key);
}
// Unregisters the bean from the MBeanServer under both names that put()
// registered it with; a no-op when JMX was never enabled (server == null).
// Any JMX failure is wrapped in a SolrException.
private void unregister(String key, SolrInfoMBean infoBean) {
if (server == null)
return;
try {
// remove legacy name
ObjectName name = getObjectName(key, infoBean);
// Only unregister the legacy name when it was registered by THIS core —
// multiple cores may share the flat name, disambiguated by coreHashCode.
if (server.isRegistered(name) && coreHashCode.equals(server.getAttribute(name, "coreHashCode"))) {
server.unregisterMBean(name);
}
// remove new name
String beanName = createBeanName(infoBean, key);
name = nameFactory.createName(null, registryName, beanName);
if (server.isRegistered(name)) {
server.unregisterMBean(name);
}
} catch (Exception e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"Failed to unregister info bean: " + key, e);
}
}
/**
 * Builds the legacy flat ObjectName for the bean: domain {@code jmxRootName},
 * property {@code type=key}, plus {@code id=<bean name>} when the bean has a
 * non-empty name.
 */
private ObjectName getObjectName(String key, SolrInfoMBean infoBean)
    throws MalformedObjectNameException {
  Hashtable<String, String> props = new Hashtable<>();
  props.put("type", key);
  String beanName = infoBean.getName();
  if (beanName != null && !"".equals(beanName)) {
    props.put(ID, beanName);
  }
  return ObjectName.getInstance(jmxRootName, props);
}
/** For test verification: the MBeanServer in use, or null when JMX is disabled. */
public MBeanServer getServer() {
return server;
}
/**
* DynamicMBean is used to dynamically expose all SolrInfoMBean
* getStatistics() NameList keys as String getters.
*/
static class SolrDynamicMBean implements DynamicMBean {
private SolrInfoMBean infoBean;
private HashSet<String> staticStats;
private String coreHashCode;
private volatile NamedList cachedDynamicStats;
private boolean useCachedStatsBetweenGetMBeanInfoCalls;
/** Convenience constructor: statistics are NOT cached between getMBeanInfo calls. */
public SolrDynamicMBean(String coreHashCode, SolrInfoMBean managedResource) {
this(coreHashCode, managedResource, false);
}
/**
 * Wraps the managed resource as a DynamicMBean.
 *
 * <p>If the resource is a JmxAugmentedSolrInfoMBean, it is wrapped so that
 * getStatistics() returns the JMX-specific getStatisticsForJmx() instead.
 * The names in {@code staticStats} are exposed via the plain SolrInfoMBean
 * getters rather than the dynamic statistics list (see getMBeanInfo).
 */
public SolrDynamicMBean(String coreHashCode, SolrInfoMBean managedResource, boolean useCachedStatsBetweenGetMBeanInfoCalls) {
this.useCachedStatsBetweenGetMBeanInfoCalls = useCachedStatsBetweenGetMBeanInfoCalls;
if (managedResource instanceof JmxAugmentedSolrInfoMBean) {
final JmxAugmentedSolrInfoMBean jmxSpecific = (JmxAugmentedSolrInfoMBean)managedResource;
this.infoBean = new SolrInfoMBeanWrapper(jmxSpecific) {
@Override
public NamedList getStatistics() { return jmxSpecific.getStatisticsForJmx(); }
};
} else {
this.infoBean = managedResource;
}
staticStats = new HashSet<>();
// For which getters are already available in SolrInfoMBean
staticStats.add(NAME);
staticStats.add("version");
staticStats.add("description");
staticStats.add("category");
staticStats.add("source");
this.coreHashCode = coreHashCode;
}
@Override
public MBeanInfo getMBeanInfo() {
ArrayList<MBeanAttributeInfo> attrInfoList = new ArrayList<>();
for (String stat : staticStats) {
attrInfoList.add(new MBeanAttributeInfo(stat, String.class.getName(),
null, true, false, false));
}
// add core's hashcode
attrInfoList.add(new MBeanAttributeInfo("coreHashCode", String.class.getName(),
null, true, false, false));
try {
NamedList dynamicStats = infoBean.getStatistics();
if (useCachedStatsBetweenGetMBeanInfoCalls) {
cachedDynamicStats = dynamicStats;
}
if (dynamicStats != null) {
for (int i = 0; i < dynamicStats.size(); i++) {
String name = dynamicStats.getName(i);
if (staticStats.contains(name)) {
continue;
}
Class type = dynamicStats.get(name).getClass();
OpenType typeBox = determineType(type);
if (type.equals(String.class) || typeBox == null) {
attrInfoList.add(new MBeanAttributeInfo(dynamicStats.getName(i),
String.class.getName(), null, true, false, false));
} else {
attrInfoList.add(new OpenMBeanAttributeInfoSupport(
dynamicStats.getName(i), dynamicStats.getName(i), typeBox,
true, false, false));
}
}
}
} catch (Exception e) {
// don't log issue if the core is closing
if (!(SolrException.getRootCause(e) instanceof AlreadyClosedException))
log.warn("Could not getStatistics on info bean {}", infoBean.getName(), e);
}
MBeanAttributeInfo[] attrInfoArr = attrInfoList
.toArray(new MBeanAttributeInfo[attrInfoList.size()]);
return new MBeanInfo(getClass().getName(), infoBean
.getDescription(), attrInfoArr, null, null, null);
}
private OpenType determineType(Class type) {
try {
for (Field field : SimpleType.class.getFields()) {
if (field.getType().equals(SimpleType.class)) {
SimpleType candidate = (SimpleType) field.get(SimpleType.class);
if (candidate.getTypeName().equals(type.getName())) {
return candidate;
}
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
return null;
}
@Override
public Object getAttribute(String attribute)
throws AttributeNotFoundException, MBeanException, ReflectionException {
Object val;
if ("coreHashCode".equals(attribute)) {
val = coreHashCode;
} else if (staticStats.contains(attribute) && attribute != null
&& attribute.length() > 0) {
try {
String getter = "get" + attribute.substring(0, 1).toUpperCase(Locale.ROOT)
+ attribute.substring(1);
Method meth = infoBean.getClass().getMethod(getter);
val = meth.invoke(infoBean);
} catch (Exception e) {
throw new AttributeNotFoundException(attribute);
}
} else {
NamedList stats = null;
if (useCachedStatsBetweenGetMBeanInfoCalls) {
NamedList cachedStats = this.cachedDynamicStats;
if (cachedStats != null) {
stats = cachedStats;
}
}
if (stats == null) {
stats = infoBean.getStatistics();
}
val = stats.get(attribute);
}
if (val != null) {
// It's String or one of the simple types, just return it as JMX suggests direct support for such types
for (String simpleTypeName : SimpleType.ALLOWED_CLASSNAMES_LIST) {
if (val.getClass().getName().equals(simpleTypeName)) {
return val;
}
}
// It's an arbitrary object which could be something complex and odd, return its toString, assuming that is
// a workable representation of the object
return val.toString();
}
return null;
}
@Override
public AttributeList getAttributes(String[] attributes) {
AttributeList list = new AttributeList();
for (String attribute : attributes) {
try {
list.add(new Attribute(attribute, getAttribute(attribute)));
} catch (Exception e) {
log.warn("Could not get attribute " + attribute);
}
}
return list;
}
@Override
public void setAttribute(Attribute attribute)
throws AttributeNotFoundException, InvalidAttributeValueException,
MBeanException, ReflectionException {
throw new UnsupportedOperationException("Operation not Supported");
}
@Override
public AttributeList setAttributes(AttributeList attributes) {
throw new UnsupportedOperationException("Operation not Supported");
}
@Override
public Object invoke(String actionName, Object[] params, String[] signature)
throws MBeanException, ReflectionException {
throw new UnsupportedOperationException("Operation not Supported");
}
}
  /**
   * SolrInfoMBean that provides JMX-specific statistics. Used, for example,
   * if generating full statistics is expensive; the expensive statistics can
   * be generated normally for use with the web ui, while an abbreviated version
   * is generated for periodic JMX use via {@link #getStatisticsForJmx()}.
   */
  public interface JmxAugmentedSolrInfoMBean extends SolrInfoMBean {
    /**
     * JMX-specific statistics, reported to JMX in place of
     * {@link SolrInfoMBean#getStatistics()}.
     */
    public NamedList getStatisticsForJmx();
  }
}

View File

@ -294,8 +294,8 @@ public class PluginBag<T> implements AutoCloseable {
private void registerMBean(Object inst, SolrCore core, String pluginKey) {
if (core == null) return;
if (inst instanceof SolrInfoMBean) {
SolrInfoMBean mBean = (SolrInfoMBean) inst;
if (inst instanceof SolrInfoBean) {
SolrInfoBean mBean = (SolrInfoBean) inst;
String name = (inst instanceof SolrRequestHandler) ? pluginKey : mBean.getName();
core.registerInfoBean(name, mBean);
}

View File

@ -276,18 +276,12 @@ public class SolrConfig extends Config implements MapSerializable {
hashSetInverseLoadFactor = 1.0f / getFloat("//HashDocSet/@loadFactor", 0.75f);
hashDocSetMaxSize = getInt("//HashDocSet/@maxSize", 3000);
if (get("jmx", null) != null) {
log.warn("solrconfig.xml: <jmx> is no longer supported, use solr.xml:/metrics/reporter section instead");
}
httpCachingConfig = new HttpCachingConfig(this);
Node jmx = getNode("jmx", false);
if (jmx != null) {
jmxConfig = new JmxConfiguration(true,
get("jmx/@agentId", null),
get("jmx/@serviceUrl", null),
get("jmx/@rootName", null));
} else {
jmxConfig = new JmxConfiguration(false, null, null, null);
}
maxWarmingSearchers = getInt("query/maxWarmingSearchers", 1);
slowQueryThresholdMillis = getInt("query/slowQueryThresholdMillis", -1);
for (SolrPluginInfo plugin : plugins) loadPluginInfo(plugin);
@ -510,48 +504,12 @@ public class SolrConfig extends Config implements MapSerializable {
protected String dataDir;
public final int slowQueryThresholdMillis; // threshold above which a query is considered slow
//JMX configuration
public final JmxConfiguration jmxConfig;
private final HttpCachingConfig httpCachingConfig;
public HttpCachingConfig getHttpCachingConfig() {
return httpCachingConfig;
}
public static class JmxConfiguration implements MapSerializable {
public boolean enabled = false;
public String agentId;
public String serviceUrl;
public String rootName;
public JmxConfiguration(boolean enabled,
String agentId,
String serviceUrl,
String rootName) {
this.enabled = enabled;
this.agentId = agentId;
this.serviceUrl = serviceUrl;
this.rootName = rootName;
if (agentId != null && serviceUrl != null) {
throw new SolrException
(SolrException.ErrorCode.SERVER_ERROR,
"Incorrect JMX Configuration in solrconfig.xml, " +
"both agentId and serviceUrl cannot be specified at the same time");
}
}
@Override
public Map<String, Object> toMap(Map<String, Object> map) {
map.put("agentId", agentId);
map.put("serviceUrl", serviceUrl);
map.put("rootName", rootName);
return map;
}
}
public static class HttpCachingConfig implements MapSerializable {
/**
@ -858,7 +816,6 @@ public class SolrConfig extends Config implements MapSerializable {
m.put("queryResultMaxDocsCached", queryResultMaxDocsCached);
m.put("enableLazyFieldLoading", enableLazyFieldLoading);
m.put("maxBooleanClauses", booleanQueryMaxClauseCount);
if (jmxConfig != null) result.put("jmx", jmxConfig);
for (SolrPluginInfo plugin : plugins) {
List<PluginInfo> infos = getPluginInfos(plugin.clazz.getName());
if (infos == null || infos.isEmpty()) continue;
@ -884,7 +841,6 @@ public class SolrConfig extends Config implements MapSerializable {
addCacheConfig(m, filterCacheConfig, queryResultCacheConfig, documentCacheConfig, fieldValueCacheConfig);
if (jmxConfig != null) result.put("jmx", jmxConfig);
m = new LinkedHashMap();
result.put("requestDispatcher", m);
m.put("handleSelect", handleSelect);

View File

@ -27,7 +27,6 @@ import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
@ -58,6 +57,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import com.codahale.metrics.Counter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.google.common.collect.MapMaker;
import org.apache.commons.io.FileUtils;
@ -133,7 +133,7 @@ import org.apache.solr.schema.IndexSchemaFactory;
import org.apache.solr.schema.ManagedIndexSchema;
import org.apache.solr.schema.SimilarityFactory;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.SolrFieldCacheMBean;
import org.apache.solr.search.SolrFieldCacheBean;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.ValueSourceParser;
import org.apache.solr.search.stats.LocalStatsCache;
@ -171,7 +171,7 @@ import static org.apache.solr.common.params.CommonParams.PATH;
/**
*
*/
public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closeable {
public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeable {
public static final String version="1.0";
@ -202,7 +202,7 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
private final PluginBag<UpdateRequestProcessorFactory> updateProcessors = new PluginBag<>(UpdateRequestProcessorFactory.class, this, true);
private final Map<String,UpdateRequestProcessorChain> updateProcessorChains;
private final SolrCoreMetricManager coreMetricManager;
private final Map<String, SolrInfoMBean> infoRegistry;
private final Map<String, SolrInfoBean> infoRegistry = new ConcurrentHashMap<>();
private final IndexDeletionPolicyWrapper solrDelPolicy;
private final SolrSnapshotMetaDataManager snapshotMgr;
private final DirectoryFactory directoryFactory;
@ -222,6 +222,12 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
private Counter newSearcherMaxReachedCounter;
private Counter newSearcherOtherErrorsCounter;
private Set<String> metricNames = new HashSet<>();
public Set<String> getMetricNames() {
return metricNames;
}
public Date getStartTimeStamp() { return startTime; }
private final Map<IndexReader.CacheKey, IndexFingerprint> perSegmentFingerprintCache = new MapMaker().weakKeys().makeMap();
@ -448,14 +454,14 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
}
/**
* Returns a Map of name vs SolrInfoMBean objects. The returned map is an instance of
* Returns a Map of name vs SolrInfoBean objects. The returned map is an instance of
* a ConcurrentHashMap and therefore no synchronization is needed for putting, removing
* or iterating over it.
*
* @return the Info Registry map which contains SolrInfoMBean objects keyed by name
* @return the Info Registry map which contains SolrInfoBean objects keyed by name
* @since solr 1.3
*/
public Map<String, SolrInfoMBean> getInfoRegistry() {
public Map<String, SolrInfoBean> getInfoRegistry() {
return infoRegistry;
}
@ -905,9 +911,12 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
// initialize searcher-related metrics
initializeMetrics(metricManager, coreMetricManager.getRegistryName(), null);
// Initialize JMX
this.infoRegistry = initInfoRegistry(name, config);
infoRegistry.put("fieldCache", new SolrFieldCacheMBean());
SolrFieldCacheBean solrFieldCacheBean = new SolrFieldCacheBean();
// this is registered at the CONTAINER level because it's not core-specific - for now we
// also register it here for back-compat
solrFieldCacheBean.initializeMetrics(metricManager, coreMetricManager.getRegistryName(), "core");
infoRegistry.put("fieldCache", solrFieldCacheBean);
initSchema(config, schema);
@ -998,15 +1007,9 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
// from the core.
resourceLoader.inform(infoRegistry);
// Allow the directory factory to register MBeans as well
for (SolrInfoMBean bean : directoryFactory.offerMBeans()) {
log.debug("Registering JMX bean [{}] from directory factory.", bean.getName());
// Not worried about concurrency, so no reason to use putIfAbsent
if (infoRegistry.containsKey(bean.getName())){
log.debug("Ignoring JMX bean [{}] due to name conflict.", bean.getName());
} else {
infoRegistry.put(bean.getName(), bean);
}
// Allow the directory factory to report metrics
if (directoryFactory instanceof SolrMetricProducer) {
((SolrMetricProducer)directoryFactory).initializeMetrics(metricManager, coreMetricManager.getRegistryName(), "directoryFactory");
}
// seed version buckets with max from index during core initialization ... requires a searcher!
@ -1126,34 +1129,46 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
newSearcherCounter = manager.counter(registry, "new", Category.SEARCHER.toString());
newSearcherTimer = manager.timer(registry, "time", Category.SEARCHER.toString(), "new");
newSearcherWarmupTimer = manager.timer(registry, "warmup", Category.SEARCHER.toString(), "new");
newSearcherMaxReachedCounter = manager.counter(registry, "maxReached", Category.SEARCHER.toString(), "new");
newSearcherOtherErrorsCounter = manager.counter(registry, "errors", Category.SEARCHER.toString(), "new");
newSearcherCounter = manager.counter(this, registry, "new", Category.SEARCHER.toString());
newSearcherTimer = manager.timer(this, registry, "time", Category.SEARCHER.toString(), "new");
newSearcherWarmupTimer = manager.timer(this, registry, "warmup", Category.SEARCHER.toString(), "new");
newSearcherMaxReachedCounter = manager.counter(this, registry, "maxReached", Category.SEARCHER.toString(), "new");
newSearcherOtherErrorsCounter = manager.counter(this, registry, "errors", Category.SEARCHER.toString(), "new");
manager.registerGauge(this, registry, () -> name == null ? "(null)" : name, true, "coreName", Category.CORE.toString());
manager.registerGauge(this, registry, () -> startTime, true, "startTime", Category.CORE.toString());
manager.registerGauge(this, registry, () -> getOpenCount(), true, "refCount", Category.CORE.toString());
manager.registerGauge(this, registry, () -> resourceLoader.getInstancePath().toString(), true, "instanceDir", Category.CORE.toString());
manager.registerGauge(this, registry, () -> getIndexDir(), true, "indexDir", Category.CORE.toString());
manager.registerGauge(this, registry, () -> getIndexSize(), true, "sizeInBytes", Category.INDEX.toString());
manager.registerGauge(this, registry, () -> NumberUtils.readableSize(getIndexSize()), true, "size", Category.INDEX.toString());
if (coreDescriptor != null && coreDescriptor.getCoreContainer() != null) {
manager.registerGauge(this, registry, () -> coreDescriptor.getCoreContainer().getCoreNames(this), true, "aliases", Category.CORE.toString());
final CloudDescriptor cd = coreDescriptor.getCloudDescriptor();
if (cd != null) {
manager.registerGauge(this, registry, () -> {
if (cd.getCollectionName() != null) {
return cd.getCollectionName();
} else {
return "_notset_";
}
}, true, "collection", Category.CORE.toString());
manager.registerGauge(this, registry, () -> {
if (cd.getShardId() != null) {
return cd.getShardId();
} else {
return "_auto_";
}
}, true, "shard", Category.CORE.toString());
}
}
manager.registerGauge(registry, () -> name == null ? "(null)" : name, true, "coreName", Category.CORE.toString());
manager.registerGauge(registry, () -> startTime, true, "startTime", Category.CORE.toString());
manager.registerGauge(registry, () -> getOpenCount(), true, "refCount", Category.CORE.toString());
manager.registerGauge(registry, () -> resourceLoader.getInstancePath().toString(), true, "instanceDir", Category.CORE.toString());
manager.registerGauge(registry, () -> getIndexDir(), true, "indexDir", Category.CORE.toString());
manager.registerGauge(registry, () -> getIndexSize(), true, "sizeInBytes", Category.INDEX.toString());
manager.registerGauge(registry, () -> NumberUtils.readableSize(getIndexSize()), true, "size", Category.INDEX.toString());
manager.registerGauge(registry, () -> coreDescriptor.getCoreContainer().getCoreNames(this), true, "aliases", Category.CORE.toString());
// initialize disk total / free metrics
Path dataDirPath = Paths.get(dataDir);
File dataDirFile = dataDirPath.toFile();
manager.registerGauge(registry, () -> dataDirFile.getTotalSpace(), true, "totalSpace", Category.CORE.toString(), "fs");
manager.registerGauge(registry, () -> dataDirFile.getUsableSpace(), true, "usableSpace", Category.CORE.toString(), "fs");
}
private Map<String,SolrInfoMBean> initInfoRegistry(String name, SolrConfig config) {
if (config.jmxConfig.enabled) {
return new JmxMonitoredMap<String, SolrInfoMBean>(name, coreMetricManager.getRegistryName(), String.valueOf(this.hashCode()), config.jmxConfig);
} else {
log.debug("JMX monitoring not detected for core: " + name);
return new ConcurrentHashMap<>();
}
manager.registerGauge(this, registry, () -> dataDirFile.getTotalSpace(), true, "totalSpace", Category.CORE.toString(), "fs");
manager.registerGauge(this, registry, () -> dataDirFile.getUsableSpace(), true, "usableSpace", Category.CORE.toString(), "fs");
}
private void checkVersionFieldExistsInSchema(IndexSchema schema, CoreDescriptor coreDescriptor) {
@ -2685,6 +2700,9 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
for (PluginInfo info : pluginInfos) {
T o = createInitInstance(info,type, type.getSimpleName(), defClassName);
registry.put(info.name, o);
if (o instanceof SolrMetricProducer) {
coreMetricManager.registerMetricProducer(type.getSimpleName() + "." + info.name, (SolrMetricProducer)o);
}
if(info.isDefault()){
def = o;
}
@ -2692,6 +2710,12 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
return def;
}
public void initDefaultPlugin(Object plugin, Class type) {
if (plugin instanceof SolrMetricProducer) {
coreMetricManager.registerMetricProducer(type.getSimpleName() + ".default", (SolrMetricProducer)plugin);
}
}
/**For a given List of PluginInfo return the instances as a List
* @param defClassName The default classname if PluginInfo#className == null
* @return The instances initialized
@ -2775,14 +2799,9 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
}
/////////////////////////////////////////////////////////////////////
// SolrInfoMBean stuff: Statistics and Module Info
// SolrInfoBean stuff: Statistics and Module Info
/////////////////////////////////////////////////////////////////////
@Override
public String getVersion() {
return SolrCore.version;
}
@Override
public String getDescription() {
return "SolrCore";
@ -2794,48 +2813,8 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
}
@Override
public String getSource() {
return null;
}
@Override
public URL[] getDocs() {
return null;
}
@Override
public NamedList getStatistics() {
NamedList<Object> lst = new SimpleOrderedMap<>(8);
lst.add("coreName", name==null ? "(null)" : name);
lst.add("startTime", startTime);
lst.add("refCount", getOpenCount());
lst.add("instanceDir", resourceLoader.getInstancePath());
lst.add("indexDir", getIndexDir());
long size = getIndexSize();
lst.add("sizeInBytes", size);
lst.add("size", NumberUtils.readableSize(size));
CoreDescriptor cd = getCoreDescriptor();
if (cd != null) {
if (null != cd && cd.getCoreContainer() != null) {
lst.add("aliases", getCoreDescriptor().getCoreContainer().getCoreNames(this));
}
CloudDescriptor cloudDesc = cd.getCloudDescriptor();
if (cloudDesc != null) {
String collection = cloudDesc.getCollectionName();
if (collection == null) {
collection = "_notset_";
}
lst.add("collection", collection);
String shard = cloudDesc.getShardId();
if (shard == null) {
shard = "_auto_";
}
lst.add("shard", shard);
}
}
return lst;
public MetricRegistry getMetricRegistry() {
return coreMetricManager.getRegistry();
}
public Codec getCodec() {
@ -2983,11 +2962,11 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
};
}
public void registerInfoBean(String name, SolrInfoMBean solrInfoMBean) {
infoRegistry.put(name, solrInfoMBean);
public void registerInfoBean(String name, SolrInfoBean solrInfoBean) {
infoRegistry.put(name, solrInfoBean);
if (solrInfoMBean instanceof SolrMetricProducer) {
SolrMetricProducer producer = (SolrMetricProducer) solrInfoMBean;
if (solrInfoBean instanceof SolrMetricProducer) {
SolrMetricProducer producer = (SolrMetricProducer) solrInfoBean;
coreMetricManager.registerMetricProducer(name, producer);
}
}

View File

@ -0,0 +1,95 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import java.util.Map;
import java.util.Set;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.util.stats.MetricUtils;
/**
 * Interface for getting various ui friendly strings
 * for use by objects which are 'pluggable' to make server administration
 * easier.
 */
public interface SolrInfoBean {

  /**
   * Category of Solr component.
   */
  enum Category { CONTAINER, ADMIN, CORE, QUERY, UPDATE, CACHE, HIGHLIGHTER, QUERYPARSER, SPELLCHECKER,
    SEARCHER, REPLICATION, TLOG, INDEX, DIRECTORY, HTTP, OTHER }

  /**
   * Top-level group of beans or metrics for a subsystem.
   */
  enum Group { jvm, jetty, node, core, collection, shard, cluster, overseer }

  /**
   * Simple common usage name, e.g. BasicQueryHandler,
   * or fully qualified class name.
   */
  String getName();
  /** Simple one or two line description */
  String getDescription();
  /** Category of this component */
  Category getCategory();

  /** Optionally return a snapshot of metrics that this component reports, or null.
   * Default implementation requires that both {@link #getMetricNames()} and
   * {@link #getMetricRegistry()} return non-null values.
   */
  default Map<String, Object> getMetricsSnapshot() {
    if (getMetricRegistry() == null || getMetricNames() == null) {
      return null;
    }
    return MetricUtils.convertMetrics(getMetricRegistry(), getMetricNames());
  }

  /**
   * Modifiable set of metric names that this component reports (default is null,
   * which means none). If not null then this set is used by {@link #registerMetricName(String)}
   * to capture what metrics names are reported from this component.
   */
  default Set<String> getMetricNames() {
    return null;
  }

  /**
   * An instance of {@link MetricRegistry} that this component uses for metrics reporting
   * (default is null, which means no registry).
   */
  default MetricRegistry getMetricRegistry() {
    return null;
  }

  /** Register a metric name that this component reports. This method is called by various
   * metric registration methods in {@link org.apache.solr.metrics.SolrMetricManager} in order
   * to capture what metric names are reported from this component (which in turn is called
   * from {@link org.apache.solr.metrics.SolrMetricProducer#initializeMetrics(SolrMetricManager, String, String)}).
   * <p>Default implementation registers all metrics added by a component. Implementations may
   * override this to avoid reporting some or all metrics returned by {@link #getMetricsSnapshot()}</p>
   */
  default void registerMetricName(String name) {
    Set<String> names = getMetricNames();
    // Silently a no-op when the component reports no metric names.
    if (names != null) {
      names.add(name);
    }
  }
}

View File

@ -1,76 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import java.net.URL;
import org.apache.solr.common.util.NamedList;
/**
 * MBean interface for getting various ui friendly strings and URLs
 * for use by objects which are 'pluggable' to make server administration
 * easier.
 *
 *
 */
public interface SolrInfoMBean {

  /**
   * Category of Solr component.
   */
  enum Category { CONTAINER, ADMIN, CORE, QUERY, UPDATE, CACHE, HIGHLIGHTER, QUERYPARSER, SPELLCHECKER,
    SEARCHER, REPLICATION, TLOG, INDEX, DIRECTORY, HTTP, OTHER }

  /**
   * Top-level group of beans or metrics for a subsystem.
   */
  enum Group { jvm, jetty, node, core, collection, shard, cluster, overseer }

  /**
   * Simple common usage name, e.g. BasicQueryHandler,
   * or fully qualified class name.
   */
  public String getName();
  /** Simple common usage version, e.g. 2.0 */
  public String getVersion();
  /** Simple one or two line description */
  public String getDescription();
  /** Purpose of this Class */
  public Category getCategory();
  /** CVS Source, SVN Source, etc */
  public String getSource();
  /**
   * Documentation URL list.
   *
   * <p>
   * Suggested documentation URLs: Homepage for sponsoring project,
   * FAQ on class usage, Design doc for class, Wiki, bug reporting URL, etc...
   * </p>
   */
  public URL[] getDocs();
  /**
   * Any statistics this instance would like to be publicly available via
   * the Solr Administration interface.
   *
   * <p>
   * Any Object type may be stored in the list, but only the
   * <code>toString()</code> representation will be used.
   * </p>
   */
  public NamedList getStatistics();
}

View File

@ -1,62 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import java.net.URL;
import org.apache.solr.common.util.NamedList;
/**
 * Delegating wrapper around a {@link SolrInfoMBean}: every method simply
 * forwards to the wrapped instance. Subclass and override selected methods to
 * alter individual pieces of the wrapped bean's behavior.
 */
public class SolrInfoMBeanWrapper implements SolrInfoMBean {
  private final SolrInfoMBean delegate;

  public SolrInfoMBeanWrapper(SolrInfoMBean mbean) {
    this.delegate = mbean;
  }

  /** {@inheritDoc} */
  @Override
  public String getName() {
    return delegate.getName();
  }

  /** {@inheritDoc} */
  @Override
  public String getVersion() {
    return delegate.getVersion();
  }

  /** {@inheritDoc} */
  @Override
  public String getDescription() {
    return delegate.getDescription();
  }

  /** {@inheritDoc} */
  @Override
  public Category getCategory() {
    return delegate.getCategory();
  }

  /** {@inheritDoc} */
  @Override
  public String getSource() {
    return delegate.getSource();
  }

  /** {@inheritDoc} */
  @Override
  public URL[] getDocs() {
    return delegate.getDocs();
  }

  /** {@inheritDoc} */
  @Override
  public NamedList getStatistics() {
    return delegate.getStatistics();
  }
}

View File

@ -100,7 +100,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
private String dataDir;
private final List<SolrCoreAware> waitingForCore = Collections.synchronizedList(new ArrayList<SolrCoreAware>());
private final List<SolrInfoMBean> infoMBeans = Collections.synchronizedList(new ArrayList<SolrInfoMBean>());
private final List<SolrInfoBean> infoMBeans = Collections.synchronizedList(new ArrayList<SolrInfoBean>());
private final List<ResourceLoaderAware> waitingForResources = Collections.synchronizedList(new ArrayList<ResourceLoaderAware>());
private static final Charset UTF_8 = StandardCharsets.UTF_8;
@ -664,9 +664,9 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
assertAwareCompatibility( ResourceLoaderAware.class, obj );
waitingForResources.add( (ResourceLoaderAware)obj );
}
if (obj instanceof SolrInfoMBean){
if (obj instanceof SolrInfoBean){
//TODO: Assert here?
infoMBeans.add((SolrInfoMBean) obj);
infoMBeans.add((SolrInfoBean) obj);
}
}
@ -722,21 +722,21 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
}
/**
* Register any {@link org.apache.solr.core.SolrInfoMBean}s
* Register any {@link SolrInfoBean}s
* @param infoRegistry The Info Registry
*/
public void inform(Map<String, SolrInfoMBean> infoRegistry) {
public void inform(Map<String, SolrInfoBean> infoRegistry) {
// this can currently happen concurrently with requests starting and lazy components
// loading. Make sure infoMBeans doesn't change.
SolrInfoMBean[] arr;
SolrInfoBean[] arr;
synchronized (infoMBeans) {
arr = infoMBeans.toArray(new SolrInfoMBean[infoMBeans.size()]);
arr = infoMBeans.toArray(new SolrInfoBean[infoMBeans.size()]);
waitingForResources.clear();
}
for (SolrInfoMBean bean : arr) {
for (SolrInfoBean bean : arr) {
// Too slow? I suspect not, but we may need
// to start tracking this in a Set.
if (!infoRegistry.containsValue(bean)) {
@ -879,7 +879,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
public void close() throws IOException {
IOUtils.close(classLoader);
}
public List<SolrInfoMBean> getInfoMBeans(){
public List<SolrInfoBean> getInfoMBeans(){
return Collections.unmodifiableList(infoMBeans);
}

View File

@ -16,6 +16,7 @@
*/
package org.apache.solr.core;
import javax.management.MBeanServer;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
@ -25,7 +26,10 @@ import java.lang.invoke.MethodHandles;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
@ -35,8 +39,10 @@ import org.apache.commons.io.IOUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.logging.LogWatcherConfig;
import org.apache.solr.metrics.reporters.SolrJmxReporter;
import org.apache.solr.update.UpdateShardHandlerConfig;
import org.apache.solr.util.DOMUtil;
import org.apache.solr.util.JmxUtil;
import org.apache.solr.util.PropertiesUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -448,14 +454,30 @@ public class SolrXmlConfig {
private static PluginInfo[] getMetricReporterPluginInfos(Config config) {
NodeList nodes = (NodeList) config.evaluate("solr/metrics/reporter", XPathConstants.NODESET);
if (nodes == null || nodes.getLength() == 0)
return new PluginInfo[0];
PluginInfo[] configs = new PluginInfo[nodes.getLength()];
for (int i = 0; i < nodes.getLength(); i++) {
// we don't require class in order to support predefined replica and node reporter classes
configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
List<PluginInfo> configs = new ArrayList<>();
boolean hasJmxReporter = false;
if (nodes != null && nodes.getLength() > 0) {
for (int i = 0; i < nodes.getLength(); i++) {
// we don't require class in order to support predefined replica and node reporter classes
PluginInfo info = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
String clazz = info.className;
if (clazz != null && clazz.equals(SolrJmxReporter.class.getName())) {
hasJmxReporter = true;
}
configs.add(info);
}
}
return configs;
// if there's an MBean server running but there was no JMX reporter then add a default one
MBeanServer mBeanServer = JmxUtil.findFirstMBeanServer();
if (mBeanServer != null && !hasJmxReporter) {
log.info("MBean server found: " + mBeanServer + ", but no JMX reporters were configured - adding default JMX reporter.");
Map<String,Object> attributes = new HashMap<>();
attributes.put("name", "default");
attributes.put("class", SolrJmxReporter.class.getName());
PluginInfo defaultPlugin = new PluginInfo("reporter", attributes);
configs.add(defaultPlugin);
}
return configs.toArray(new PluginInfo[configs.size()]);
}
private static PluginInfo getTransientCoreCacheFactoryPluginInfo(Config config) {
Node node = config.getNode("solr/transientCoreCacheFactory", false);

View File

@ -14,9 +14,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package org.apache.solr.handler;
import java.io.IOException;

View File

@ -19,8 +19,6 @@ package org.apache.solr.handler;
import java.io.IOException;
import java.io.Reader;
import java.lang.invoke.MethodHandles;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
@ -481,12 +479,4 @@ public class MoreLikeThisHandler extends RequestHandlerBase
public String getDescription() {
return "Solr MoreLikeThis";
}
@Override
public URL[] getDocs() {
try {
return new URL[] { new URL("http://wiki.apache.org/solr/MoreLikeThis") };
}
catch( MalformedURLException ex ) { return null; }
}
}

View File

@ -20,7 +20,6 @@ import org.apache.solr.api.Api;
import org.apache.solr.api.ApiBag;
import org.apache.solr.handler.component.*;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@ -42,11 +41,6 @@ public class RealTimeGetHandler extends SearchHandler {
return "The realtime get handler";
}
@Override
public URL[] getDocs() {
return null;
}
@Override
public Collection<Api> getApis() {
return ApiBag.wrapRequestHandlers(this, "core.RealtimeGet");

View File

@ -90,6 +90,8 @@ import org.apache.solr.core.SolrEventListener;
import org.apache.solr.core.backup.repository.BackupRepository;
import org.apache.solr.core.backup.repository.LocalFileSystemRepository;
import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.handler.IndexFetcher.IndexFetchResult;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
@ -162,6 +164,10 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
}
return new CommitVersionInfo(generation, version);
}
public String toString() {
return "generation=" + generation + ",version=" + version;
}
}
private IndexFetcher pollingIndexFetcher;
@ -851,52 +857,56 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
}
@Override
@SuppressWarnings("unchecked")
public NamedList getStatistics() {
NamedList list = super.getStatistics();
if (core != null) {
list.add("indexSize", NumberUtils.readableSize(core.getIndexSize()));
CommitVersionInfo vInfo = (core != null && !core.isClosed()) ? getIndexVersion(): null;
list.add("indexVersion", null == vInfo ? 0 : vInfo.version);
list.add(GENERATION, null == vInfo ? 0 : vInfo.generation);
list.add("indexPath", core.getIndexDir());
list.add("isMaster", String.valueOf(isMaster));
list.add("isSlave", String.valueOf(isSlave));
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
super.initializeMetrics(manager, registry, scope);
manager.registerGauge(this, registry, () -> core != null ? NumberUtils.readableSize(core.getIndexSize()) : "", true,
"indexSize", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> (core != null && !core.isClosed() ? getIndexVersion().toString() : ""), true,
"indexVersion", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> (core != null && !core.isClosed() ? getIndexVersion().generation : 0), true,
GENERATION, getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> core != null ? core.getIndexDir() : "", true,
"indexPath", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> isMaster, true,
"isMaster", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> isSlave, true,
"isSlave", getCategory().toString(), scope);
final MetricsMap fetcherMap = new MetricsMap((detailed, map) -> {
IndexFetcher fetcher = currentIndexFetcher;
if (fetcher != null) {
list.add(MASTER_URL, fetcher.getMasterUrl());
map.put(MASTER_URL, fetcher.getMasterUrl());
if (getPollInterval() != null) {
list.add(POLL_INTERVAL, getPollInterval());
map.put(POLL_INTERVAL, getPollInterval());
}
list.add("isPollingDisabled", String.valueOf(isPollingDisabled()));
list.add("isReplicating", String.valueOf(isReplicating()));
map.put("isPollingDisabled", isPollingDisabled());
map.put("isReplicating", isReplicating());
long elapsed = fetcher.getReplicationTimeElapsed();
long val = fetcher.getTotalBytesDownloaded();
if (elapsed > 0) {
list.add("timeElapsed", elapsed);
list.add("bytesDownloaded", val);
list.add("downloadSpeed", val / elapsed);
map.put("timeElapsed", elapsed);
map.put("bytesDownloaded", val);
map.put("downloadSpeed", val / elapsed);
}
Properties props = loadReplicationProperties();
addVal(list, IndexFetcher.PREVIOUS_CYCLE_TIME_TAKEN, props, Long.class);
addVal(list, IndexFetcher.INDEX_REPLICATED_AT, props, Date.class);
addVal(list, IndexFetcher.CONF_FILES_REPLICATED_AT, props, Date.class);
addVal(list, IndexFetcher.REPLICATION_FAILED_AT, props, Date.class);
addVal(list, IndexFetcher.TIMES_FAILED, props, Integer.class);
addVal(list, IndexFetcher.TIMES_INDEX_REPLICATED, props, Integer.class);
addVal(list, IndexFetcher.LAST_CYCLE_BYTES_DOWNLOADED, props, Long.class);
addVal(list, IndexFetcher.TIMES_CONFIG_REPLICATED, props, Integer.class);
addVal(list, IndexFetcher.CONF_FILES_REPLICATED, props, String.class);
addVal(map, IndexFetcher.PREVIOUS_CYCLE_TIME_TAKEN, props, Long.class);
addVal(map, IndexFetcher.INDEX_REPLICATED_AT, props, Date.class);
addVal(map, IndexFetcher.CONF_FILES_REPLICATED_AT, props, Date.class);
addVal(map, IndexFetcher.REPLICATION_FAILED_AT, props, Date.class);
addVal(map, IndexFetcher.TIMES_FAILED, props, Integer.class);
addVal(map, IndexFetcher.TIMES_INDEX_REPLICATED, props, Integer.class);
addVal(map, IndexFetcher.LAST_CYCLE_BYTES_DOWNLOADED, props, Long.class);
addVal(map, IndexFetcher.TIMES_CONFIG_REPLICATED, props, Integer.class);
addVal(map, IndexFetcher.CONF_FILES_REPLICATED, props, String.class);
}
if (isMaster) {
if (includeConfFiles != null) list.add("confFilesToReplicate", includeConfFiles);
list.add(REPLICATE_AFTER, getReplicateAfterStrings());
list.add("replicationEnabled", String.valueOf(replicationEnabled.get()));
}
}
return list;
});
manager.registerGauge(this, registry, fetcherMap, true, "fetcher", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> isMaster && includeConfFiles != null ? includeConfFiles : "", true,
"confFilesToReplicate", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> isMaster ? getReplicateAfterStrings() : Collections.<String>emptyList(), true,
REPLICATE_AFTER, getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> isMaster && replicationEnabled.get(), true,
"replicationEnabled", getCategory().toString(), scope);
}
/**
@ -1064,24 +1074,39 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
}
private void addVal(NamedList<Object> nl, String key, Properties props, Class clzz) {
Object val = formatVal(key, props, clzz);
if (val != null) {
nl.add(key, val);
}
}
private void addVal(Map<String, Object> map, String key, Properties props, Class clzz) {
Object val = formatVal(key, props, clzz);
if (val != null) {
map.put(key, val);
}
}
private Object formatVal(String key, Properties props, Class clzz) {
String s = props.getProperty(key);
if (s == null || s.trim().length() == 0) return;
if (s == null || s.trim().length() == 0) return null;
if (clzz == Date.class) {
try {
Long l = Long.parseLong(s);
nl.add(key, new Date(l).toString());
} catch (NumberFormatException e) {/*no op*/ }
return new Date(l).toString();
} catch (NumberFormatException e) {
return null;
}
} else if (clzz == List.class) {
String ss[] = s.split(",");
List<String> l = new ArrayList<>();
for (String s1 : ss) {
l.add(new Date(Long.parseLong(s1)).toString());
}
nl.add(key, l);
return l;
} else {
nl.add(key, s);
return s;
}
}
private List<String> getReplicateAfterStrings() {

View File

@ -17,9 +17,11 @@
package org.apache.solr.handler;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableList;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Meter;
@ -27,11 +29,10 @@ import com.codahale.metrics.Timer;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.SuppressForbidden;
import org.apache.solr.core.PluginBag;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.request.SolrQueryRequest;
@ -42,7 +43,6 @@ import org.apache.solr.util.SolrPluginUtils;
import org.apache.solr.api.Api;
import org.apache.solr.api.ApiBag;
import org.apache.solr.api.ApiSupport;
import org.apache.solr.util.stats.MetricUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -51,7 +51,7 @@ import static org.apache.solr.core.RequestParams.USEPARAM;
/**
*
*/
public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfoMBean, SolrMetricProducer, NestedRequestHandler,ApiSupport {
public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfoBean, SolrMetricProducer, NestedRequestHandler,ApiSupport {
protected NamedList initArgs = null;
protected SolrParams defaults;
@ -74,6 +74,9 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
private PluginInfo pluginInfo;
private Set<String> metricNames = new HashSet<>();
private MetricRegistry registry;
@SuppressForbidden(reason = "Need currentTimeMillis, used only for stats output")
public RequestHandlerBase() {
handlerStart = System.currentTimeMillis();
@ -138,13 +141,15 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
@Override
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
numErrors = manager.meter(registryName, "errors", getCategory().toString(), scope);
numServerErrors = manager.meter(registryName, "serverErrors", getCategory().toString(), scope);
numClientErrors = manager.meter(registryName, "clientErrors", getCategory().toString(), scope);
numTimeouts = manager.meter(registryName, "timeouts", getCategory().toString(), scope);
requests = manager.counter(registryName, "requests", getCategory().toString(), scope);
requestTimes = manager.timer(registryName, "requestTimes", getCategory().toString(), scope);
totalTime = manager.counter(registryName, "totalTime", getCategory().toString(), scope);
registry = manager.registry(registryName);
numErrors = manager.meter(this, registryName, "errors", getCategory().toString(), scope);
numServerErrors = manager.meter(this, registryName, "serverErrors", getCategory().toString(), scope);
numClientErrors = manager.meter(this, registryName, "clientErrors", getCategory().toString(), scope);
numTimeouts = manager.meter(this, registryName, "timeouts", getCategory().toString(), scope);
requests = manager.counter(this, registryName, "requests", getCategory().toString(), scope);
requestTimes = manager.timer(this, registryName, "requestTimes", getCategory().toString(), scope);
totalTime = manager.counter(this, registryName, "totalTime", getCategory().toString(), scope);
manager.registerGauge(this, registryName, () -> handlerStart, true, "handlerStart", getCategory().toString(), scope);
}
public static SolrParams getSolrParamsFromNamedList(NamedList args, String key) {
@ -225,24 +230,21 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
@Override
public abstract String getDescription();
@Override
public String getSource() { return null; }
@Override
public String getVersion() {
return getClass().getPackage().getSpecificationVersion();
}
@Override
public Category getCategory() {
return Category.QUERY;
}
@Override
public URL[] getDocs() {
return null; // this can be overridden, but not required
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public MetricRegistry getMetricRegistry() {
return registry;
}
@Override
public SolrRequestHandler getSubHandler(String subPath) {
@ -285,22 +287,6 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
return pluginInfo;
}
@Override
public NamedList<Object> getStatistics() {
NamedList<Object> lst = new SimpleOrderedMap<>();
lst.add("handlerStart",handlerStart);
lst.add("requests", requests.getCount());
lst.add("errors", numErrors.getCount());
lst.add("serverErrors", numServerErrors.getCount());
lst.add("clientErrors", numClientErrors.getCount());
lst.add("timeouts", numTimeouts.getCount());
// convert totalTime to ms
lst.add("totalTime", MetricUtils.nsToMs(totalTime.getCount()));
MetricUtils.addMetrics(lst, requestTimes);
return lst;
}
@Override
public Collection<Api> getApis() {
return ImmutableList.of(new ApiBag.ReqHandlerToApi(this, ApiBag.constructSpec(pluginInfo)));

View File

@ -702,12 +702,6 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
return "Edit solrconfig.xml";
}
@Override
public String getVersion() {
return getClass().getPackage().getSpecificationVersion();
}
@Override
public Category getCategory() {
return Category.ADMIN;

View File

@ -18,9 +18,6 @@ package org.apache.solr.handler;
import org.apache.solr.handler.component.*;
import java.net.MalformedURLException;
import java.net.URL;
/**
*
*
@ -47,14 +44,6 @@ public class StandardRequestHandler extends SearchHandler
public String getDescription() {
return "The standard Solr request handler";
}
@Override
public URL[] getDocs() {
try {
return new URL[] { new URL("http://wiki.apache.org/solr/StandardRequestHandler") };
}
catch( MalformedURLException ex ) { return null; }
}
}

View File

@ -121,7 +121,7 @@ public class CoreAdminHandler extends RequestHandlerBase implements PermissionNa
@Override
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
super.initializeMetrics(manager, registryName, scope);
parallelExecutor = MetricUtils.instrumentedExecutorService(parallelExecutor, manager.registry(registryName),
parallelExecutor = MetricUtils.instrumentedExecutorService(parallelExecutor, this, manager.registry(registryName),
SolrMetricManager.mkName("parallelCoreAdminExecutor", getCategory().name(),scope, "threadPool"));
}
@Override

View File

@ -22,8 +22,6 @@ import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
@ -707,14 +705,6 @@ public class LukeRequestHandler extends RequestHandlerBase
return Category.ADMIN;
}
@Override
public URL[] getDocs() {
try {
return new URL[] { new URL("http://wiki.apache.org/solr/LukeRequestHandler") };
}
catch( MalformedURLException ex ) { return null; }
}
///////////////////////////////////////////////////////////////////////////////////////
static class TermHistogram

View File

@ -134,7 +134,7 @@ public class MetricsCollectorHandler extends RequestHandlerBase {
@Override
public String getDescription() {
return "Handler for collecting and aggregating metric reports.";
return "Handler for collecting and aggregating SolrCloud metric reports.";
}
private static class MetricUpdateProcessor extends UpdateRequestProcessor {

View File

@ -19,6 +19,7 @@ package org.apache.solr.handler.admin;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@ -52,6 +53,13 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
final SolrMetricManager metricManager;
public static final String COMPACT_PARAM = "compact";
public static final String PREFIX_PARAM = "prefix";
public static final String REGEX_PARAM = "regex";
public static final String REGISTRY_PARAM = "registry";
public static final String GROUP_PARAM = "group";
public static final String TYPE_PARAM = "type";
public static final String ALL = "all";
public MetricsHandler() {
this.container = null;
@ -84,29 +92,38 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
for (String registryName : requestedRegistries) {
MetricRegistry registry = metricManager.registry(registryName);
response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter, false,
false, compact, null));
false, compact));
}
rsp.getValues().add("metrics", response);
}
private MetricFilter parseMustMatchFilter(SolrQueryRequest req) {
String[] prefixes = req.getParams().getParams("prefix");
MetricFilter mustMatchFilter;
String[] prefixes = req.getParams().getParams(PREFIX_PARAM);
MetricFilter prefixFilter = null;
if (prefixes != null && prefixes.length > 0) {
Set<String> prefixSet = new HashSet<>();
for (String prefix : prefixes) {
prefixSet.addAll(StrUtils.splitSmart(prefix, ','));
}
mustMatchFilter = new SolrMetricManager.PrefixFilter((String[])prefixSet.toArray(new String[prefixSet.size()]));
} else {
prefixFilter = new SolrMetricManager.PrefixFilter((String[])prefixSet.toArray(new String[prefixSet.size()]));
}
String[] regexes = req.getParams().getParams(REGEX_PARAM);
MetricFilter regexFilter = null;
if (regexes != null && regexes.length > 0) {
regexFilter = new SolrMetricManager.RegexFilter(regexes);
}
MetricFilter mustMatchFilter;
if (prefixFilter == null && regexFilter == null) {
mustMatchFilter = MetricFilter.ALL;
} else {
mustMatchFilter = new SolrMetricManager.OrFilter(prefixFilter, regexFilter);
}
return mustMatchFilter;
}
private Set<String> parseRegistries(SolrQueryRequest req) {
String[] groupStr = req.getParams().getParams("group");
String[] registryStr = req.getParams().getParams("registry");
String[] groupStr = req.getParams().getParams(GROUP_PARAM);
String[] registryStr = req.getParams().getParams(REGISTRY_PARAM);
if ((groupStr == null || groupStr.length == 0) && (registryStr == null || registryStr.length == 0)) {
// return all registries
return container.getMetricManager().registryNames();
@ -118,7 +135,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
for (String g : groupStr) {
List<String> split = StrUtils.splitSmart(g, ',');
for (String s : split) {
if (s.trim().equals("all")) {
if (s.trim().equals(ALL)) {
allRegistries = true;
break;
}
@ -137,7 +154,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
for (String r : registryStr) {
List<String> split = StrUtils.splitSmart(r, ',');
for (String s : split) {
if (s.trim().equals("all")) {
if (s.trim().equals(ALL)) {
allRegistries = true;
break;
}
@ -161,7 +178,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
}
private List<MetricType> parseMetricTypes(SolrQueryRequest req) {
String[] typeStr = req.getParams().getParams("type");
String[] typeStr = req.getParams().getParams(TYPE_PARAM);
List<String> types = Collections.emptyList();
if (typeStr != null && typeStr.length > 0) {
types = new ArrayList<>();
@ -176,7 +193,8 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
metricTypes = types.stream().map(String::trim).map(MetricType::valueOf).collect(Collectors.toList());
}
} catch (IllegalArgumentException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid metric type in: " + types + " specified. Must be one of (all, meter, timer, histogram, counter, gauge)", e);
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid metric type in: " + types +
" specified. Must be one of " + MetricType.SUPPORTED_TYPES_MSG, e);
}
return metricTypes;
}
@ -199,6 +217,8 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
gauge(Gauge.class),
all(null);
public static final String SUPPORTED_TYPES_MSG = EnumSet.allOf(MetricType.class).toString();
private final Class klass;
MetricType(Class klass) {

View File

@ -16,14 +16,12 @@
*/
package org.apache.solr.handler.admin;
import java.net.URL;
import java.util.ArrayList;
import java.util.Map;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
@ -48,13 +46,13 @@ public class PluginInfoHandler extends RequestHandlerBase
private static SimpleOrderedMap<Object> getSolrInfoBeans( SolrCore core, boolean stats )
{
SimpleOrderedMap<Object> list = new SimpleOrderedMap<>();
for (SolrInfoMBean.Category cat : SolrInfoMBean.Category.values())
for (SolrInfoBean.Category cat : SolrInfoBean.Category.values())
{
SimpleOrderedMap<Object> category = new SimpleOrderedMap<>();
list.add( cat.name(), category );
Map<String, SolrInfoMBean> reg = core.getInfoRegistry();
for (Map.Entry<String,SolrInfoMBean> entry : reg.entrySet()) {
SolrInfoMBean m = entry.getValue();
Map<String, SolrInfoBean> reg = core.getInfoRegistry();
for (Map.Entry<String,SolrInfoBean> entry : reg.entrySet()) {
SolrInfoBean m = entry.getValue();
if (m.getCategory() != cat) continue;
String na = "Not Declared";
@ -62,21 +60,10 @@ public class PluginInfoHandler extends RequestHandlerBase
category.add( entry.getKey(), info );
info.add( NAME, (m.getName() !=null ? m.getName() : na) );
info.add( "version", (m.getVersion() !=null ? m.getVersion() : na) );
info.add( "description", (m.getDescription()!=null ? m.getDescription() : na) );
info.add( "source", (m.getSource() !=null ? m.getSource() : na) );
URL[] urls = m.getDocs();
if ((urls != null) && (urls.length > 0)) {
ArrayList<String> docs = new ArrayList<>(urls.length);
for( URL u : urls ) {
docs.add( u.toExternalForm() );
}
info.add( "docs", docs );
}
if( stats ) {
info.add( "stats", m.getStatistics() );
if (stats) {
info.add( "stats", m.getMetricsSnapshot());
}
}
}

View File

@ -20,7 +20,7 @@ import org.apache.commons.io.IOUtils;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.client.solrj.impl.XMLResponseParser;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.ContentStream;
@ -30,10 +30,7 @@ import org.apache.solr.response.BinaryResponseWriter;
import org.apache.solr.response.SolrQueryResponse;
import java.io.StringReader;
import java.net.URL;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.Map;
@ -117,7 +114,7 @@ public class SolrInfoMBeanHandler extends RequestHandlerBase {
String[] requestedCats = req.getParams().getParams("cat");
if (null == requestedCats || 0 == requestedCats.length) {
for (SolrInfoMBean.Category cat : SolrInfoMBean.Category.values()) {
for (SolrInfoBean.Category cat : SolrInfoBean.Category.values()) {
cats.add(cat.name(), new SimpleOrderedMap<NamedList<Object>>());
}
} else {
@ -128,39 +125,27 @@ public class SolrInfoMBeanHandler extends RequestHandlerBase {
Set<String> requestedKeys = arrayToSet(req.getParams().getParams("key"));
Map<String, SolrInfoMBean> reg = req.getCore().getInfoRegistry();
for (Map.Entry<String, SolrInfoMBean> entry : reg.entrySet()) {
Map<String, SolrInfoBean> reg = req.getCore().getInfoRegistry();
for (Map.Entry<String, SolrInfoBean> entry : reg.entrySet()) {
addMBean(req, cats, requestedKeys, entry.getKey(),entry.getValue());
}
for (SolrInfoMBean infoMBean : req.getCore().getCoreDescriptor().getCoreContainer().getResourceLoader().getInfoMBeans()) {
for (SolrInfoBean infoMBean : req.getCore().getCoreDescriptor().getCoreContainer().getResourceLoader().getInfoMBeans()) {
addMBean(req,cats,requestedKeys,infoMBean.getName(),infoMBean);
}
return cats;
}
private void addMBean(SolrQueryRequest req, NamedList<NamedList<NamedList<Object>>> cats, Set<String> requestedKeys, String key, SolrInfoMBean m) {
private void addMBean(SolrQueryRequest req, NamedList<NamedList<NamedList<Object>>> cats, Set<String> requestedKeys, String key, SolrInfoBean m) {
if ( ! ( requestedKeys.isEmpty() || requestedKeys.contains(key) ) ) return;
NamedList<NamedList<Object>> catInfo = cats.get(m.getCategory().name());
if ( null == catInfo ) return;
NamedList<Object> mBeanInfo = new SimpleOrderedMap<>();
mBeanInfo.add("class", m.getName());
mBeanInfo.add("version", m.getVersion());
mBeanInfo.add("description", m.getDescription());
mBeanInfo.add("src", m.getSource());
// Use an external form
URL[] urls = m.getDocs();
if(urls!=null) {
List<String> docs = new ArrayList<>(urls.length);
for(URL url : urls) {
docs.add(url.toExternalForm());
}
mBeanInfo.add("docs", docs);
}
if (req.getParams().getFieldBool(key, "stats", false))
mBeanInfo.add("stats", m.getStatistics());
mBeanInfo.add("stats", m.getMetricsSnapshot());
catInfo.add(key, mBeanInfo);
}
@ -246,6 +231,9 @@ public class SolrInfoMBeanHandler extends RequestHandlerBase {
}
public Object diffObject(Object ref, Object now) {
if (now instanceof Map) {
now = new NamedList((Map)now);
}
if(ref instanceof NamedList) {
return diffNamedList((NamedList)ref, (NamedList)now);
}

View File

@ -16,10 +16,6 @@
*/
package org.apache.solr.handler.admin;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@ -27,23 +23,20 @@ import java.io.InputStreamReader;
import java.lang.invoke.MethodHandles;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.PlatformManagedObject;
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.InvocationTargetException;
import java.net.InetAddress;
import java.nio.charset.Charset;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import com.codahale.metrics.Gauge;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.LucenePackage;
import org.apache.lucene.util.Constants;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
@ -53,6 +46,7 @@ import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.util.RTimer;
import org.apache.solr.util.RedactionUtils;
import org.apache.solr.util.stats.MetricUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -207,29 +201,13 @@ public class SystemInfoHandler extends RequestHandlerBase
OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
info.add(NAME, os.getName()); // add at least this one
try {
// add remaining ones dynamically using Java Beans API
addMXBeanProperties(os, OperatingSystemMXBean.class, info);
} catch (IntrospectionException | ReflectiveOperationException e) {
log.warn("Unable to fetch properties of OperatingSystemMXBean.", e);
}
// There are some additional beans we want to add (not available on all JVMs):
for (String clazz : Arrays.asList(
"com.sun.management.OperatingSystemMXBean",
"com.sun.management.UnixOperatingSystemMXBean",
"com.ibm.lang.management.OperatingSystemMXBean"
)) {
try {
final Class<? extends PlatformManagedObject> intf = Class.forName(clazz)
.asSubclass(PlatformManagedObject.class);
addMXBeanProperties(os, intf, info);
} catch (ClassNotFoundException e) {
// ignore
} catch (IntrospectionException | ReflectiveOperationException e) {
log.warn("Unable to fetch properties of JVM-specific OperatingSystemMXBean.", e);
// add remaining ones dynamically using Java Beans API
// also those from JVM implementation-specific classes
MetricUtils.addMXBeanMetrics(os, MetricUtils.OS_MXBEAN_CLASSES, null, (name, metric) -> {
if (info.get(name) == null) {
info.add(name, ((Gauge) metric).getValue());
}
}
});
// Try some command line things:
try {
@ -243,34 +221,6 @@ public class SystemInfoHandler extends RequestHandlerBase
return info;
}
/**
* Add all bean properties of a {@link PlatformManagedObject} to the given {@link NamedList}.
* <p>
* If you are running a OpenJDK/Oracle JVM, there are nice properties in:
* {@code com.sun.management.UnixOperatingSystemMXBean} and
* {@code com.sun.management.OperatingSystemMXBean}
*/
static <T extends PlatformManagedObject> void addMXBeanProperties(T obj, Class<? extends T> intf, NamedList<Object> info)
throws IntrospectionException, ReflectiveOperationException {
if (intf.isInstance(obj)) {
final BeanInfo beanInfo = Introspector.getBeanInfo(intf, intf.getSuperclass(), Introspector.IGNORE_ALL_BEANINFO);
for (final PropertyDescriptor desc : beanInfo.getPropertyDescriptors()) {
final String name = desc.getName();
if (info.get(name) == null) {
try {
final Object v = desc.getReadMethod().invoke(obj);
if(v != null) {
info.add(name, v);
}
} catch (InvocationTargetException ite) {
// ignore (some properties throw UOE)
}
}
}
}
}
/**
* Utility function to execute a function
*/

View File

@ -17,7 +17,6 @@
package org.apache.solr.handler.component;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -380,7 +379,7 @@ public class DebugComponent extends SearchComponent
/////////////////////////////////////////////
/// SolrInfoMBean
/// SolrInfoBean
////////////////////////////////////////////
@Override
@ -392,9 +391,4 @@ public class DebugComponent extends SearchComponent
public Category getCategory() {
return Category.OTHER;
}
@Override
public URL[] getDocs() {
return null;
}
}

View File

@ -17,8 +17,6 @@
package org.apache.solr.handler.component;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@ -764,7 +762,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
////////////////////////////////////////////
/// SolrInfoMBean
/// SolrInfoBean
////////////////////////////////////////////
@Override
@ -777,17 +775,6 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
return Category.QUERY;
}
@Override
public URL[] getDocs() {
try {
return new URL[]{
new URL("http://wiki.apache.org/solr/ExpandComponent")
};
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
// this reader alters the content of the given reader so it should not
// delegate the caching stuff
private static class ReaderWrapper extends FilterLeafReader {

View File

@ -18,7 +18,6 @@ package org.apache.solr.handler.component;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -1212,7 +1211,7 @@ public class FacetComponent extends SearchComponent {
/////////////////////////////////////////////
/// SolrInfoMBean
/// SolrInfoBean
////////////////////////////////////////////
@Override
@ -1225,11 +1224,6 @@ public class FacetComponent extends SearchComponent {
return Category.QUERY;
}
@Override
public URL[] getDocs() {
return null;
}
/**
* This class is used exclusively for merging results from each shard
* in a distributed facet request. It plays no role in the computation

View File

@ -17,7 +17,6 @@
package org.apache.solr.handler.component;
import java.io.IOException;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -266,7 +265,7 @@ public class HighlightComponent extends SearchComponent implements PluginInfoIni
}
////////////////////////////////////////////
/// SolrInfoMBean
/// SolrInfoBean
////////////////////////////////////////////
@Override
@ -278,9 +277,4 @@ public class HighlightComponent extends SearchComponent implements PluginInfoIni
public Category getCategory() {
return Category.HIGHLIGHTER;
}
@Override
public URL[] getDocs() {
return null;
}
}

View File

@ -36,7 +36,7 @@ import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.URLUtil;
import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.update.UpdateShardHandlerConfig;
@ -373,10 +373,10 @@ public class HttpShardHandlerFactory extends ShardHandlerFactory implements org.
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
String expandedScope = SolrMetricManager.mkName(scope, SolrInfoMBean.Category.QUERY.name());
String expandedScope = SolrMetricManager.mkName(scope, SolrInfoBean.Category.QUERY.name());
clientConnectionManager.initializeMetrics(manager, registry, expandedScope);
httpRequestExecutor.initializeMetrics(manager, registry, expandedScope);
commExecutor = MetricUtils.instrumentedExecutorService(commExecutor,
commExecutor = MetricUtils.instrumentedExecutorService(commExecutor, null,
manager.registry(registry),
SolrMetricManager.mkName("httpShardExecutor", expandedScope, "threadPool"));
}

View File

@ -18,7 +18,6 @@ package org.apache.solr.handler.component;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@ -414,7 +413,7 @@ public class MoreLikeThisComponent extends SearchComponent {
}
// ///////////////////////////////////////////
// / SolrInfoMBean
// / SolrInfoBean
// //////////////////////////////////////////
@Override
@ -426,9 +425,4 @@ public class MoreLikeThisComponent extends SearchComponent {
public Category getCategory() {
return Category.QUERY;
}
@Override
public URL[] getDocs() {
return null;
}
}

View File

@ -20,7 +20,6 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -1378,7 +1377,7 @@ public class QueryComponent extends SearchComponent
}
/////////////////////////////////////////////
/// SolrInfoMBean
/// SolrInfoBean
////////////////////////////////////////////
@Override
@ -1391,11 +1390,6 @@ public class QueryComponent extends SearchComponent
return Category.QUERY;
}
@Override
public URL[] getDocs() {
return null;
}
/**
* Fake scorer for a single document
*

View File

@ -24,8 +24,6 @@ import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.invoke.MethodHandles;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@ -597,7 +595,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
}
//---------------------------------------------------------------------------------
// SolrInfoMBean
// SolrInfoBean
//---------------------------------------------------------------------------------
@Override
@ -605,16 +603,6 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
return "Query Boosting -- boost particular documents for a given query";
}
@Override
public URL[] getDocs() {
try {
return new URL[]{
new URL("http://wiki.apache.org/solr/QueryElevationComponent")
};
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
class ElevationComparatorSource extends FieldComparatorSource {
private QueryElevationComponent.ElevationObj elevations;
private SentinelIntSet ordSet; //the key half of the map

View File

@ -18,7 +18,6 @@ package org.apache.solr.handler.component;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@ -927,7 +926,7 @@ public class RealTimeGetComponent extends SearchComponent
////////////////////////////////////////////
/// SolrInfoMBean
/// SolrInfoBean
////////////////////////////////////////////
@Override
@ -940,13 +939,6 @@ public class RealTimeGetComponent extends SearchComponent
return Category.QUERY;
}
@Override
public URL[] getDocs() {
return null;
}
public void processGetFingeprint(ResponseBuilder rb) throws IOException {
SolrQueryRequest req = rb.req;
SolrParams params = req.getParams();

View File

@ -17,13 +17,15 @@
package org.apache.solr.handler.component;
import java.io.IOException;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.search.facet.FacetModule;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
@ -33,12 +35,16 @@ import org.apache.solr.util.plugin.NamedListInitializedPlugin;
*
* @since solr 1.3
*/
public abstract class SearchComponent implements SolrInfoMBean, NamedListInitializedPlugin
public abstract class SearchComponent implements SolrInfoBean, NamedListInitializedPlugin
{
/**
* The name given to this component in solrconfig.xml file
*/
private String name = this.getClass().getName();
protected Set<String> metricNames = new HashSet<>();
protected MetricRegistry registry;
/**
* Prepare the response. Guaranteed to be called before any SearchComponent {@link #process(org.apache.solr.handler.component.ResponseBuilder)} method.
* Called for every incoming request.
@ -103,31 +109,24 @@ public abstract class SearchComponent implements SolrInfoMBean, NamedListInitial
@Override
public abstract String getDescription();
@Override
public String getSource() { return null; }
@Override
public String getVersion() {
return getClass().getPackage().getSpecificationVersion();
}
@Override
public Category getCategory() {
return Category.OTHER;
}
@Override
public URL[] getDocs() {
return null; // this can be overridden, but not required
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public NamedList getStatistics() {
return null;
public MetricRegistry getMetricRegistry() {
return registry;
}
public static final Map<String, Class<? extends SearchComponent>> standard_components;
;
static {
HashMap<String, Class<? extends SearchComponent>> map = new HashMap<>();

View File

@ -853,7 +853,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
}
// ///////////////////////////////////////////
// / SolrInfoMBean
// / SolrInfoBean
// //////////////////////////////////////////
@Override

View File

@ -160,7 +160,7 @@ public class StatsComponent extends SearchComponent {
}
/////////////////////////////////////////////
/// SolrInfoMBean
/// SolrInfoBean
////////////////////////////////////////////
@Override

View File

@ -47,6 +47,9 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrEventListener;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.spelling.suggest.SolrSuggester;
import org.apache.solr.spelling.suggest.SuggesterOptions;
@ -61,7 +64,7 @@ import org.slf4j.LoggerFactory;
* Responsible for routing commands and queries to the appropriate {@link SolrSuggester}
* and for initializing them as specified by SolrConfig
*/
public class SuggestComponent extends SearchComponent implements SolrCoreAware, SuggesterParams, Accountable {
public class SuggestComponent extends SearchComponent implements SolrCoreAware, SuggesterParams, Accountable, SolrMetricProducer {
private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
/** Name used to identify whether the user query concerns this component */
@ -89,7 +92,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
* Key is the dictionary name used in SolrConfig, value is the corresponding {@link SolrSuggester}
*/
protected Map<String, SolrSuggester> suggesters = new ConcurrentHashMap<>();
/** Container for various labels used in the responses generated by this component */
private static class SuggesterResultLabels {
static final String SUGGEST = "suggest";
@ -345,16 +348,18 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
}
@Override
public NamedList getStatistics() {
NamedList<String> stats = new SimpleOrderedMap<>();
stats.add("totalSizeInBytes", String.valueOf(ramBytesUsed()));
for (Map.Entry<String, SolrSuggester> entry : suggesters.entrySet()) {
SolrSuggester suggester = entry.getValue();
stats.add(entry.getKey(), suggester.toString());
}
return stats;
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
registry = manager.registry(registryName);
manager.registerGauge(this, registryName, () -> ramBytesUsed(), true, "totalSizeInBytes", getCategory().toString(), scope);
MetricsMap suggestersMap = new MetricsMap((detailed, map) -> {
for (Map.Entry<String, SolrSuggester> entry : suggesters.entrySet()) {
SolrSuggester suggester = entry.getValue();
map.put(entry.getKey(), suggester.toString());
}
});
manager.registerGauge(this, registryName, suggestersMap, true, "suggesters", getCategory().toString(), scope);
}
@Override
public long ramBytesUsed() {
long sizeInBytes = 0;

View File

@ -128,40 +128,58 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
// Load the fragmenters
SolrFragmenter frag = solrCore.initPlugins(info.getChildren("fragmenter") , fragmenters,SolrFragmenter.class,null);
if (frag == null) frag = new GapFragmenter();
if (frag == null) {
frag = new GapFragmenter();
solrCore.initDefaultPlugin(frag, SolrFragmenter.class);
}
fragmenters.put("", frag);
fragmenters.put(null, frag);
// Load the formatters
SolrFormatter fmt = solrCore.initPlugins(info.getChildren("formatter"), formatters,SolrFormatter.class,null);
if (fmt == null) fmt = new HtmlFormatter();
if (fmt == null) {
fmt = new HtmlFormatter();
solrCore.initDefaultPlugin(fmt, SolrFormatter.class);
}
formatters.put("", fmt);
formatters.put(null, fmt);
// Load the encoders
SolrEncoder enc = solrCore.initPlugins(info.getChildren("encoder"), encoders,SolrEncoder.class,null);
if (enc == null) enc = new DefaultEncoder();
if (enc == null) {
enc = new DefaultEncoder();
solrCore.initDefaultPlugin(enc, SolrEncoder.class);
}
encoders.put("", enc);
encoders.put(null, enc);
// Load the FragListBuilders
SolrFragListBuilder fragListBuilder = solrCore.initPlugins(info.getChildren("fragListBuilder"),
fragListBuilders, SolrFragListBuilder.class, null );
if( fragListBuilder == null ) fragListBuilder = new SimpleFragListBuilder();
if( fragListBuilder == null ) {
fragListBuilder = new SimpleFragListBuilder();
solrCore.initDefaultPlugin(fragListBuilder, SolrFragListBuilder.class);
}
fragListBuilders.put( "", fragListBuilder );
fragListBuilders.put( null, fragListBuilder );
// Load the FragmentsBuilders
SolrFragmentsBuilder fragsBuilder = solrCore.initPlugins(info.getChildren("fragmentsBuilder"),
fragmentsBuilders, SolrFragmentsBuilder.class, null);
if( fragsBuilder == null ) fragsBuilder = new ScoreOrderFragmentsBuilder();
if( fragsBuilder == null ) {
fragsBuilder = new ScoreOrderFragmentsBuilder();
solrCore.initDefaultPlugin(fragsBuilder, SolrFragmentsBuilder.class);
}
fragmentsBuilders.put( "", fragsBuilder );
fragmentsBuilders.put( null, fragsBuilder );
// Load the BoundaryScanners
SolrBoundaryScanner boundaryScanner = solrCore.initPlugins(info.getChildren("boundaryScanner"),
boundaryScanners, SolrBoundaryScanner.class, null);
if(boundaryScanner == null) boundaryScanner = new SimpleBoundaryScanner();
if(boundaryScanner == null) {
boundaryScanner = new SimpleBoundaryScanner();
solrCore.initDefaultPlugin(boundaryScanner, SolrBoundaryScanner.class);
}
boundaryScanners.put("", boundaryScanner);
boundaryScanners.put(null, boundaryScanner);

View File

@ -30,7 +30,7 @@ public class GapFragmenter extends HighlightingPluginBase implements SolrFragmen
@Override
public Fragmenter getFragmenter(String fieldName, SolrParams params )
{
numRequests++;
numRequests.inc();
params = SolrParams.wrapDefaults(params, defaults);
int fragsize = params.getFieldInt( fieldName, HighlightParams.FRAGSIZE, 100 );

View File

@ -16,21 +16,27 @@
*/
package org.apache.solr.highlight;
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
import com.codahale.metrics.Counter;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
/**
*
* @since solr 1.3
*/
public abstract class HighlightingPluginBase implements SolrInfoMBean
public abstract class HighlightingPluginBase implements SolrInfoBean, SolrMetricProducer
{
protected long numRequests;
protected Counter numRequests;
protected SolrParams defaults;
protected Set<String> metricNames = new HashSet<>(1);
protected MetricRegistry registry;
public void init(NamedList args) {
if( args != null ) {
@ -50,14 +56,7 @@ public abstract class HighlightingPluginBase implements SolrInfoMBean
@Override
public abstract String getDescription();
@Override
public String getSource() { return null; }
@Override
public String getVersion() {
return getClass().getPackage().getSpecificationVersion();
}
@Override
public Category getCategory()
{
@ -65,15 +64,19 @@ public abstract class HighlightingPluginBase implements SolrInfoMBean
}
@Override
public URL[] getDocs() {
return null; // this can be overridden, but not required
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public NamedList getStatistics() {
NamedList<Long> lst = new SimpleOrderedMap<>();
lst.add("requests", numRequests);
return lst;
public MetricRegistry getMetricRegistry() {
return registry;
}
@Override
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
registry = manager.registry(registryName);
numRequests = manager.counter(this, registryName, "requests", getCategory().toString(), scope);
}
}

View File

@ -29,7 +29,7 @@ public class HtmlFormatter extends HighlightingPluginBase implements SolrFormatt
@Override
public Formatter getFormatter(String fieldName, SolrParams params )
{
numRequests++;
numRequests.inc();
params = SolrParams.wrapDefaults(params, defaults);
return new SimpleHTMLFormatter(

View File

@ -60,7 +60,7 @@ public class RegexFragmenter extends HighlightingPluginBase implements SolrFragm
@Override
public Fragmenter getFragmenter(String fieldName, SolrParams params )
{
numRequests++;
numRequests.inc();
params = SolrParams.wrapDefaults(params, defaults);
int fragsize = params.getFieldInt( fieldName, HighlightParams.FRAGSIZE, LuceneRegexFragmenter.DEFAULT_FRAGMENT_SIZE );

View File

@ -28,7 +28,7 @@ public class SimpleFragListBuilder extends HighlightingPluginBase implements
// If that ever changes, it should wrap them with defaults...
// params = SolrParams.wrapDefaults(params, defaults)
numRequests++;
numRequests.inc();
return new org.apache.lucene.search.vectorhighlight.SimpleFragListBuilder();
}

View File

@ -28,7 +28,7 @@ public class SingleFragListBuilder extends HighlightingPluginBase implements
// If that ever changes, it should wrap them with defaults...
// params = SolrParams.wrapDefaults(params, defaults)
numRequests++;
numRequests.inc();
return new org.apache.lucene.search.vectorhighlight.SingleFragListBuilder();
}

View File

@ -18,14 +18,14 @@ package org.apache.solr.highlight;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
public abstract class SolrBoundaryScanner extends HighlightingPluginBase implements
SolrInfoMBean, NamedListInitializedPlugin {
SolrInfoBean, NamedListInitializedPlugin {
public BoundaryScanner getBoundaryScanner(String fieldName, SolrParams params){
numRequests++;
numRequests.inc();
params = SolrParams.wrapDefaults(params, defaults);
return get(fieldName, params);

View File

@ -19,10 +19,10 @@ package org.apache.solr.highlight;
import org.apache.lucene.search.highlight.Encoder;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
public interface SolrEncoder extends SolrInfoMBean, NamedListInitializedPlugin {
public interface SolrEncoder extends SolrInfoBean, NamedListInitializedPlugin {
/** <code>init</code> will be called just once, immediately after creation.
* <p>The args are user-level initialization parameters that

View File

@ -19,10 +19,10 @@ package org.apache.solr.highlight;
import org.apache.lucene.search.highlight.Formatter;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
public interface SolrFormatter extends SolrInfoMBean, NamedListInitializedPlugin {
public interface SolrFormatter extends SolrInfoBean, NamedListInitializedPlugin {
/** <code>init</code> will be called just once, immediately after creation.
* <p>The args are user-level initialization parameters that

View File

@ -19,10 +19,10 @@ package org.apache.solr.highlight;
import org.apache.lucene.search.vectorhighlight.FragListBuilder;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
public interface SolrFragListBuilder extends SolrInfoMBean, NamedListInitializedPlugin {
public interface SolrFragListBuilder extends SolrInfoBean, NamedListInitializedPlugin {
/** <code>init</code> will be called just once, immediately after creation.
* <p>The args are user-level initialization parameters that

View File

@ -19,10 +19,10 @@ package org.apache.solr.highlight;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
public interface SolrFragmenter extends SolrInfoMBean, NamedListInitializedPlugin {
public interface SolrFragmenter extends SolrInfoBean, NamedListInitializedPlugin {
/** <code>init</code> will be called just once, immediately after creation.
* <p>The args are user-level initialization parameters that

View File

@ -21,11 +21,11 @@ import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.HighlightParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
public abstract class SolrFragmentsBuilder extends HighlightingPluginBase
implements SolrInfoMBean, NamedListInitializedPlugin {
implements SolrInfoBean, NamedListInitializedPlugin {
public static final String DEFAULT_PRE_TAGS = "<em>";
public static final String DEFAULT_POST_TAGS = "</em>";
@ -37,7 +37,7 @@ public abstract class SolrFragmentsBuilder extends HighlightingPluginBase
* @return An appropriate {@link org.apache.lucene.search.vectorhighlight.FragmentsBuilder}.
*/
public FragmentsBuilder getFragmentsBuilder(SolrParams params, BoundaryScanner bs) {
numRequests++;
numRequests.inc();
params = SolrParams.wrapDefaults(params, defaults);
return getFragmentsBuilder( params, getPreTags( params, null ), getPostTags( params, null ), bs );

View File

@ -28,7 +28,7 @@ public class WeightedFragListBuilder extends HighlightingPluginBase implements
// If that ever changes, it should wrap them with defaults...
// params = SolrParams.wrapDefaults(params, defaults)
numRequests++;
numRequests.inc();
return new org.apache.lucene.search.vectorhighlight.WeightedFragListBuilder();
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics;
import java.lang.management.BufferPoolMXBean;
import java.lang.management.ManagementFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricSet;
/**
 * A drop-in alternative to {@link com.codahale.metrics.jvm.BufferPoolMetricSet} that reads the
 * platform {@link BufferPoolMXBean}s directly instead of going through an MBean server.
 */
public class AltBufferPoolMetricSet implements MetricSet {

  /**
   * Builds a fresh map of gauges for every JVM buffer pool (e.g. "direct", "mapped").
   * For each pool three gauges are exposed, keyed {@code <poolName>.Count},
   * {@code <poolName>.MemoryUsed} and {@code <poolName>.TotalCapacity}; the gauges read
   * the live MXBean on every invocation.
   *
   * @return map of metric name to gauge, one entry per pool attribute
   */
  @Override
  public Map<String, Metric> getMetrics() {
    final Map<String, Metric> result = new HashMap<>();
    for (final BufferPoolMXBean bean : ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class)) {
      final String prefix = bean.getName();
      result.put(prefix + ".Count", (Gauge<Long>) bean::getCount);
      result.put(prefix + ".MemoryUsed", (Gauge<Long>) bean::getMemoryUsed);
      result.put(prefix + ".TotalCapacity", (Gauge<Long>) bean::getTotalCapacity);
    }
    return result;
  }
}

View File

@ -0,0 +1,184 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.ReflectionException;
import javax.management.openmbean.OpenMBeanAttributeInfoSupport;
import javax.management.openmbean.OpenType;
import javax.management.openmbean.SimpleType;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Metric;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.solr.common.SolrException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Dynamically constructed map of metrics, intentionally different from {@link com.codahale.metrics.MetricSet}
 * where each metric had to be known in advance and registered separately in {@link com.codahale.metrics.MetricRegistry}.
 * <p>Note: this awkwardly extends {@link Gauge} and not {@link Metric} because awkwardly {@link Metric} instances
 * are not supported by {@link com.codahale.metrics.MetricRegistryListener} :(</p>
 * <p>Note 2: values added to this metric map should belong to the list of types supported by JMX:
 * {@link javax.management.openmbean.OpenType#ALLOWED_CLASSNAMES_LIST}, otherwise only their toString()
 * representation will be shown in JConsole.</p>
 */
public class MetricsMap implements Gauge<Map<String,Object>>, DynamicMBean {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  // set to true to use cached statistics between getMBeanInfo calls to work
  // around over calling getStatistics on MBeanInfos when iterating over all attributes (SOLR-6586)
  private final boolean useCachedStatsBetweenGetMBeanInfoCalls = Boolean.getBoolean("useCachedStatsBetweenGetMBeanInfoCalls");

  // callback that populates the map; the Boolean argument requests a "detailed" view
  private BiConsumer<Boolean, Map<String, Object>> initializer;
  // snapshot taken by getMBeanInfo(), consulted by getAttribute() when caching is enabled
  private volatile Map<String,Object> cachedValue;

  /**
   * @param initializer callback invoked on every snapshot; it receives a "detailed" flag and
   *                    an empty mutable map which it fills with the current metric values
   */
  public MetricsMap(BiConsumer<Boolean, Map<String,Object>> initializer) {
    this.initializer = initializer;
  }

  /** Returns a detailed snapshot of the current values (Gauge contract). */
  @Override
  public Map<String,Object> getValue() {
    return getValue(true);
  }

  /**
   * Returns a fresh snapshot of the current values.
   *
   * @param detailed passed through to the initializer callback
   */
  public Map<String,Object> getValue(boolean detailed) {
    Map<String,Object> map = new HashMap<>();
    initializer.accept(detailed, map);
    return map;
  }

  public String toString() {
    return getValue().toString();
  }

  /**
   * Looks up a single JMX attribute by name from the current (or cached) snapshot.
   * Values of JMX simple types are returned as-is; anything else is rendered via
   * {@code toString()}, since JConsole can only display simple/open types directly.
   * Returns null when the attribute is absent.
   */
  @Override
  public Object getAttribute(String attribute) throws AttributeNotFoundException, MBeanException, ReflectionException {
    Object val;
    Map<String,Object> stats = null;
    if (useCachedStatsBetweenGetMBeanInfoCalls) {
      // reuse the snapshot taken by the last getMBeanInfo() call (see SOLR-6586)
      Map<String,Object> cachedStats = this.cachedValue;
      if (cachedStats != null) {
        stats = cachedStats;
      }
    }
    if (stats == null) {
      stats = getValue(true);
    }
    val = stats.get(attribute);
    if (val != null) {
      // It's String or one of the simple types, just return it as JMX suggests direct support for such types
      for (String simpleTypeName : SimpleType.ALLOWED_CLASSNAMES_LIST) {
        if (val.getClass().getName().equals(simpleTypeName)) {
          return val;
        }
      }
      // It's an arbitrary object which could be something complex and odd, return its toString, assuming that is
      // a workable representation of the object
      return val.toString();
    }
    return null;
  }

  /** This bean is read-only; attribute writes are rejected. */
  @Override
  public void setAttribute(Attribute attribute) throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException {
    throw new UnsupportedOperationException("Operation not Supported");
  }

  /**
   * Best-effort bulk read: attributes that fail to resolve are logged and skipped
   * rather than failing the whole call.
   */
  @Override
  public AttributeList getAttributes(String[] attributes) {
    AttributeList list = new AttributeList();
    for (String attribute : attributes) {
      try {
        list.add(new Attribute(attribute, getAttribute(attribute)));
      } catch (Exception e) {
        log.warn("Could not get attribute " + attribute);
      }
    }
    return list;
  }

  /** This bean is read-only; attribute writes are rejected. */
  @Override
  public AttributeList setAttributes(AttributeList attributes) {
    throw new UnsupportedOperationException("Operation not Supported");
  }

  /** No operations are exposed by this bean. */
  @Override
  public Object invoke(String actionName, Object[] params, String[] signature) throws MBeanException, ReflectionException {
    throw new UnsupportedOperationException("Operation not Supported");
  }

  /**
   * Describes the bean's attributes from a fresh snapshot of the metric map.
   * When caching is enabled the snapshot is stored for reuse by {@link #getAttribute(String)}.
   * Attributes with a recognized JMX simple type are described as open-mbean attributes;
   * everything else is described as a String (matching the toString() fallback above).
   */
  @Override
  public MBeanInfo getMBeanInfo() {
    ArrayList<MBeanAttributeInfo> attrInfoList = new ArrayList<>();
    Map<String,Object> stats = getValue(true);
    if (useCachedStatsBetweenGetMBeanInfoCalls) {
      cachedValue = stats;
    }
    try {
      stats.forEach((k, v) -> {
        Class type = v.getClass();
        OpenType typeBox = determineType(type);
        if (type.equals(String.class) || typeBox == null) {
          attrInfoList.add(new MBeanAttributeInfo(k, String.class.getName(),
              null, true, false, false));
        } else {
          attrInfoList.add(new OpenMBeanAttributeInfoSupport(
              k, k, typeBox, true, false, false));
        }
      });
    } catch (Exception e) {
      // don't log issue if the core is closing
      if (!(SolrException.getRootCause(e) instanceof AlreadyClosedException))
        log.warn("Could not get attributes of MetricsMap: {}", this, e);
    }
    MBeanAttributeInfo[] attrInfoArr = attrInfoList
        .toArray(new MBeanAttributeInfo[attrInfoList.size()]);
    return new MBeanInfo(getClass().getName(), "MetricsMap", attrInfoArr, null, null, null);
  }

  /**
   * Maps a Java class to the corresponding JMX {@link SimpleType} constant by reflecting
   * over the public {@code SimpleType} fields and comparing type names.
   * Returns null when the class has no matching simple type.
   */
  private OpenType determineType(Class type) {
    try {
      for (Field field : SimpleType.class.getFields()) {
        if (field.getType().equals(SimpleType.class)) {
          SimpleType candidate = (SimpleType) field.get(SimpleType.class);
          if (candidate.getTypeName().equals(type.getName())) {
            return candidate;
          }
        }
      }
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    return null;
  }
}

View File

@ -16,77 +16,31 @@
*/
package org.apache.solr.metrics;
import javax.management.JMException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import java.lang.invoke.MethodHandles;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.codahale.metrics.JmxAttributeGauge;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.solr.util.stats.MetricUtils;
/**
* This is an extended replacement for {@link com.codahale.metrics.jvm.FileDescriptorRatioGauge}
* - that class uses reflection and doesn't work under Java 9. We can also get much more
* information about OS environment once we have to go through MBeanServer anyway.
* - that class uses reflection and doesn't work under Java 9. This implementation tries to retrieve
* bean properties from known implementations of {@link java.lang.management.OperatingSystemMXBean}.
*/
public class OperatingSystemMetricSet implements MetricSet {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
/** Metric names - these correspond to known numeric MBean attributes. Depending on the OS and
* Java implementation only some of them may be actually present.
*/
public static final String[] METRICS = {
"AvailableProcessors",
"CommittedVirtualMemorySize",
"FreePhysicalMemorySize",
"FreeSwapSpaceSize",
"MaxFileDescriptorCount",
"OpenFileDescriptorCount",
"ProcessCpuLoad",
"ProcessCpuTime",
"SystemLoadAverage",
"TotalPhysicalMemorySize",
"TotalSwapSpaceSize"
};
private final MBeanServer mBeanServer;
public OperatingSystemMetricSet(MBeanServer mBeanServer) {
this.mBeanServer = mBeanServer;
}
@Override
public Map<String, Metric> getMetrics() {
final Map<String, Metric> metrics = new HashMap<>();
try {
final ObjectName on = new ObjectName("java.lang:type=OperatingSystem");
// verify that it exists
MBeanInfo info = mBeanServer.getMBeanInfo(on);
// collect valid attributes
Set<String> attributes = new HashSet<>();
for (MBeanAttributeInfo ai : info.getAttributes()) {
attributes.add(ai.getName());
OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
MetricUtils.addMXBeanMetrics(os, MetricUtils.OS_MXBEAN_CLASSES, null, (k, v) -> {
if (!metrics.containsKey(k)) {
metrics.put(k, v);
}
for (String metric : METRICS) {
// verify that an attribute exists before attempting to add it
if (attributes.contains(metric)) {
metrics.put(metric, new JmxAttributeGauge(mBeanServer, on, metric));
}
}
} catch (JMException ignored) {
log.debug("Unable to load OperatingSystem MBean", ignored);
}
});
return metrics;
}
}

View File

@ -20,11 +20,12 @@ import java.io.Closeable;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.core.NodeConfig;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -76,14 +77,14 @@ public class SolrCoreMetricManager implements Closeable {
}
/**
* Load reporters configured globally and specific to {@link org.apache.solr.core.SolrInfoMBean.Group#core}
* Load reporters configured globally and specific to {@link org.apache.solr.core.SolrInfoBean.Group#core}
* group or with a registry name specific to this core.
*/
public void loadReporters() {
NodeConfig nodeConfig = core.getCoreDescriptor().getCoreContainer().getConfig();
PluginInfo[] pluginInfos = nodeConfig.getMetricReporterPlugins();
metricManager.loadReporters(pluginInfos, core.getResourceLoader(), tag,
SolrInfoMBean.Group.core, registryName);
SolrInfoBean.Group.core, registryName);
if (cloudMode) {
metricManager.loadShardReporters(pluginInfos, core);
}
@ -126,12 +127,26 @@ public class SolrCoreMetricManager implements Closeable {
producer.initializeMetrics(metricManager, getRegistryName(), scope);
}
/**
 * Return the metric registry used by this SolrCore, or {@code null} if no
 * registry name has been assigned.
 */
public MetricRegistry getRegistry() {
  return registryName == null ? null : metricManager.registry(registryName);
}
/**
 * Closes reporters specific to this core, including the leader registry
 * reporters when a leader registry name is present.
 */
@Override
public void close() throws IOException {
  metricManager.closeReporters(getRegistryName(), tag);
  String leaderRegistry = getLeaderRegistryName();
  if (leaderRegistry != null) {
    metricManager.closeReporters(leaderRegistry, tag);
  }
}
public SolrCore getCore() {
@ -176,9 +191,9 @@ public class SolrCoreMetricManager implements Closeable {
public static String createRegistryName(boolean cloud, String collectionName, String shardName, String replicaName, String coreName) {
if (cloud) { // build registry name from logical names
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, collectionName, shardName, replicaName);
return SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, collectionName, shardName, replicaName);
} else {
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, coreName);
return SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, coreName);
}
}
@ -224,7 +239,7 @@ public class SolrCoreMetricManager implements Closeable {
public static String createLeaderRegistryName(boolean cloud, String collectionName, String shardName) {
if (cloud) {
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.collection, collectionName, shardName, "leader");
return SolrMetricManager.getRegistryName(SolrInfoBean.Group.collection, collectionName, shardName, "leader");
} else {
return null;
}

View File

@ -17,7 +17,7 @@
package org.apache.solr.metrics;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
/**
* Wraps meta-data for a metric.
@ -25,7 +25,7 @@ import org.apache.solr.core.SolrInfoMBean;
public final class SolrMetricInfo {
public final String name;
public final String scope;
public final SolrInfoMBean.Category category;
public final SolrInfoBean.Category category;
/**
* Creates a new instance of {@link SolrMetricInfo}.
@ -34,7 +34,7 @@ public final class SolrMetricInfo {
* @param scope the scope of the metric (e.g. `/admin/ping`)
* @param name the name of the metric (e.g. `Requests`)
*/
public SolrMetricInfo(SolrInfoMBean.Category category, String scope, String name) {
public SolrMetricInfo(SolrInfoBean.Category category, String scope, String name) {
this.name = name;
this.scope = scope;
this.category = category;
@ -45,18 +45,25 @@ public final class SolrMetricInfo {
return null;
}
String[] names = fullName.split("\\.");
if (names.length < 3) { // not a valid info
if (names.length < 2) { // not a valid info
return null;
}
// check top-level name for valid category
SolrInfoMBean.Category category;
SolrInfoBean.Category category;
try {
category = SolrInfoMBean.Category.valueOf(names[0]);
category = SolrInfoBean.Category.valueOf(names[0]);
} catch (IllegalArgumentException e) { // not a valid category
return null;
}
String scope = names[1];
String name = fullName.substring(names[0].length() + names[1].length() + 2);
String scope;
String name;
if (names.length == 2) {
scope = null;
name = fullName.substring(names[0].length() + 1);
} else {
scope = names[1];
name = fullName.substring(names[0].length() + names[1].length() + 2);
}
return new SolrMetricInfo(category, scope, name);
}

View File

@ -51,7 +51,7 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.metrics.reporters.solr.SolrClusterReporter;
import org.apache.solr.metrics.reporters.solr.SolrShardReporter;
@ -69,11 +69,11 @@ import org.slf4j.LoggerFactory;
* {@link MetricRegistry} instances are automatically created when first referenced by name. Similarly,
* instances of {@link Metric} implementations, such as {@link Meter}, {@link Counter}, {@link Timer} and
* {@link Histogram} are automatically created and registered under hierarchical names, in a specified
* registry, when {@link #meter(String, String, String...)} and other similar methods are called.
* registry, when {@link #meter(SolrInfoBean, String, String, String...)} and other similar methods are called.
* <p>This class enforces a common prefix ({@link #REGISTRY_NAME_PREFIX}) in all registry
* names.</p>
* <p>Solr uses several different registries for collecting metrics belonging to different groups, using
* {@link org.apache.solr.core.SolrInfoMBean.Group} as the main name of the registry (plus the
* {@link org.apache.solr.core.SolrInfoBean.Group} as the main name of the registry (plus the
* above-mentioned prefix). Instances of {@link SolrMetricManager} are created for each {@link org.apache.solr.core.CoreContainer},
* and most registries are local to each instance, with the exception of two global registries:
* <code>solr.jetty</code> and <code>solr.jvm</code>, which are shared between all {@link org.apache.solr.core.CoreContainer}-s</p>
@ -87,11 +87,11 @@ public class SolrMetricManager {
/** Registry name for Jetty-specific metrics. This name is also subject to overrides controlled by
* system properties. This registry is shared between instances of {@link SolrMetricManager}. */
public static final String JETTY_REGISTRY = REGISTRY_NAME_PREFIX + SolrInfoMBean.Group.jetty.toString();
public static final String JETTY_REGISTRY = REGISTRY_NAME_PREFIX + SolrInfoBean.Group.jetty.toString();
/** Registry name for JVM-specific metrics. This name is also subject to overrides controlled by
* system properties. This registry is shared between instances of {@link SolrMetricManager}. */
public static final String JVM_REGISTRY = REGISTRY_NAME_PREFIX + SolrInfoMBean.Group.jvm.toString();
public static final String JVM_REGISTRY = REGISTRY_NAME_PREFIX + SolrInfoBean.Group.jvm.toString();
private final ConcurrentMap<String, MetricRegistry> registries = new ConcurrentHashMap<>();
@ -247,6 +247,66 @@ public class SolrMetricManager {
}
}
/**
 * A {@link MetricFilter} that matches when at least one of its component
 * filters matches. With no components nothing matches.
 */
public static class OrFilter implements MetricFilter {
  List<MetricFilter> filters = new ArrayList<>();

  public OrFilter(Collection<MetricFilter> filters) {
    if (filters != null) {
      this.filters.addAll(filters);
    }
  }

  public OrFilter(MetricFilter... filters) {
    if (filters == null) {
      return;
    }
    for (MetricFilter f : filters) {
      if (f != null) {
        this.filters.add(f);
      }
    }
  }

  @Override
  public boolean matches(String s, Metric metric) {
    // True as soon as any component filter accepts this metric.
    return filters.stream().anyMatch(f -> f.matches(s, metric));
  }
}
/**
 * A {@link MetricFilter} that matches only when every component filter
 * matches. With no components everything matches.
 */
public static class AndFilter implements MetricFilter {
  List<MetricFilter> filters = new ArrayList<>();

  public AndFilter(Collection<MetricFilter> filters) {
    if (filters != null) {
      this.filters.addAll(filters);
    }
  }

  public AndFilter(MetricFilter... filters) {
    if (filters == null) {
      return;
    }
    for (MetricFilter f : filters) {
      if (f != null) {
        this.filters.add(f);
      }
    }
  }

  @Override
  public boolean matches(String s, Metric metric) {
    // False as soon as any component filter rejects this metric.
    return filters.stream().allMatch(f -> f.matches(s, metric));
  }
}
/**
* Return a set of existing registry names.
*/
@ -451,6 +511,21 @@ public class SolrMetricManager {
return filter.getMatched();
}
/**
 * Retrieve matching metrics and their names.
 * @param registry registry name.
 * @param metricFilter filter (null is equivalent to {@link MetricFilter#ALL}).
 * @return map of matching names and metrics
 */
public Map<String, Metric> getMetrics(String registry, MetricFilter metricFilter) {
  Map<String, Metric> allMetrics = registry(registry).getMetrics();
  if (metricFilter == null || metricFilter == MetricFilter.ALL) {
    return allMetrics;
  }
  return allMetrics.entrySet().stream()
      .filter(e -> metricFilter.matches(e.getKey(), e.getValue()))
      .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
/**
* Create or get an existing named {@link Meter}
* @param registry registry name
@ -459,8 +534,12 @@ public class SolrMetricManager {
* @param metricPath (optional) additional top-most metric name path elements
* @return existing or a newly created {@link Meter}
*/
public Meter meter(String registry, String metricName, String... metricPath) {
return registry(registry).meter(mkName(metricName, metricPath));
public Meter meter(SolrInfoBean info, String registry, String metricName, String... metricPath) {
final String name = mkName(metricName, metricPath);
if (info != null) {
info.registerMetricName(name);
}
return registry(registry).meter(name);
}
/**
@ -471,8 +550,12 @@ public class SolrMetricManager {
* @param metricPath (optional) additional top-most metric name path elements
* @return existing or a newly created {@link Timer}
*/
public Timer timer(String registry, String metricName, String... metricPath) {
return registry(registry).timer(mkName(metricName, metricPath));
public Timer timer(SolrInfoBean info, String registry, String metricName, String... metricPath) {
final String name = mkName(metricName, metricPath);
if (info != null) {
info.registerMetricName(name);
}
return registry(registry).timer(name);
}
/**
@ -483,8 +566,12 @@ public class SolrMetricManager {
* @param metricPath (optional) additional top-most metric name path elements
* @return existing or a newly created {@link Counter}
*/
public Counter counter(String registry, String metricName, String... metricPath) {
return registry(registry).counter(mkName(metricName, metricPath));
public Counter counter(SolrInfoBean info, String registry, String metricName, String... metricPath) {
final String name = mkName(metricName, metricPath);
if (info != null) {
info.registerMetricName(name);
}
return registry(registry).counter(name);
}
/**
@ -495,8 +582,12 @@ public class SolrMetricManager {
* @param metricPath (optional) additional top-most metric name path elements
* @return existing or a newly created {@link Histogram}
*/
public Histogram histogram(String registry, String metricName, String... metricPath) {
return registry(registry).histogram(mkName(metricName, metricPath));
public Histogram histogram(SolrInfoBean info, String registry, String metricName, String... metricPath) {
final String name = mkName(metricName, metricPath);
if (info != null) {
info.registerMetricName(name);
}
return registry(registry).histogram(name);
}
/**
@ -510,9 +601,12 @@ public class SolrMetricManager {
* using dotted notation
* @param metricPath (optional) additional top-most metric name path elements
*/
public void register(String registry, Metric metric, boolean force, String metricName, String... metricPath) {
public void register(SolrInfoBean info, String registry, Metric metric, boolean force, String metricName, String... metricPath) {
MetricRegistry metricRegistry = registry(registry);
String fullName = mkName(metricName, metricPath);
if (info != null) {
info.registerMetricName(fullName);
}
synchronized (metricRegistry) {
if (force && metricRegistry.getMetrics().containsKey(fullName)) {
metricRegistry.remove(fullName);
@ -521,8 +615,8 @@ public class SolrMetricManager {
}
}
public void registerGauge(String registry, Gauge<?> gauge, boolean force, String metricName, String... metricPath) {
register(registry, gauge, force, metricName, metricPath);
public void registerGauge(SolrInfoBean info, String registry, Gauge<?> gauge, boolean force, String metricName, String... metricPath) {
register(info, registry, gauge, force, metricName, metricPath);
}
/**
@ -569,7 +663,7 @@ public class SolrMetricManager {
* </pre>
* <b>NOTE:</b> Once a registry is renamed in a way that its metrics are combined with another repository
* it is no longer possible to retrieve the original metrics until this renaming is removed and the Solr
* {@link org.apache.solr.core.SolrInfoMBean.Group} of components that reported to that name is restarted.
* {@link org.apache.solr.core.SolrInfoBean.Group} of components that reported to that name is restarted.
* @param registry The name of the registry
* @return A potentially overridden (via System properties) registry name
*/
@ -600,7 +694,7 @@ public class SolrMetricManager {
* and the group parameter will be ignored.
* @return fully-qualified and prefixed registry name, with overrides applied.
*/
public static String getRegistryName(SolrInfoMBean.Group group, String... names) {
public static String getRegistryName(SolrInfoBean.Group group, String... names) {
String fullName;
String prefix = REGISTRY_NAME_PREFIX + group.toString() + ".";
// check for existing prefix and group
@ -622,7 +716,7 @@ public class SolrMetricManager {
// reporter management
/**
* Create and register {@link SolrMetricReporter}-s specific to a {@link org.apache.solr.core.SolrInfoMBean.Group}.
* Create and register {@link SolrMetricReporter}-s specific to a {@link org.apache.solr.core.SolrInfoBean.Group}.
* Note: reporters that specify neither "group" nor "registry" attributes are treated as universal -
* they will always be loaded for any group. These two attributes may also contain multiple comma- or
* whitespace-separated values, in which case the reporter will be loaded for any matching value from
@ -634,7 +728,7 @@ public class SolrMetricManager {
* @param group selected group, not null
* @param registryNames optional child registry name elements
*/
public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, String tag, SolrInfoMBean.Group group, String... registryNames) {
public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, String tag, SolrInfoBean.Group group, String... registryNames) {
if (pluginInfos == null || pluginInfos.length == 0) {
return;
}
@ -941,13 +1035,13 @@ public class SolrMetricManager {
// prepare default plugin if none present in the config
Map<String, String> attrs = new HashMap<>();
attrs.put("name", "shardDefault");
attrs.put("group", SolrInfoMBean.Group.shard.toString());
attrs.put("group", SolrInfoBean.Group.shard.toString());
Map<String, Object> initArgs = new HashMap<>();
initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
String registryName = core.getCoreMetricManager().getRegistryName();
// collect infos and normalize
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.shard.toString(), SolrShardReporter.class.getName(),
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.shard.toString(), SolrShardReporter.class.getName(),
attrs, initArgs, null);
for (PluginInfo info : infos) {
try {
@ -967,12 +1061,12 @@ public class SolrMetricManager {
}
Map<String, String> attrs = new HashMap<>();
attrs.put("name", "clusterDefault");
attrs.put("group", SolrInfoMBean.Group.cluster.toString());
attrs.put("group", SolrInfoBean.Group.cluster.toString());
Map<String, Object> initArgs = new HashMap<>();
initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.cluster.toString(), SolrClusterReporter.class.getName(),
List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.cluster.toString(), SolrClusterReporter.class.getName(),
attrs, initArgs, null);
String registryName = getRegistryName(SolrInfoMBean.Group.cluster);
String registryName = getRegistryName(SolrInfoBean.Group.cluster);
for (PluginInfo info : infos) {
try {
SolrMetricReporter reporter = loadReporter(registryName, cc.getResourceLoader(), info, null);

View File

@ -30,6 +30,7 @@ public abstract class SolrMetricReporter implements Closeable, PluginInfoInitial
protected final String registryName;
protected final SolrMetricManager metricManager;
protected PluginInfo pluginInfo;
protected boolean enabled = true;
/**
* Create a reporter for metrics managed in a named registry.
@ -57,6 +58,17 @@ public abstract class SolrMetricReporter implements Closeable, PluginInfoInitial
validate();
}
/**
 * Enable reporting, defaults to true. Implementations should check this flag in
 * {@link #validate()} and accordingly enable or disable reporting.
 * @param enabled enable; a null value leaves the current setting unchanged
 *                (defaults to true when never set).
 */
public void setEnabled(Boolean enabled) {
  if (enabled == null) {
    return;
  }
  this.enabled = enabled;
}
/**
* Get the effective {@link PluginInfo} instance that was used for
* initialization of this plugin.

View File

@ -50,6 +50,20 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
this.props = additionalProperties;
}
/**
* Return current domain.
*/
public String getDomain() {
return domain;
}
/**
* Return current reporterName.
*/
public String getReporterName() {
return reporterName;
}
/**
* Create a hierarchical name.
*
@ -60,7 +74,8 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
@Override
public ObjectName createName(String type, String currentDomain, String name) {
SolrMetricInfo metricInfo = SolrMetricInfo.of(name);
String safeName = metricInfo != null ? metricInfo.name : name;
safeName = safeName.replaceAll(":", "_");
// It turns out that ObjectName(String) mostly preserves key ordering
// as specified in the constructor (except for the 'type' key that ends
// up at top level) - unlike ObjectName(String, Map) constructor
@ -90,24 +105,42 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
sb.append(currentDomain);
sb.append(':');
}
sb.append("reporter=");
sb.append(reporterName);
sb.append(',');
if (props != null && props.length > 0) {
boolean added = false;
for (int i = 0; i < props.length; i += 2) {
if (props[i] == null || props[i].isEmpty()) {
continue;
}
if (props[i + 1] == null || props[i + 1].isEmpty()) {
continue;
}
sb.append(',');
sb.append(props[i]);
sb.append('=');
sb.append(props[i + 1]);
added = true;
}
if (added) {
sb.append(',');
}
}
if (metricInfo != null) {
sb.append("category=");
sb.append(metricInfo.category.toString());
sb.append(",scope=");
sb.append(metricInfo.scope);
if (metricInfo.scope != null) {
sb.append(",scope=");
sb.append(metricInfo.scope);
}
// we could also split by type, but don't call it 'type' :)
// if (type != null) {
// sb.append(",class=");
// sb.append(type);
// }
sb.append(",name=");
sb.append(metricInfo.name);
sb.append(safeName);
} else {
// make dotted names into hierarchies
String[] path = name.split("\\.");
String[] path = safeName.split("\\.");
for (int i = 0; i < path.length - 1; i++) {
if (i > 0) {
sb.append(',');
@ -127,20 +160,6 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
sb.append("name=");
sb.append(path[path.length - 1]);
}
if (props != null && props.length > 0) {
for (int i = 0; i < props.length; i += 2) {
if (props[i] == null || props[i].isEmpty()) {
continue;
}
if (props[i + 1] == null || props[i + 1].isEmpty()) {
continue;
}
sb.append(',');
sb.append(props[i]);
sb.append('=');
sb.append(props[i + 1]);
}
}
ObjectName objectName;

View File

@ -0,0 +1,84 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.metrics.reporters;
import java.io.Closeable;
import java.lang.invoke.MethodHandles;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Simple cache for reusable service clients used by some implementations of
* {@link org.apache.solr.metrics.SolrMetricReporter}.
*/
/**
 * Simple cache for reusable service clients used by some implementations of
 * {@link org.apache.solr.metrics.SolrMetricReporter}. Clients are created
 * lazily, keyed by id, and closed (when {@link Closeable}) on {@link #close()}.
 */
public class ReporterClientCache<T> implements Closeable {
  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  // id -> client; concurrent map so reads don't need the monitor.
  private final Map<String, T> cache = new ConcurrentHashMap<>();

  /**
   * Provide an instance of service client.
   * @param <T> formal type
   */
  public interface ClientProvider<T> {
    /**
     * Get an instance of a service client. It's not specified that each time this
     * method is invoked a new client instance should be returned.
     * @return client instance
     * @throws Exception when client creation encountered an error.
     */
    T get() throws Exception;
  }

  /**
   * Get existing or register a new client.
   * @param id client id
   * @param clientProvider provider of new client instances
   * @return the cached or newly-created client, or {@code null} when the
   *         provider failed to create one (the failure is logged, not thrown).
   */
  public synchronized T getOrCreate(String id, ClientProvider<T> clientProvider) {
    T item = cache.get(id);
    if (item == null) {
      try {
        item = clientProvider.get();
        cache.put(id, item);
      } catch (Exception e) {
        // Client creation failure is non-fatal: callers simply get null and skip reporting.
        LOG.warn("Error providing a new client for id={}", id, e);
        item = null;
      }
    }
    return item;
  }

  /**
   * Empty this cache, and close all clients that are {@link Closeable}.
   */
  @Override
  public void close() {
    for (T client : cache.values()) {
      if (client instanceof Closeable) {
        try {
          ((Closeable)client).close();
        } catch (Exception e) {
          LOG.warn("Error closing client {}, ignoring...", client, e);
        }
      }
    }
    cache.clear();
  }
}

View File

@ -17,6 +17,9 @@
package org.apache.solr.metrics.reporters;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.MetricFilter;
@ -24,21 +27,26 @@ import com.codahale.metrics.ganglia.GangliaReporter;
import info.ganglia.gmetric4j.gmetric.GMetric;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
*/
public class SolrGangliaReporter extends SolrMetricReporter {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private String host = null;
private int port = -1;
private boolean multicast;
private int period = 60;
private String instancePrefix = null;
private String filterPrefix = null;
private List<String> filters = new ArrayList<>();
private boolean testing;
private GangliaReporter reporter;
private static final ReporterClientCache<GMetric> serviceRegistry = new ReporterClientCache<>();
// for unit tests
GMetric ganglia = null;
@ -65,10 +73,24 @@ public class SolrGangliaReporter extends SolrMetricReporter {
this.instancePrefix = prefix;
}
public void setFilter(String filter) {
this.filterPrefix = filter;
/**
* Report only metrics with names matching any of the prefix filters.
* @param filters list of 0 or more prefixes. If the list is empty then
* all names will match.
*/
public void setFilter(List<String> filters) {
if (filters == null || filters.isEmpty()) {
return;
}
this.filters.addAll(filters);
}
// due to vagaries of SolrPluginUtils.invokeSetters we need this too
public void setFilter(String filter) {
if (filter != null && !filter.isEmpty()) {
this.filters.add(filter);
}
}
public void setPeriod(int period) {
this.period = period;
@ -89,6 +111,10 @@ public class SolrGangliaReporter extends SolrMetricReporter {
@Override
protected void validate() throws IllegalStateException {
if (!enabled) {
log.info("Reporter disabled for registry " + registryName);
return;
}
if (host == null) {
throw new IllegalStateException("Init argument 'host' must be set to a valid Ganglia server name.");
}
@ -106,12 +132,12 @@ public class SolrGangliaReporter extends SolrMetricReporter {
//this is a separate method for unit tests
void start() {
if (!testing) {
try {
ganglia = new GMetric(host, port,
multicast ? GMetric.UDPAddressingMode.MULTICAST : GMetric.UDPAddressingMode.UNICAST,
1);
} catch (IOException ioe) {
throw new IllegalStateException("Exception connecting to Ganglia", ioe);
String id = host + ":" + port + ":" + multicast;
ganglia = serviceRegistry.getOrCreate(id, () -> new GMetric(host, port,
multicast ? GMetric.UDPAddressingMode.MULTICAST : GMetric.UDPAddressingMode.UNICAST,
1));
if (ganglia == null) {
return;
}
}
if (instancePrefix == null) {
@ -125,8 +151,8 @@ public class SolrGangliaReporter extends SolrMetricReporter {
.convertDurationsTo(TimeUnit.MILLISECONDS)
.prefixedWith(instancePrefix);
MetricFilter filter;
if (filterPrefix != null) {
filter = new SolrMetricManager.PrefixFilter(filterPrefix);
if (!filters.isEmpty()) {
filter = new SolrMetricManager.PrefixFilter(filters);
} else {
filter = MetricFilter.ALL;
}

View File

@ -18,6 +18,8 @@ package org.apache.solr.metrics.reporters;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.MetricFilter;
@ -41,9 +43,11 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
private int period = 60;
private boolean pickled = false;
private String instancePrefix = null;
private String filterPrefix = null;
private List<String> filters = new ArrayList<>();
private GraphiteReporter reporter = null;
private static final ReporterClientCache<GraphiteSender> serviceRegistry = new ReporterClientCache<>();
/**
* Create a Graphite reporter for metrics managed in a named registry.
*
@ -67,10 +71,25 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
this.instancePrefix = prefix;
}
public void setFilter(String filter) {
this.filterPrefix = filter;
/**
* Report only metrics with names matching any of the prefix filters.
* @param filters list of 0 or more prefixes. If the list is empty then
* all names will match.
*/
public void setFilter(List<String> filters) {
if (filters == null || filters.isEmpty()) {
return;
}
this.filters.addAll(filters);
}
public void setFilter(String filter) {
if (filter != null && !filter.isEmpty()) {
this.filters.add(filter);
}
}
public void setPickled(boolean pickled) {
this.pickled = pickled;
}
@ -81,6 +100,10 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
@Override
protected void validate() throws IllegalStateException {
if (!enabled) {
log.info("Reporter disabled for registry " + registryName);
return;
}
if (host == null) {
throw new IllegalStateException("Init argument 'host' must be set to a valid Graphite server name.");
}
@ -93,12 +116,15 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
if (period < 1) {
throw new IllegalStateException("Init argument 'period' is in time unit 'seconds' and must be at least 1.");
}
final GraphiteSender graphite;
if (pickled) {
graphite = new PickledGraphite(host, port);
} else {
graphite = new Graphite(host, port);
}
GraphiteSender graphite;
String id = host + ":" + port + ":" + pickled;
graphite = serviceRegistry.getOrCreate(id, () -> {
if (pickled) {
return new PickledGraphite(host, port);
} else {
return new Graphite(host, port);
}
});
if (instancePrefix == null) {
instancePrefix = registryName;
} else {
@ -110,8 +136,8 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS);
MetricFilter filter;
if (filterPrefix != null) {
filter = new SolrMetricManager.PrefixFilter(filterPrefix);
if (!filters.isEmpty()) {
filter = new SolrMetricManager.PrefixFilter(filters);
} else {
filter = MetricFilter.ALL;
}

View File

@ -16,15 +16,25 @@
*/
package org.apache.solr.metrics.reporters;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanServer;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.MetricRegistryListener;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricReporter;
import org.apache.solr.util.JmxUtil;
@ -34,17 +44,25 @@ import org.slf4j.LoggerFactory;
/**
* A {@link SolrMetricReporter} that finds (or creates) a MBeanServer from
* the given configuration and registers metrics to it with JMX.
* <p>NOTE: {@link JmxReporter} that this class uses exports only newly added metrics (it doesn't
* process already existing metrics in a registry)</p>
*/
public class SolrJmxReporter extends SolrMetricReporter {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private static final ReporterClientCache<MBeanServer> serviceRegistry = new ReporterClientCache<>();
private String domain;
private String agentId;
private String serviceUrl;
private String rootName;
private List<String> filters = new ArrayList<>();
private JmxReporter reporter;
private MetricRegistry registry;
private MBeanServer mBeanServer;
private MetricsMapListener listener;
/**
* Creates a new instance of {@link SolrJmxReporter}.
@ -57,7 +75,7 @@ public class SolrJmxReporter extends SolrMetricReporter {
}
/**
* Initializes the reporter by finding (or creating) a MBeanServer
* Initializes the reporter by finding an MBeanServer
* and registering the metricManager's metric registry.
*
* @param pluginInfo the configuration for the reporter
@ -65,44 +83,62 @@ public class SolrJmxReporter extends SolrMetricReporter {
@Override
public synchronized void init(PluginInfo pluginInfo) {
super.init(pluginInfo);
if (!enabled) {
log.info("Reporter disabled for registry " + registryName);
return;
}
log.debug("Initializing for registry " + registryName);
if (serviceUrl != null && agentId != null) {
ManagementFactory.getPlatformMBeanServer(); // Ensure at least one MBeanServer is available.
mBeanServer = JmxUtil.findFirstMBeanServer();
log.warn("No more than one of serviceUrl(%s) and agentId(%s) should be configured, using first MBeanServer instead of configuration.",
log.warn("No more than one of serviceUrl({}) and agentId({}) should be configured, using first MBeanServer instead of configuration.",
serviceUrl, agentId, mBeanServer);
}
else if (serviceUrl != null) {
try {
mBeanServer = JmxUtil.findMBeanServerForServiceUrl(serviceUrl);
} catch (IOException e) {
log.warn("findMBeanServerForServiceUrl(%s) exception: %s", serviceUrl, e);
mBeanServer = null;
}
}
else if (agentId != null) {
} else if (serviceUrl != null) {
// reuse existing services
mBeanServer = serviceRegistry.getOrCreate(serviceUrl, () -> JmxUtil.findMBeanServerForServiceUrl(serviceUrl));
} else if (agentId != null) {
mBeanServer = JmxUtil.findMBeanServerForAgentId(agentId);
} else {
ManagementFactory.getPlatformMBeanServer(); // Ensure at least one MBeanServer is available.
mBeanServer = JmxUtil.findFirstMBeanServer();
log.warn("No serviceUrl or agentId was configured, using first MBeanServer.", mBeanServer);
log.debug("No serviceUrl or agentId was configured, using first MBeanServer: " + mBeanServer);
}
if (mBeanServer == null) {
log.warn("No JMX server found. Not exposing Solr metrics.");
log.warn("No JMX server found. Not exposing Solr metrics via JMX.");
return;
}
JmxObjectNameFactory jmxObjectNameFactory = new JmxObjectNameFactory(pluginInfo.name, domain);
if (domain == null || domain.isEmpty()) {
domain = registryName;
}
String fullDomain = domain;
if (rootName != null && !rootName.isEmpty()) {
fullDomain = rootName + "." + domain;
}
JmxObjectNameFactory jmxObjectNameFactory = new JmxObjectNameFactory(pluginInfo.name, fullDomain);
registry = metricManager.registry(registryName);
// filter out MetricsMap gauges - we have a better way of handling them
MetricFilter mmFilter = (name, metric) -> !(metric instanceof MetricsMap);
MetricFilter filter;
if (filters.isEmpty()) {
filter = mmFilter;
} else {
// apply also prefix filters
SolrMetricManager.PrefixFilter prefixFilter = new SolrMetricManager.PrefixFilter(filters);
filter = new SolrMetricManager.AndFilter(prefixFilter, mmFilter);
}
reporter = JmxReporter.forRegistry(metricManager.registry(registryName))
reporter = JmxReporter.forRegistry(registry)
.registerWith(mBeanServer)
.inDomain(domain)
.inDomain(fullDomain)
.filter(filter)
.createsObjectNamesWith(jmxObjectNameFactory)
.build();
reporter.start();
// workaround for inability to register custom MBeans (to be available in metrics 4.0?)
listener = new MetricsMapListener(mBeanServer, jmxObjectNameFactory);
registry.addListener(listener);
log.info("JMX monitoring enabled at server: " + mBeanServer);
log.info("JMX monitoring for '" + fullDomain + "' (registry '" + registryName + "') enabled at server: " + mBeanServer);
}
/**
@ -114,6 +150,11 @@ public class SolrJmxReporter extends SolrMetricReporter {
reporter.close();
reporter = null;
}
if (listener != null && registry != null) {
registry.removeListener(listener);
listener.close();
listener = null;
}
}
/**
@ -127,9 +168,19 @@ public class SolrJmxReporter extends SolrMetricReporter {
// Nothing to validate
}
/**
* Set root name of the JMX hierarchy for this reporter. Default (null or empty) is none, ie.
* the hierarchy will start from the domain name.
* @param rootName root name of the JMX name hierarchy, or null or empty for default.
*/
public void setRootName(String rootName) {
this.rootName = rootName;
}
/**
* Sets the domain with which MBeans are published. If none is set,
* the domain defaults to the name of the core.
* the domain defaults to the name of the registry.
*
* @param domain the domain
*/
@ -162,7 +213,46 @@ public class SolrJmxReporter extends SolrMetricReporter {
}
/**
* Retrieves the reporter's MBeanServer.
* Return configured agentId or null.
*/
public String getAgentId() {
return agentId;
}
/**
* Return configured serviceUrl or null.
*/
public String getServiceUrl() {
return serviceUrl;
}
/**
* Return configured domain or null.
*/
public String getDomain() {
return domain;
}
/**
* Report only metrics with names matching any of the prefix filters.
* @param filters list of 0 or more prefixes. If the list is empty then
* all names will match.
*/
public void setFilter(List<String> filters) {
if (filters == null || filters.isEmpty()) {
return;
}
this.filters.addAll(filters);
}
public void setFilter(String filter) {
if (filter != null && !filter.isEmpty()) {
this.filters.add(filter);
}
}
/**
* Return the reporter's MBeanServer.
*
* @return the reporter's MBeanServer
*/
@ -170,10 +260,72 @@ public class SolrJmxReporter extends SolrMetricReporter {
return mBeanServer;
}
@Override
public String toString() {
return String.format(Locale.ENGLISH, "[%s@%s: domain = %s, service url = %s, agent id = %s]",
getClass().getName(), Integer.toHexString(hashCode()), domain, serviceUrl, agentId);
/**
* For unit tests.
* @return true if this reporter is actively reporting metrics to JMX.
*/
public boolean isActive() {
return reporter != null;
}
@Override
public String toString() {
return String.format(Locale.ENGLISH, "[%s@%s: rootName = %s, domain = %s, service url = %s, agent id = %s]",
getClass().getName(), Integer.toHexString(hashCode()), rootName, domain, serviceUrl, agentId);
}
private static class MetricsMapListener extends MetricRegistryListener.Base {
MBeanServer server;
JmxObjectNameFactory nameFactory;
// keep the names so that we can unregister them on core close
Set<ObjectName> registered = new HashSet<>();
MetricsMapListener(MBeanServer server, JmxObjectNameFactory nameFactory) {
this.server = server;
this.nameFactory = nameFactory;
}
@Override
public void onGaugeAdded(String name, Gauge<?> gauge) {
if (!(gauge instanceof MetricsMap)) {
return;
}
synchronized (server) {
try {
ObjectName objectName = nameFactory.createName("gauges", nameFactory.getDomain(), name);
log.debug("REGISTER " + objectName);
if (registered.contains(objectName) || server.isRegistered(objectName)) {
log.debug("-unregistering old instance of " + objectName);
try {
server.unregisterMBean(objectName);
} catch (InstanceNotFoundException e) {
// ignore
}
}
// some MBean servers re-write object name to include additional properties
ObjectInstance instance = server.registerMBean(gauge, objectName);
if (instance != null) {
registered.add(instance.getObjectName());
}
} catch (Exception e) {
log.warn("bean registration error", e);
}
}
}
public void close() {
synchronized (server) {
for (ObjectName name : registered) {
try {
if (server.isRegistered(name)) {
server.unregisterMBean(name);
}
} catch (Exception e) {
log.debug("bean unregistration error", e);
}
}
registered.clear();
}
}
}
}

View File

@ -18,6 +18,8 @@ package org.apache.solr.metrics.reporters;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.MetricFilter;
@ -47,7 +49,7 @@ public class SolrSlf4jReporter extends SolrMetricReporter {
private int period = 60;
private String instancePrefix = null;
private String logger = null;
private String filterPrefix = null;
private List<String> filters = new ArrayList<>();
private Slf4jReporter reporter;
/**
@ -65,10 +67,25 @@ public class SolrSlf4jReporter extends SolrMetricReporter {
this.instancePrefix = prefix;
}
public void setFilter(String filter) {
this.filterPrefix = filter;
/**
* Report only metrics with names matching any of the prefix filters.
* @param filters list of 0 or more prefixes. If the list is empty then
* all names will match.
*/
public void setFilter(List<String> filters) {
if (filters == null || filters.isEmpty()) {
return;
}
this.filters.addAll(filters);
}
public void setFilter(String filter) {
if (filter != null && !filter.isEmpty()) {
this.filters.add(filter);
}
}
public void setLogger(String logger) {
this.logger = logger;
}
@ -79,6 +96,10 @@ public class SolrSlf4jReporter extends SolrMetricReporter {
@Override
protected void validate() throws IllegalStateException {
if (!enabled) {
log.info("Reporter disabled for registry " + registryName);
return;
}
if (period < 1) {
throw new IllegalStateException("Init argument 'period' is in time unit 'seconds' and must be at least 1.");
}
@ -93,8 +114,8 @@ public class SolrSlf4jReporter extends SolrMetricReporter {
.convertDurationsTo(TimeUnit.MILLISECONDS);
MetricFilter filter;
if (filterPrefix != null) {
filter = new SolrMetricManager.PrefixFilter(filterPrefix);
if (!filters.isEmpty()) {
filter = new SolrMetricManager.PrefixFilter(filters);
} else {
filter = MetricFilter.ALL;
}

View File

@ -33,7 +33,7 @@ import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.handler.admin.MetricsCollectorHandler;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricReporter;
@ -92,14 +92,14 @@ import static org.apache.solr.common.params.CommonParams.ID;
public class SolrClusterReporter extends SolrMetricReporter {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final String CLUSTER_GROUP = SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.cluster.toString());
public static final String CLUSTER_GROUP = SolrMetricManager.overridableRegistryName(SolrInfoBean.Group.cluster.toString());
public static final List<SolrReporter.Report> DEFAULT_REPORTS = new ArrayList<SolrReporter.Report>() {{
add(new SolrReporter.Report(CLUSTER_GROUP, "jetty",
SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jetty.toString()),
SolrMetricManager.overridableRegistryName(SolrInfoBean.Group.jetty.toString()),
Collections.emptySet())); // all metrics
add(new SolrReporter.Report(CLUSTER_GROUP, "jvm",
SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jvm.toString()),
SolrMetricManager.overridableRegistryName(SolrInfoBean.Group.jvm.toString()),
new HashSet<String>() {{
add("memory\\.total\\..*");
add("memory\\.heap\\..*");
@ -109,7 +109,7 @@ public class SolrClusterReporter extends SolrMetricReporter {
add("os\\.OpenFileDescriptorCount");
add("threads\\.count");
}}));
add(new SolrReporter.Report(CLUSTER_GROUP, "node", SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.node.toString()),
add(new SolrReporter.Report(CLUSTER_GROUP, "node", SolrMetricManager.overridableRegistryName(SolrInfoBean.Group.node.toString()),
new HashSet<String>() {{
add("CONTAINER\\.cores\\..*");
add("CONTAINER\\.fs\\..*");
@ -159,6 +159,16 @@ public class SolrClusterReporter extends SolrMetricReporter {
});
}
public void setReport(Map map) {
if (map == null || map.isEmpty()) {
return;
}
SolrReporter.Report r = SolrReporter.Report.fromMap(map);
if (r != null) {
reports.add(r);
}
}
// for unit tests
int getPeriod() {
return period;
@ -170,9 +180,6 @@ public class SolrClusterReporter extends SolrMetricReporter {
@Override
protected void validate() throws IllegalStateException {
if (period < 1) {
log.info("Turning off node reporter, period=" + period);
}
if (reports.isEmpty()) { // set defaults
reports = DEFAULT_REPORTS;
}
@ -189,12 +196,17 @@ public class SolrClusterReporter extends SolrMetricReporter {
if (reporter != null) {
reporter.close();;
}
if (!enabled) {
log.info("Reporter disabled for registry " + registryName);
return;
}
// start reporter only in cloud mode
if (!cc.isZooKeeperAware()) {
log.warn("Not ZK-aware, not starting...");
return;
}
if (period < 1) { // don't start it
log.info("Turning off node reporter, period=" + period);
return;
}
HttpClient httpClient = cc.getUpdateShardHandler().getHttpClient();

View File

@ -98,7 +98,13 @@ public class SolrShardReporter extends SolrMetricReporter {
if (filterConfig == null || filterConfig.isEmpty()) {
return;
}
filters = filterConfig;
filters.addAll(filterConfig);
}
public void setFilter(String filter) {
if (filter != null && !filter.isEmpty()) {
this.filters.add(filter);
}
}
// for unit tests
@ -108,9 +114,6 @@ public class SolrShardReporter extends SolrMetricReporter {
@Override
protected void validate() throws IllegalStateException {
if (period < 1) {
log.info("Turning off shard reporter, period=" + period);
}
if (filters.isEmpty()) {
filters = DEFAULT_FILTERS;
}
@ -128,13 +131,17 @@ public class SolrShardReporter extends SolrMetricReporter {
if (reporter != null) {
reporter.close();
}
if (!enabled) {
log.info("Reporter disabled for registry " + registryName);
return;
}
if (core.getCoreDescriptor().getCloudDescriptor() == null) {
// not a cloud core
log.warn("Not initializing shard reporter for non-cloud core " + core.getName());
return;
}
if (period < 1) { // don't start it
log.warn("Not starting shard reporter ");
log.warn("period=" + period + ", not starting shard reporter ");
return;
}
// our id is coreNodeName

View File

@ -17,7 +17,7 @@
package org.apache.solr.request;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.response.SolrQueryResponse;
/**
@ -38,7 +38,7 @@ import org.apache.solr.response.SolrQueryResponse;
*
*
*/
public interface SolrRequestHandler extends SolrInfoMBean {
public interface SolrRequestHandler extends SolrInfoBean {
/** <code>init</code> will be called just once, immediately after creation.
* <p>The args are user-level initialization parameters that

View File

@ -15,15 +15,17 @@
* limitations under the License.
*/
package org.apache.solr.search;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.common.SolrException;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.util.ConcurrentLRUCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import java.io.Serializable;
import java.lang.invoke.MethodHandles;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -57,6 +59,10 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>
private long maxRamBytes;
private MetricsMap cacheMap;
private Set<String> metricNames = new HashSet<>();
private MetricRegistry registry;
@Override
public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
super.init(args, regenerator);
@ -215,68 +221,80 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>
}
@Override
public String getSource() {
return null;
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
registry = manager.registry(registryName);
cacheMap = new MetricsMap((detailed, map) -> {
if (cache != null) {
ConcurrentLRUCache.Stats stats = cache.getStats();
long lookups = stats.getCumulativeLookups();
long hits = stats.getCumulativeHits();
long inserts = stats.getCumulativePuts();
long evictions = stats.getCumulativeEvictions();
long size = stats.getCurrentSize();
long clookups = 0;
long chits = 0;
long cinserts = 0;
long cevictions = 0;
// NOTE: It is safe to iterate on a CopyOnWriteArrayList
for (ConcurrentLRUCache.Stats statistiscs : statsList) {
clookups += statistiscs.getCumulativeLookups();
chits += statistiscs.getCumulativeHits();
cinserts += statistiscs.getCumulativePuts();
cevictions += statistiscs.getCumulativeEvictions();
}
map.put("lookups", lookups);
map.put("hits", hits);
map.put("hitratio", calcHitRatio(lookups, hits));
map.put("inserts", inserts);
map.put("evictions", evictions);
map.put("size", size);
map.put("warmupTime", warmupTime);
map.put("cumulative_lookups", clookups);
map.put("cumulative_hits", chits);
map.put("cumulative_hitratio", calcHitRatio(clookups, chits));
map.put("cumulative_inserts", cinserts);
map.put("cumulative_evictions", cevictions);
if (detailed && showItems != 0) {
Map items = cache.getLatestAccessedItems( showItems == -1 ? Integer.MAX_VALUE : showItems );
for (Map.Entry e : (Set <Map.Entry>)items.entrySet()) {
Object k = e.getKey();
Object v = e.getValue();
String ks = "item_" + k;
String vs = v.toString();
map.put(ks,vs);
}
}
}
});
manager.registerGauge(this, registryName, cacheMap, true, scope, getCategory().toString());
}
// for unit tests only
MetricsMap getMetricsMap() {
return cacheMap;
}
@Override
public NamedList getStatistics() {
NamedList<Serializable> lst = new SimpleOrderedMap<>();
if (cache == null) return lst;
ConcurrentLRUCache.Stats stats = cache.getStats();
long lookups = stats.getCumulativeLookups();
long hits = stats.getCumulativeHits();
long inserts = stats.getCumulativePuts();
long evictions = stats.getCumulativeEvictions();
long size = stats.getCurrentSize();
long clookups = 0;
long chits = 0;
long cinserts = 0;
long cevictions = 0;
// NOTE: It is safe to iterate on a CopyOnWriteArrayList
for (ConcurrentLRUCache.Stats statistiscs : statsList) {
clookups += statistiscs.getCumulativeLookups();
chits += statistiscs.getCumulativeHits();
cinserts += statistiscs.getCumulativePuts();
cevictions += statistiscs.getCumulativeEvictions();
}
lst.add("lookups", lookups);
lst.add("hits", hits);
lst.add("hitratio", calcHitRatio(lookups, hits));
lst.add("inserts", inserts);
lst.add("evictions", evictions);
lst.add("size", size);
lst.add("warmupTime", warmupTime);
lst.add("cumulative_lookups", clookups);
lst.add("cumulative_hits", chits);
lst.add("cumulative_hitratio", calcHitRatio(clookups, chits));
lst.add("cumulative_inserts", cinserts);
lst.add("cumulative_evictions", cevictions);
if (showItems != 0) {
Map items = cache.getLatestAccessedItems( showItems == -1 ? Integer.MAX_VALUE : showItems );
for (Map.Entry e : (Set <Map.Entry>)items.entrySet()) {
Object k = e.getKey();
Object v = e.getValue();
String ks = "item_" + k;
String vs = v.toString();
lst.add(ks,vs);
}
}
return lst;
public MetricRegistry getMetricRegistry() {
return registry;
}
@Override
public String toString() {
return name() + getStatistics().toString();
return name() + cacheMap != null ? cacheMap.getValue().toString() : "";
}
}

View File

@ -15,19 +15,19 @@
* limitations under the License.
*/
package org.apache.solr.search;
import java.io.Serializable;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.util.ConcurrentLFUCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -64,6 +64,9 @@ public class LFUCache<K, V> implements SolrCache<K, V> {
private ConcurrentLFUCache<K, V> cache;
private int showItems = 0;
private Boolean timeDecay = true;
private MetricsMap cacheMap;
private Set<String> metricNames = new HashSet<>();
private MetricRegistry registry;
@Override
public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
@ -211,11 +214,6 @@ public class LFUCache<K, V> implements SolrCache<K, V> {
return LFUCache.class.getName();
}
@Override
public String getVersion() {
return SolrCore.version;
}
@Override
public String getDescription() {
return description;
@ -226,16 +224,6 @@ public class LFUCache<K, V> implements SolrCache<K, V> {
return Category.CACHE;
}
@Override
public String getSource() {
return null;
}
@Override
public URL[] getDocs() {
return null;
}
// returns a ratio, not a percent.
private static String calcHitRatio(long lookups, long hits) {
if (lookups == 0) return "0.00";
@ -246,62 +234,81 @@ public class LFUCache<K, V> implements SolrCache<K, V> {
}
@Override
public NamedList getStatistics() {
NamedList<Serializable> lst = new SimpleOrderedMap<>();
if (cache == null) return lst;
ConcurrentLFUCache.Stats stats = cache.getStats();
long lookups = stats.getCumulativeLookups();
long hits = stats.getCumulativeHits();
long inserts = stats.getCumulativePuts();
long evictions = stats.getCumulativeEvictions();
long size = stats.getCurrentSize();
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
registry = manager.registry(registryName);
cacheMap = new MetricsMap((detailed, map) -> {
if (cache != null) {
ConcurrentLFUCache.Stats stats = cache.getStats();
long lookups = stats.getCumulativeLookups();
long hits = stats.getCumulativeHits();
long inserts = stats.getCumulativePuts();
long evictions = stats.getCumulativeEvictions();
long size = stats.getCurrentSize();
lst.add("lookups", lookups);
lst.add("hits", hits);
lst.add("hitratio", calcHitRatio(lookups, hits));
lst.add("inserts", inserts);
lst.add("evictions", evictions);
lst.add("size", size);
map.put("lookups", lookups);
map.put("hits", hits);
map.put("hitratio", calcHitRatio(lookups, hits));
map.put("inserts", inserts);
map.put("evictions", evictions);
map.put("size", size);
lst.add("warmupTime", warmupTime);
lst.add("timeDecay", timeDecay);
map.put("warmupTime", warmupTime);
map.put("timeDecay", timeDecay);
long clookups = 0;
long chits = 0;
long cinserts = 0;
long cevictions = 0;
long clookups = 0;
long chits = 0;
long cinserts = 0;
long cevictions = 0;
// NOTE: It is safe to iterate on a CopyOnWriteArrayList
for (ConcurrentLFUCache.Stats statistics : statsList) {
clookups += statistics.getCumulativeLookups();
chits += statistics.getCumulativeHits();
cinserts += statistics.getCumulativePuts();
cevictions += statistics.getCumulativeEvictions();
}
lst.add("cumulative_lookups", clookups);
lst.add("cumulative_hits", chits);
lst.add("cumulative_hitratio", calcHitRatio(clookups, chits));
lst.add("cumulative_inserts", cinserts);
lst.add("cumulative_evictions", cevictions);
// NOTE: It is safe to iterate on a CopyOnWriteArrayList
for (ConcurrentLFUCache.Stats statistics : statsList) {
clookups += statistics.getCumulativeLookups();
chits += statistics.getCumulativeHits();
cinserts += statistics.getCumulativePuts();
cevictions += statistics.getCumulativeEvictions();
}
map.put("cumulative_lookups", clookups);
map.put("cumulative_hits", chits);
map.put("cumulative_hitratio", calcHitRatio(clookups, chits));
map.put("cumulative_inserts", cinserts);
map.put("cumulative_evictions", cevictions);
if (showItems != 0) {
Map items = cache.getMostUsedItems(showItems == -1 ? Integer.MAX_VALUE : showItems);
for (Map.Entry e : (Set<Map.Entry>) items.entrySet()) {
Object k = e.getKey();
Object v = e.getValue();
if (detailed && showItems != 0) {
Map items = cache.getMostUsedItems(showItems == -1 ? Integer.MAX_VALUE : showItems);
for (Map.Entry e : (Set<Map.Entry>) items.entrySet()) {
Object k = e.getKey();
Object v = e.getValue();
String ks = "item_" + k;
String vs = v.toString();
map.put(ks, vs);
}
}
String ks = "item_" + k;
String vs = v.toString();
lst.add(ks, vs);
}
});
manager.registerGauge(this, registryName, cacheMap, true, scope, getCategory().toString());
}
}
// for unit tests only
MetricsMap getMetricsMap() {
return cacheMap;
}
return lst;
@Override
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public MetricRegistry getMetricRegistry() {
return registry;
}
@Override
public String toString() {
return name + getStatistics().toString();
return name + cacheMap != null ? cacheMap.getValue().toString() : "";
}
}

View File

@ -19,18 +19,21 @@ package org.apache.solr.search;
import java.lang.invoke.MethodHandles;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.LongAdder;
import com.codahale.metrics.MetricRegistry;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -55,6 +58,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
static final long LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY =
HASHTABLE_RAM_BYTES_PER_ENTRY
+ 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF; // previous & next references
/// End copied code
/* An instance of this class will be shared across multiple instances
@ -82,6 +86,9 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
private Map<K,V> map;
private String description="LRU Cache";
private MetricsMap cacheMap;
private Set<String> metricNames = new HashSet<>();
private MetricRegistry registry;
private long maxRamBytes = Long.MAX_VALUE;
// The synchronization used for the map will be used to update this,
@ -319,45 +326,56 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
}
@Override
public String getSource() {
return null;
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public NamedList getStatistics() {
NamedList lst = new SimpleOrderedMap();
synchronized (map) {
lst.add("lookups", lookups);
lst.add("hits", hits);
lst.add("hitratio", calcHitRatio(lookups,hits));
lst.add("inserts", inserts);
lst.add("evictions", evictions);
lst.add("size", map.size());
if (maxRamBytes != Long.MAX_VALUE) {
lst.add("maxRamMB", maxRamBytes / 1024L / 1024L);
lst.add("ramBytesUsed", ramBytesUsed());
lst.add("evictionsRamUsage", evictionsRamUsage);
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
registry = manager.registry(registryName);
cacheMap = new MetricsMap((detailed, res) -> {
synchronized (map) {
res.put("lookups", lookups);
res.put("hits", hits);
res.put("hitratio", calcHitRatio(lookups,hits));
res.put("inserts", inserts);
res.put("evictions", evictions);
res.put("size", map.size());
if (maxRamBytes != Long.MAX_VALUE) {
res.put("maxRamMB", maxRamBytes / 1024L / 1024L);
res.put("ramBytesUsed", ramBytesUsed());
res.put("evictionsRamUsage", evictionsRamUsage);
}
}
}
lst.add("warmupTime", warmupTime);
long clookups = stats.lookups.longValue();
long chits = stats.hits.longValue();
lst.add("cumulative_lookups", clookups);
lst.add("cumulative_hits", chits);
lst.add("cumulative_hitratio", calcHitRatio(clookups, chits));
lst.add("cumulative_inserts", stats.inserts.longValue());
lst.add("cumulative_evictions", stats.evictions.longValue());
if (maxRamBytes != Long.MAX_VALUE) {
lst.add("cumulative_evictionsRamUsage", stats.evictionsRamUsage.longValue());
}
return lst;
res.put("warmupTime", warmupTime);
long clookups = stats.lookups.longValue();
long chits = stats.hits.longValue();
res.put("cumulative_lookups", clookups);
res.put("cumulative_hits", chits);
res.put("cumulative_hitratio", calcHitRatio(clookups, chits));
res.put("cumulative_inserts", stats.inserts.longValue());
res.put("cumulative_evictions", stats.evictions.longValue());
if (maxRamBytes != Long.MAX_VALUE) {
res.put("cumulative_evictionsRamUsage", stats.evictionsRamUsage.longValue());
}
});
manager.registerGauge(this, registryName, cacheMap, true, scope, getCategory().toString());
}
// for unit tests only
MetricsMap getMetricsMap() {
return cacheMap;
}
@Override
public MetricRegistry getMetricRegistry() {
return registry;
}
@Override
public String toString() {
return name() + getStatistics().toString();
return name() + cacheMap != null ? cacheMap.getValue().toString() : "";
}
@Override

View File

@ -16,14 +16,14 @@
*/
package org.apache.solr.search;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.join.BlockJoinChildQParserPlugin;
import org.apache.solr.search.join.BlockJoinParentQParserPlugin;
@ -31,7 +31,7 @@ import org.apache.solr.search.join.GraphQParserPlugin;
import org.apache.solr.search.mlt.MLTQParserPlugin;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrInfoMBean {
public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrInfoBean {
/** internal use - name of the default parser */
public static final String DEFAULT_QTYPE = LuceneQParserPlugin.NAME;
@ -98,11 +98,6 @@ public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrI
return this.getClass().getName();
}
@Override
public String getVersion() {
return null;
}
@Override
public String getDescription() {
return ""; // UI required non-null to work
@ -114,19 +109,10 @@ public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrI
}
@Override
public String getSource() {
public Set<String> getMetricNames() {
return null;
}
@Override
public URL[] getDocs() {
return new URL[0];
}
@Override
public NamedList getStatistics() {
return null;
}
}

View File

@ -16,7 +16,8 @@
*/
package org.apache.solr.search;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.SolrMetricProducer;
import java.util.Map;
@ -24,7 +25,7 @@ import java.util.Map;
/**
* Primary API for dealing with Solr's internal caches.
*/
public interface SolrCache<K,V> extends SolrInfoMBean {
public interface SolrCache<K,V> extends SolrInfoBean, SolrMetricProducer {
/**
* The initialization routine. Instance specific arguments are passed in

View File

@ -18,11 +18,10 @@ package org.apache.solr.search;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.net.URL;
import java.util.Map;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean.Category;
import org.apache.solr.core.SolrInfoBean.Category;
import org.apache.solr.search.SolrCache.State;
import static org.apache.solr.common.params.CommonParams.NAME;
@ -106,10 +105,6 @@ public abstract class SolrCacheBase {
return Category.CACHE;
}
public URL[] getDocs() {
return null;
}
public void init(Map<String, String> args, CacheRegenerator regenerator) {
this.regenerator = regenerator;
state = State.CREATED;

View File

@ -0,0 +1,77 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import java.util.HashSet;
import java.util.Set;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.uninverting.UninvertingReader;
/**
* A SolrInfoBean that provides introspection of the Solr FieldCache
*
*/
public class SolrFieldCacheBean implements SolrInfoBean, SolrMetricProducer {

  // System-property opt-outs for listing individual FieldCache entries (listing can be
  // large/expensive). Property names keep the old "MBean" spelling for back-compat with
  // the pre-SOLR-9959 SolrFieldCacheMBean. NOTE(review): both flags must be false for
  // entries to be listed — confirm that honoring the Jmx flag here too is intended.
  private boolean disableEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryList");
  private boolean disableJmxEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryListJmx");

  // Registry this bean's metrics were registered in; set in initializeMetrics().
  private MetricRegistry registry;
  // Names of metrics owned by this bean (exposed via getMetricNames()).
  private Set<String> metricNames = new HashSet<>();

  /** Bean name: the fully-qualified class name. */
  @Override
  public String getName() { return this.getClass().getName(); }

  @Override
  public String getDescription() {
    return "Provides introspection of the Solr FieldCache ";
  }

  @Override
  public Category getCategory() { return Category.CACHE; }

  @Override
  public Set<String> getMetricNames() {
    return metricNames;
  }

  @Override
  public MetricRegistry getMetricRegistry() {
    return registry;
  }

  /**
   * Registers a single "fieldCache" {@link MetricsMap} gauge under
   * CACHE.{scope}. In detailed mode (and when not disabled via the system
   * properties above) the map contains one "entry#N" key per FieldCache entry
   * plus "entries_count" and "total_size"; otherwise only "entries_count".
   */
  @Override
  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
    registry = manager.registry(registryName);
    MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
      if (detailed && !disableEntryList && !disableJmxEntryList) {
        // Snapshot of the uninverted-field cache, taken on each metrics read.
        UninvertingReader.FieldCacheStats fieldCacheStats = UninvertingReader.getUninvertedStats();
        String[] entries = fieldCacheStats.info;
        map.put("entries_count", entries.length);
        map.put("total_size", fieldCacheStats.totalSize);
        for (int i = 0; i < entries.length; i++) {
          final String entry = entries[i];
          map.put("entry#" + i, entry);
        }
      } else {
        // Cheap path: report only the number of entries, not their details.
        map.put("entries_count", UninvertingReader.getUninvertedStatsSize());
      }
    });
    manager.register(this, registryName, metricsMap, true, "fieldCache", Category.CACHE.toString(), scope);
  }
}

View File

@ -1,78 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import java.net.URL;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.JmxMonitoredMap.JmxAugmentedSolrInfoMBean;
import org.apache.solr.core.SolrCore;
import org.apache.solr.uninverting.UninvertingReader;
/**
* A SolrInfoMBean that provides introspection of the Solr FieldCache
*
*/
public class SolrFieldCacheMBean implements JmxAugmentedSolrInfoMBean {
private boolean disableEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryList");
private boolean disableJmxEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryListJmx");
@Override
public String getName() { return this.getClass().getName(); }
@Override
public String getVersion() { return SolrCore.version; }
@Override
public String getDescription() {
return "Provides introspection of the Solr FieldCache ";
}
@Override
public Category getCategory() { return Category.CACHE; }
@Override
public String getSource() { return null; }
@Override
public URL[] getDocs() {
return null;
}
@Override
public NamedList getStatistics() {
return getStats(!disableEntryList);
}
@Override
public NamedList getStatisticsForJmx() {
return getStats(!disableEntryList && !disableJmxEntryList);
}
private NamedList getStats(boolean listEntries) {
NamedList stats = new SimpleOrderedMap();
if (listEntries) {
UninvertingReader.FieldCacheStats fieldCacheStats = UninvertingReader.getUninvertedStats();
String[] entries = fieldCacheStats.info;
stats.add("entries_count", entries.length);
stats.add("total_size", fieldCacheStats.totalSize);
for (int i = 0; i < entries.length; i++) {
stats.add("entry#" + i, entries[i]);
}
} else {
stats.add("entries_count", UninvertingReader.getUninvertedStatsSize());
}
return stats;
}
}

View File

@ -19,13 +19,13 @@ package org.apache.solr.search;
import java.io.Closeable;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -34,6 +34,7 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.Iterables;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
@ -58,15 +59,15 @@ import org.apache.lucene.util.FixedBitSet;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.ObjectReleaseTracker;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.index.SlowCompositeReaderWrapper;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestInfo;
@ -86,7 +87,7 @@ import org.slf4j.LoggerFactory;
*
* @since solr 0.9
*/
public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrInfoMBean {
public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrInfoBean, SolrMetricProducer {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@ -136,7 +137,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
private final String path;
private boolean releaseDirectory;
private final NamedList<Object> readerStats;
private Set<String> metricNames = new HashSet<>();
private static DirectoryReader getReader(SolrCore core, SolrIndexConfig config, DirectoryFactory directoryFactory,
String path) throws IOException {
@ -302,7 +303,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
// We already have our own filter cache
setQueryCache(null);
readerStats = snapStatistics(reader);
// do this at the end since an exception in the constructor means we won't close
numOpens.incrementAndGet();
assert ObjectReleaseTracker.track(this);
@ -404,10 +404,10 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
}
/**
* Register sub-objects such as caches
* Register sub-objects such as caches and our own metrics
*/
public void register() {
final Map<String,SolrInfoMBean> infoRegistry = core.getInfoRegistry();
final Map<String,SolrInfoBean> infoRegistry = core.getInfoRegistry();
// register self
infoRegistry.put(STATISTICS_KEY, this);
infoRegistry.put(name, this);
@ -415,6 +415,12 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
cache.setState(SolrCache.State.LIVE);
infoRegistry.put(cache.name(), cache);
}
SolrMetricManager manager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
String registry = core.getCoreMetricManager().getRegistryName();
for (SolrCache cache : cacheList) {
cache.initializeMetrics(manager, registry, SolrMetricManager.mkName(cache.name(), STATISTICS_KEY));
}
initializeMetrics(manager, registry, STATISTICS_KEY);
registerTime = new Date();
}
@ -2190,7 +2196,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
/////////////////////////////////////////////////////////////////////
// SolrInfoMBean stuff: Statistics and Module Info
// SolrInfoBean stuff: Statistics and Module Info
/////////////////////////////////////////////////////////////////////
@Override
@ -2198,11 +2204,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
return SolrIndexSearcher.class.getName();
}
@Override
public String getVersion() {
return SolrCore.version;
}
@Override
public String getDescription() {
return "index searcher";
@ -2214,38 +2215,31 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
}
@Override
public String getSource() {
return null;
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public URL[] getDocs() {
return null;
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
manager.registerGauge(this, registry, () -> name, true, "searcherName", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> cachingEnabled, true, "caching", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> openTime, true, "openedAt", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> warmupTime, true, "warmupTime", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> registerTime, true, "registeredAt", Category.SEARCHER.toString(), scope);
// reader stats
manager.registerGauge(this, registry, () -> reader.numDocs(), true, "numDocs", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> reader.maxDoc(), true, "maxDoc", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> reader.maxDoc() - reader.numDocs(), true, "deletedDocs", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> reader.toString(), true, "reader", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> reader.directory().toString(), true, "readerDir", Category.SEARCHER.toString(), scope);
manager.registerGauge(this, registry, () -> reader.getVersion(), true, "indexVersion", Category.SEARCHER.toString(), scope);
}
@Override
public NamedList<Object> getStatistics() {
final NamedList<Object> lst = new SimpleOrderedMap<>();
lst.add("searcherName", name);
lst.add("caching", cachingEnabled);
lst.addAll(readerStats);
lst.add("openedAt", openTime);
if (registerTime != null) lst.add("registeredAt", registerTime);
lst.add("warmupTime", warmupTime);
return lst;
}
static private NamedList<Object> snapStatistics(DirectoryReader reader) {
final NamedList<Object> lst = new SimpleOrderedMap<>();
lst.add("numDocs", reader.numDocs());
lst.add("maxDoc", reader.maxDoc());
lst.add("deletedDocs", reader.maxDoc() - reader.numDocs());
lst.add("reader", reader.toString());
lst.add("readerDir", reader.directory());
lst.add("indexVersion", reader.getVersion());
return lst;
public MetricRegistry getMetricRegistry() {
return core.getMetricRegistry();
}
private static class FilterImpl extends Filter {

View File

@ -319,12 +319,6 @@ public class FacetModule extends SearchComponent {
public Category getCategory() {
return Category.QUERY;
}
@Override
public String getSource() {
return null;
}
}

View File

@ -16,7 +16,6 @@
*/
package org.apache.solr.servlet;
import javax.management.MBeanServer;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
@ -34,7 +33,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.invoke.MethodHandles;
import java.lang.management.ManagementFactory;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
@ -47,7 +45,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.codahale.metrics.jvm.BufferPoolMetricSet;
import com.codahale.metrics.jvm.ClassLoadingGaugeSet;
import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
@ -66,9 +63,10 @@ import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.NodeConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.core.SolrXmlConfig;
import org.apache.solr.metrics.AltBufferPoolMetricSet;
import org.apache.solr.metrics.OperatingSystemMetricSet;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.request.SolrRequestInfo;
@ -185,13 +183,12 @@ public class SolrDispatchFilter extends BaseSolrFilter {
}
private void setupJvmMetrics() {
MBeanServer platformMBeanServer = ManagementFactory.getPlatformMBeanServer();
SolrMetricManager metricManager = cores.getMetricManager();
try {
String registry = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.jvm);
metricManager.registerAll(registry, new BufferPoolMetricSet(platformMBeanServer), true, "buffers");
String registry = SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm);
metricManager.registerAll(registry, new AltBufferPoolMetricSet(), true, "buffers");
metricManager.registerAll(registry, new ClassLoadingGaugeSet(), true, "classes");
metricManager.registerAll(registry, new OperatingSystemMetricSet(platformMBeanServer), true, "os");
metricManager.registerAll(registry, new OperatingSystemMetricSet(), true, "os");
metricManager.registerAll(registry, new GarbageCollectorMetricSet(), true, "gc");
metricManager.registerAll(registry, new MemoryUsageGaugeSet(), true, "memory");
metricManager.registerAll(registry, new ThreadStatesGaugeSet(), true, "threads"); // todo should we use CachedThreadStatesGaugeSet instead?

View File

@ -16,20 +16,23 @@
*/
package org.apache.solr.store.blockcache;
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrInfoMBean;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.search.SolrCacheBase;
/**
* A {@link SolrInfoMBean} that provides metrics on block cache operations.
* A {@link SolrInfoBean} that provides metrics on block cache operations.
*
* @lucene.experimental
*/
public class Metrics extends SolrCacheBase implements SolrInfoMBean {
public class Metrics extends SolrCacheBase implements SolrInfoBean, SolrMetricProducer {
public AtomicLong blockCacheSize = new AtomicLong(0);
@ -50,66 +53,70 @@ public class Metrics extends SolrCacheBase implements SolrInfoMBean {
public AtomicLong shardBuffercacheAllocate = new AtomicLong(0);
public AtomicLong shardBuffercacheLost = new AtomicLong(0);
private MetricsMap metricsMap;
private MetricRegistry registry;
private Set<String> metricNames = new HashSet<>();
private long previous = System.nanoTime();
@Override
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
registry = manager.registry(registryName);
metricsMap = new MetricsMap((detailed, map) -> {
long now = System.nanoTime();
long delta = Math.max(now - previous, 1);
double seconds = delta / 1000000000.0;
public NamedList<Number> getStatistics() {
NamedList<Number> stats = new SimpleOrderedMap<>(21); // room for one method call before growing
long hits_total = blockCacheHit.get();
long hits_delta = hits_total - blockCacheHit_last.get();
blockCacheHit_last.set(hits_total);
long now = System.nanoTime();
long delta = Math.max(now - previous, 1);
double seconds = delta / 1000000000.0;
long miss_total = blockCacheMiss.get();
long miss_delta = miss_total - blockCacheMiss_last.get();
blockCacheMiss_last.set(miss_total);
long hits_total = blockCacheHit.get();
long hits_delta = hits_total - blockCacheHit_last.get();
blockCacheHit_last.set(hits_total);
long evict_total = blockCacheEviction.get();
long evict_delta = evict_total - blockCacheEviction_last.get();
blockCacheEviction_last.set(evict_total);
long miss_total = blockCacheMiss.get();
long miss_delta = miss_total - blockCacheMiss_last.get();
blockCacheMiss_last.set(miss_total);
long storeFail_total = blockCacheStoreFail.get();
long storeFail_delta = storeFail_total - blockCacheStoreFail_last.get();
blockCacheStoreFail_last.set(storeFail_total);
long evict_total = blockCacheEviction.get();
long evict_delta = evict_total - blockCacheEviction_last.get();
blockCacheEviction_last.set(evict_total);
long lookups_delta = hits_delta + miss_delta;
long lookups_total = hits_total + miss_total;
long storeFail_total = blockCacheStoreFail.get();
long storeFail_delta = storeFail_total - blockCacheStoreFail_last.get();
blockCacheStoreFail_last.set(storeFail_total);
map.put("size", blockCacheSize.get());
map.put("lookups", lookups_total);
map.put("hits", hits_total);
map.put("evictions", evict_total);
map.put("storeFails", storeFail_total);
map.put("hitratio_current", calcHitRatio(lookups_delta, hits_delta)); // hit ratio since the last call
map.put("lookups_persec", getPerSecond(lookups_delta, seconds)); // lookups per second since the last call
map.put("hits_persec", getPerSecond(hits_delta, seconds)); // hits per second since the last call
map.put("evictions_persec", getPerSecond(evict_delta, seconds)); // evictions per second since the last call
map.put("storeFails_persec", getPerSecond(storeFail_delta, seconds)); // store failures per second since the last call
map.put("time_delta", seconds); // seconds since last call
long lookups_delta = hits_delta + miss_delta;
long lookups_total = hits_total + miss_total;
// TODO: these aren't really related to the BlockCache
map.put("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
map.put("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
stats.add("size", blockCacheSize.get());
stats.add("lookups", lookups_total);
stats.add("hits", hits_total);
stats.add("evictions", evict_total);
stats.add("storeFails", storeFail_total);
stats.add("hitratio_current", calcHitRatio(lookups_delta, hits_delta)); // hit ratio since the last call
stats.add("lookups_persec", getPerSecond(lookups_delta, seconds)); // lookups per second since the last call
stats.add("hits_persec", getPerSecond(hits_delta, seconds)); // hits per second since the last call
stats.add("evictions_persec", getPerSecond(evict_delta, seconds)); // evictions per second since the last call
stats.add("storeFails_persec", getPerSecond(storeFail_delta, seconds)); // evictions per second since the last call
stats.add("time_delta", seconds); // seconds since last call
previous = now;
// TODO: these aren't really related to the BlockCache
stats.add("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
stats.add("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
previous = now;
return stats;
});
manager.registerGauge(this, registryName, metricsMap, true, getName(), getCategory().toString(), scope);
}
private float getPerSecond(long value, double seconds) {
return (float) (value / seconds);
}
// SolrInfoMBean methods
// SolrInfoBean methods
@Override
public String getName() {
return "HdfsBlockCache";
return "hdfsBlockCache";
}
@Override
@ -118,12 +125,13 @@ public class Metrics extends SolrCacheBase implements SolrInfoMBean {
}
@Override
public String getSource() {
return null;
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public URL[] getDocs() {
return null;
public MetricRegistry getMetricRegistry() {
return registry;
}
}

View File

@ -18,8 +18,8 @@ package org.apache.solr.store.hdfs;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -27,16 +27,18 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import com.codahale.metrics.MetricRegistry;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class HdfsLocalityReporter implements SolrInfoMBean {
public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
public static final String LOCALITY_BYTES_TOTAL = "locality.bytes.total";
public static final String LOCALITY_BYTES_LOCAL = "locality.bytes.local";
public static final String LOCALITY_BYTES_RATIO = "locality.bytes.ratio";
@ -49,6 +51,9 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
private String hostname;
private final ConcurrentMap<HdfsDirectory,ConcurrentMap<FileStatus,BlockLocation[]>> cache;
private final Set<String> metricNames = new HashSet<>();
private MetricRegistry registry;
public HdfsLocalityReporter() {
cache = new ConcurrentHashMap<>();
}
@ -66,11 +71,6 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
return "hdfs-locality";
}
@Override
public String getVersion() {
return getClass().getPackage().getSpecificationVersion();
}
@Override
public String getDescription() {
return "Provides metrics for HDFS data locality.";
@ -82,89 +82,71 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
}
@Override
public String getSource() {
return null;
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public URL[] getDocs() {
return null;
public MetricRegistry getMetricRegistry() {
return registry;
}
/**
* Provide statistics on HDFS block locality, both in terms of bytes and block counts.
*/
@Override
public NamedList getStatistics() {
long totalBytes = 0;
long localBytes = 0;
int totalCount = 0;
int localCount = 0;
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
registry = manager.registry(registryName);
MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
long totalBytes = 0;
long localBytes = 0;
int totalCount = 0;
int localCount = 0;
for (Iterator<HdfsDirectory> iterator = cache.keySet().iterator(); iterator.hasNext();) {
HdfsDirectory hdfsDirectory = iterator.next();
for (Iterator<HdfsDirectory> iterator = cache.keySet().iterator(); iterator.hasNext();) {
HdfsDirectory hdfsDirectory = iterator.next();
if (hdfsDirectory.isClosed()) {
iterator.remove();
} else {
try {
refreshDirectory(hdfsDirectory);
Map<FileStatus,BlockLocation[]> blockMap = cache.get(hdfsDirectory);
if (hdfsDirectory.isClosed()) {
iterator.remove();
} else {
try {
refreshDirectory(hdfsDirectory);
Map<FileStatus,BlockLocation[]> blockMap = cache.get(hdfsDirectory);
// For every block in every file in this directory, count it
for (BlockLocation[] locations : blockMap.values()) {
for (BlockLocation bl : locations) {
totalBytes += bl.getLength();
totalCount++;
// For every block in every file in this directory, count it
for (BlockLocation[] locations : blockMap.values()) {
for (BlockLocation bl : locations) {
totalBytes += bl.getLength();
totalCount++;
if (Arrays.asList(bl.getHosts()).contains(hostname)) {
localBytes += bl.getLength();
localCount++;
if (Arrays.asList(bl.getHosts()).contains(hostname)) {
localBytes += bl.getLength();
localCount++;
}
}
}
} catch (IOException e) {
logger.warn("Could not retrieve locality information for {} due to exception: {}",
hdfsDirectory.getHdfsDirPath(), e);
}
} catch (IOException e) {
logger.warn("Could not retrieve locality information for {} due to exception: {}",
hdfsDirectory.getHdfsDirPath(), e);
}
}
}
return createStatistics(totalBytes, localBytes, totalCount, localCount);
}
/**
* Generate a statistics object based on the given measurements for all files monitored by this reporter.
*
* @param totalBytes
* The total bytes used
* @param localBytes
* The amount of bytes found on local nodes
* @param totalCount
* The total block count
* @param localCount
* The amount of blocks found on local nodes
* @return HDFS block locality statistics
*/
private NamedList<Number> createStatistics(long totalBytes, long localBytes, int totalCount, int localCount) {
NamedList<Number> statistics = new SimpleOrderedMap<Number>();
statistics.add(LOCALITY_BYTES_TOTAL, totalBytes);
statistics.add(LOCALITY_BYTES_LOCAL, localBytes);
if (localBytes == 0) {
statistics.add(LOCALITY_BYTES_RATIO, 0);
} else {
statistics.add(LOCALITY_BYTES_RATIO, localBytes / (double) totalBytes);
}
statistics.add(LOCALITY_BLOCKS_TOTAL, totalCount);
statistics.add(LOCALITY_BLOCKS_LOCAL, localCount);
if (localCount == 0) {
statistics.add(LOCALITY_BLOCKS_RATIO, 0);
} else {
statistics.add(LOCALITY_BLOCKS_RATIO, localCount / (double) totalCount);
}
return statistics;
map.put(LOCALITY_BYTES_TOTAL, totalBytes);
map.put(LOCALITY_BYTES_LOCAL, localBytes);
if (localBytes == 0) {
map.put(LOCALITY_BYTES_RATIO, 0);
} else {
map.put(LOCALITY_BYTES_RATIO, localBytes / (double) totalBytes);
}
map.put(LOCALITY_BLOCKS_TOTAL, totalCount);
map.put(LOCALITY_BLOCKS_LOCAL, localCount);
if (localCount == 0) {
map.put(LOCALITY_BLOCKS_RATIO, 0);
} else {
map.put(LOCALITY_BLOCKS_RATIO, localCount / (double) totalCount);
}
});
manager.registerGauge(this, registryName, metricsMap, true, "hdfsLocality", getCategory().toString(), scope);
}
/**
@ -209,4 +191,5 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
}
}
}
}

View File

@ -18,7 +18,6 @@ package org.apache.solr.update;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
@ -48,8 +47,6 @@ import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrConfig.UpdateHandlerInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.metrics.SolrMetricManager;
@ -162,24 +159,40 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
commitCommands = manager.meter(registry, "commits", getCategory().toString(), scope);
manager.registerGauge(registry, () -> commitTracker.getCommitCount(), true, "autoCommits", getCategory().toString(), scope);
manager.registerGauge(registry, () -> softCommitTracker.getCommitCount(), true, "softAutoCommits", getCategory().toString(), scope);
optimizeCommands = manager.meter(registry, "optimizes", getCategory().toString(), scope);
rollbackCommands = manager.meter(registry, "rollbacks", getCategory().toString(), scope);
splitCommands = manager.meter(registry, "splits", getCategory().toString(), scope);
mergeIndexesCommands = manager.meter(registry, "merges", getCategory().toString(), scope);
expungeDeleteCommands = manager.meter(registry, "expungeDeletes", getCategory().toString(), scope);
manager.registerGauge(registry, () -> numDocsPending.longValue(), true, "docsPending", getCategory().toString(), scope);
manager.registerGauge(registry, () -> addCommands.longValue(), true, "adds", getCategory().toString(), scope);
manager.registerGauge(registry, () -> deleteByIdCommands.longValue(), true, "deletesById", getCategory().toString(), scope);
manager.registerGauge(registry, () -> deleteByQueryCommands.longValue(), true, "deletesByQuery", getCategory().toString(), scope);
manager.registerGauge(registry, () -> numErrors.longValue(), true, "errors", getCategory().toString(), scope);
commitCommands = manager.meter(this, registry, "commits", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> commitTracker.getCommitCount(), true, "autoCommits", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> softCommitTracker.getCommitCount(), true, "softAutoCommits", getCategory().toString(), scope);
if (commitTracker.getDocsUpperBound() > 0) {
manager.registerGauge(this, registry, () -> commitTracker.getDocsUpperBound(), true, "autoCommitMaxDocs",
getCategory().toString(), scope);
}
if (commitTracker.getTimeUpperBound() > 0) {
manager.registerGauge(this, registry, () -> "" + commitTracker.getTimeUpperBound() + "ms", true, "autoCommitMaxTime",
getCategory().toString(), scope);
}
if (softCommitTracker.getDocsUpperBound() > 0) {
manager.registerGauge(this, registry, () -> softCommitTracker.getDocsUpperBound(), true, "softAutoCommitMaxDocs",
getCategory().toString(), scope);
}
if (softCommitTracker.getTimeUpperBound() > 0) {
manager.registerGauge(this, registry, () -> "" + softCommitTracker.getTimeUpperBound() + "ms", true, "softAutoCommitMaxTime",
getCategory().toString(), scope);
}
optimizeCommands = manager.meter(this, registry, "optimizes", getCategory().toString(), scope);
rollbackCommands = manager.meter(this, registry, "rollbacks", getCategory().toString(), scope);
splitCommands = manager.meter(this, registry, "splits", getCategory().toString(), scope);
mergeIndexesCommands = manager.meter(this, registry, "merges", getCategory().toString(), scope);
expungeDeleteCommands = manager.meter(this, registry, "expungeDeletes", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> numDocsPending.longValue(), true, "docsPending", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> addCommands.longValue(), true, "adds", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> deleteByIdCommands.longValue(), true, "deletesById", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> deleteByQueryCommands.longValue(), true, "deletesByQuery", getCategory().toString(), scope);
manager.registerGauge(this, registry, () -> numErrors.longValue(), true, "errors", getCategory().toString(), scope);
addCommandsCumulative = manager.meter(registry, "cumulativeAdds", getCategory().toString(), scope);
deleteByIdCommandsCumulative = manager.meter(registry, "cumulativeDeletesById", getCategory().toString(), scope);
deleteByQueryCommandsCumulative = manager.meter(registry, "cumulativeDeletesByQuery", getCategory().toString(), scope);
numErrorsCumulative = manager.meter(registry, "cumulativeErrors", getCategory().toString(), scope);
addCommandsCumulative = manager.meter(this, registry, "cumulativeAdds", getCategory().toString(), scope);
deleteByIdCommandsCumulative = manager.meter(this, registry, "cumulativeDeletesById", getCategory().toString(), scope);
deleteByQueryCommandsCumulative = manager.meter(this, registry, "cumulativeDeletesByQuery", getCategory().toString(), scope);
numErrorsCumulative = manager.meter(this, registry, "cumulativeErrors", getCategory().toString(), scope);
}
private void deleteAll() throws IOException {
@ -951,7 +964,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
/////////////////////////////////////////////////////////////////////
// SolrInfoMBean stuff: Statistics and Module Info
// SolrInfoBean stuff: Statistics and Module Info
/////////////////////////////////////////////////////////////////////
@Override
@ -959,70 +972,11 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
return DirectUpdateHandler2.class.getName();
}
@Override
public String getVersion() {
return SolrCore.version;
}
@Override
public String getDescription() {
return "Update handler that efficiently directly updates the on-disk main lucene index";
}
@Override
public String getSource() {
return null;
}
@Override
public URL[] getDocs() {
return null;
}
@Override
public NamedList getStatistics() {
NamedList lst = new SimpleOrderedMap();
lst.add("commits", commitCommands.getCount());
if (commitTracker.getDocsUpperBound() > 0) {
lst.add("autocommit maxDocs", commitTracker.getDocsUpperBound());
}
if (commitTracker.getTimeUpperBound() > 0) {
lst.add("autocommit maxTime", "" + commitTracker.getTimeUpperBound() + "ms");
}
lst.add("autocommits", commitTracker.getCommitCount());
if (softCommitTracker.getDocsUpperBound() > 0) {
lst.add("soft autocommit maxDocs", softCommitTracker.getDocsUpperBound());
}
if (softCommitTracker.getTimeUpperBound() > 0) {
lst.add("soft autocommit maxTime", "" + softCommitTracker.getTimeUpperBound() + "ms");
}
lst.add("soft autocommits", softCommitTracker.getCommitCount());
lst.add("optimizes", optimizeCommands.getCount());
lst.add("rollbacks", rollbackCommands.getCount());
lst.add("expungeDeletes", expungeDeleteCommands.getCount());
lst.add("docsPending", numDocsPending.longValue());
// pset.size() not synchronized, but it should be fine to access.
// lst.add("deletesPending", pset.size());
lst.add("adds", addCommands.longValue());
lst.add("deletesById", deleteByIdCommands.longValue());
lst.add("deletesByQuery", deleteByQueryCommands.longValue());
lst.add("errors", numErrors.longValue());
lst.add("cumulative_adds", addCommandsCumulative.getCount());
lst.add("cumulative_deletesById", deleteByIdCommandsCumulative.getCount());
lst.add("cumulative_deletesByQuery", deleteByQueryCommandsCumulative.getCount());
lst.add("cumulative_errors", numErrorsCumulative.getCount());
if (this.ulog != null) {
lst.add("transaction_logs_total_size", ulog.getTotalLogsSize());
lst.add("transaction_logs_total_number", ulog.getTotalLogsNumber());
}
return lst;
}
@Override
public String toString() {
return "DirectUpdateHandler2" + getStatistics();
}
@Override
public SolrCoreState getSolrCoreState() {
return solrCoreState;

View File

@ -37,7 +37,7 @@ import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.util.HdfsUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -262,7 +262,7 @@ public class HdfsUpdateLog extends UpdateLog {
}
// initialize metrics
core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.TLOG.toString(), this);
core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.TLOG.toString(), this);
}
@Override

View File

@ -43,7 +43,7 @@ import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.handler.component.HttpShardHandlerFactory;
import org.apache.solr.handler.component.ShardHandler;
import org.apache.solr.handler.component.ShardHandlerFactory;
@ -160,16 +160,16 @@ public class PeerSync implements SolrMetricProducer {
shardHandlerFactory = (HttpShardHandlerFactory) core.getCoreDescriptor().getCoreContainer().getShardHandlerFactory();
shardHandler = shardHandlerFactory.getShardHandler(client);
core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.REPLICATION.toString(), this);
core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.REPLICATION.toString(), this);
}
public static final String METRIC_SCOPE = "peerSync";
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
syncTime = manager.timer(registry, "time", scope, METRIC_SCOPE);
syncErrors = manager.counter(registry, "errors", scope, METRIC_SCOPE);
syncSkipped = manager.counter(registry, "skipped", scope, METRIC_SCOPE);
syncTime = manager.timer(null, registry, "time", scope, METRIC_SCOPE);
syncErrors = manager.counter(null, registry, "errors", scope, METRIC_SCOPE);
syncSkipped = manager.counter(null, registry, "skipped", scope, METRIC_SCOPE);
}
/** optional list of updates we had before possibly receiving new updates */

View File

@ -39,7 +39,7 @@ import org.apache.solr.common.util.SuppressForbidden;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.schema.IndexSchema;
import org.slf4j.Logger;
@ -151,20 +151,20 @@ public class SolrIndexWriter extends IndexWriter {
}
if (mergeDetails) {
mergeTotals = true; // override
majorMergedDocs = metricManager.meter(registry, "docs", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
majorDeletedDocs = metricManager.meter(registry, "deletedDocs", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
majorMergedDocs = metricManager.meter(null, registry, "docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
majorDeletedDocs = metricManager.meter(null, registry, "deletedDocs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
}
if (mergeTotals) {
minorMerge = metricManager.timer(registry, "minor", SolrInfoMBean.Category.INDEX.toString(), "merge");
majorMerge = metricManager.timer(registry, "major", SolrInfoMBean.Category.INDEX.toString(), "merge");
mergeErrors = metricManager.counter(registry, "errors", SolrInfoMBean.Category.INDEX.toString(), "merge");
metricManager.registerGauge(registry, () -> runningMajorMerges.get(), true, "running", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
metricManager.registerGauge(registry, () -> runningMinorMerges.get(), true, "running", SolrInfoMBean.Category.INDEX.toString(), "merge", "minor");
metricManager.registerGauge(registry, () -> runningMajorMergesDocs.get(), true, "running.docs", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
metricManager.registerGauge(registry, () -> runningMinorMergesDocs.get(), true, "running.docs", SolrInfoMBean.Category.INDEX.toString(), "merge", "minor");
metricManager.registerGauge(registry, () -> runningMajorMergesSegments.get(), true, "running.segments", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
metricManager.registerGauge(registry, () -> runningMinorMergesSegments.get(), true, "running.segments", SolrInfoMBean.Category.INDEX.toString(), "merge", "minor");
flushMeter = metricManager.meter(registry, "flush", SolrInfoMBean.Category.INDEX.toString());
minorMerge = metricManager.timer(null, registry, "minor", SolrInfoBean.Category.INDEX.toString(), "merge");
majorMerge = metricManager.timer(null, registry, "major", SolrInfoBean.Category.INDEX.toString(), "merge");
mergeErrors = metricManager.counter(null, registry, "errors", SolrInfoBean.Category.INDEX.toString(), "merge");
metricManager.registerGauge(null, registry, () -> runningMajorMerges.get(), true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
metricManager.registerGauge(null, registry, () -> runningMinorMerges.get(), true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
metricManager.registerGauge(null, registry, () -> runningMajorMergesDocs.get(), true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
metricManager.registerGauge(null, registry, () -> runningMinorMergesDocs.get(), true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
metricManager.registerGauge(null, registry, () -> runningMajorMergesSegments.get(), true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
metricManager.registerGauge(null, registry, () -> runningMinorMergesSegments.get(), true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
flushMeter = metricManager.meter(null, registry, "flush", SolrInfoBean.Category.INDEX.toString());
}
}
}

View File

@ -19,14 +19,17 @@ package org.apache.solr.update;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.HashSet;
import java.util.Set;
import java.util.Vector;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.HdfsDirectoryFactory;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrEventListener;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.util.plugin.SolrCoreAware;
@ -41,7 +44,7 @@ import org.slf4j.LoggerFactory;
* @since solr 0.9
*/
public abstract class UpdateHandler implements SolrInfoMBean {
public abstract class UpdateHandler implements SolrInfoBean {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
protected final SolrCore core;
@ -55,6 +58,9 @@ public abstract class UpdateHandler implements SolrInfoMBean {
protected final UpdateLog ulog;
protected Set<String> metricNames = new HashSet<>();
protected MetricRegistry registry;
private void parseEventListeners() {
final Class<SolrEventListener> clazz = SolrEventListener.class;
final String label = "Event Listener";
@ -221,4 +227,12 @@ public abstract class UpdateHandler implements SolrInfoMBean {
public Category getCategory() {
return Category.UPDATE;
}
@Override
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public MetricRegistry getMetricRegistry() {
return registry;
}
}

View File

@ -57,7 +57,7 @@ import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.request.LocalSolrQueryRequest;
@ -403,7 +403,7 @@ public static final int VERSION_IDX = 1;
}
}
core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.TLOG.toString(), this);
core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.TLOG.toString(), this);
}
@Override
@ -422,12 +422,12 @@ public static final int VERSION_IDX = 1;
}
};
manager.registerGauge(registry, bufferedOpsGauge, true, "ops", scope, "buffered");
manager.registerGauge(registry, () -> logs.size(), true, "logs", scope, "replay", "remaining");
manager.registerGauge(registry, () -> getTotalLogsSize(), true, "bytes", scope, "replay", "remaining");
applyingBufferedOpsMeter = manager.meter(registry, "ops", scope, "applyingBuffered");
replayOpsMeter = manager.meter(registry, "ops", scope, "replay");
manager.registerGauge(registry, () -> state.getValue(), true, "state", scope);
manager.registerGauge(null, registry, bufferedOpsGauge, true, "ops", scope, "buffered");
manager.registerGauge(null, registry, () -> logs.size(), true, "logs", scope, "replay", "remaining");
manager.registerGauge(null, registry, () -> getTotalLogsSize(), true, "bytes", scope, "replay", "remaining");
applyingBufferedOpsMeter = manager.meter(null, registry, "ops", scope, "applyingBuffered");
replayOpsMeter = manager.meter(null, registry, "ops", scope, "replay");
manager.registerGauge(null, registry, () -> state.getValue(), true, "state", scope);
}
/**

View File

@ -17,10 +17,11 @@
package org.apache.solr.update;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import com.codahale.metrics.InstrumentedExecutorService;
import com.codahale.metrics.MetricRegistry;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
@ -29,20 +30,20 @@ import org.apache.solr.cloud.RecoveryStrategy;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.util.stats.HttpClientMetricNameStrategy;
import org.apache.solr.util.stats.InstrumentedHttpRequestExecutor;
import org.apache.solr.util.stats.InstrumentedPoolingHttpClientConnectionManager;
import org.apache.solr.util.stats.MetricUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.util.stats.InstrumentedHttpRequestExecutor.KNOWN_METRIC_NAME_STRATEGIES;
public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
public class UpdateShardHandler implements SolrMetricProducer, SolrInfoBean {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@ -65,6 +66,9 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
private final InstrumentedHttpRequestExecutor httpRequestExecutor;
private final Set<String> metricNames = new HashSet<>();
private MetricRegistry registry;
public UpdateShardHandler(UpdateShardHandlerConfig cfg) {
clientConnectionManager = new InstrumentedPoolingHttpClientConnectionManager(HttpClientUtil.getSchemaRegisteryProvider().getSchemaRegistry());
if (cfg != null ) {
@ -104,20 +108,14 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
}
@Override
public String getVersion() {
return getClass().getPackage().getSpecificationVersion();
}
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
registry = manager.registry(registryName);
String expandedScope = SolrMetricManager.mkName(scope, getCategory().name());
clientConnectionManager.initializeMetrics(manager, registry, expandedScope);
httpRequestExecutor.initializeMetrics(manager, registry, expandedScope);
updateExecutor = new InstrumentedExecutorService(updateExecutor,
manager.registry(registry),
clientConnectionManager.initializeMetrics(manager, registryName, expandedScope);
httpRequestExecutor.initializeMetrics(manager, registryName, expandedScope);
updateExecutor = MetricUtils.instrumentedExecutorService(updateExecutor, this, registry,
SolrMetricManager.mkName("updateExecutor", expandedScope, "threadPool"));
recoveryExecutor = new InstrumentedExecutorService(recoveryExecutor,
manager.registry(registry),
recoveryExecutor = MetricUtils.instrumentedExecutorService(recoveryExecutor, this, registry,
SolrMetricManager.mkName("recoveryExecutor", expandedScope, "threadPool"));
}
@ -132,18 +130,13 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
}
@Override
public String getSource() {
return null;
public Set<String> getMetricNames() {
return metricNames;
}
@Override
public URL[] getDocs() {
return new URL[0];
}
@Override
public NamedList getStatistics() {
return null;
public MetricRegistry getMetricRegistry() {
return registry;
}
public HttpClient getHttpClient() {

View File

@ -27,9 +27,6 @@ import java.util.List;
/**
* Utility methods to find a MBeanServer.
*
* This was factored out from {@link org.apache.solr.core.JmxMonitoredMap}
* and can eventually replace the logic used there.
*/
public final class JmxUtil {

View File

@ -35,10 +35,10 @@ public class InstrumentedPoolingHttpClientConnectionManager extends PoolingHttpC
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
manager.registerGauge(registry, () -> getTotalStats().getAvailable(), true, SolrMetricManager.mkName("availableConnections", scope));
manager.registerGauge(null, registry, () -> getTotalStats().getAvailable(), true, SolrMetricManager.mkName("availableConnections", scope));
// this acquires a lock on the connection pool; remove if contention sucks
manager.registerGauge(registry, () -> getTotalStats().getLeased(), true, SolrMetricManager.mkName("leasedConnections", scope));
manager.registerGauge(registry, () -> getTotalStats().getMax(), true, SolrMetricManager.mkName("maxConnections", scope));
manager.registerGauge(registry, () -> getTotalStats().getPending(), true, SolrMetricManager.mkName("pendingConnections", scope));
manager.registerGauge(null, registry, () -> getTotalStats().getLeased(), true, SolrMetricManager.mkName("leasedConnections", scope));
manager.registerGauge(null, registry, () -> getTotalStats().getMax(), true, SolrMetricManager.mkName("maxConnections", scope));
manager.registerGauge(null, registry, () -> getTotalStats().getPending(), true, SolrMetricManager.mkName("pendingConnections", scope));
}
}

View File

@ -16,9 +16,17 @@
*/
package org.apache.solr.util.stats;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.invoke.MethodHandles;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.PlatformManagedObject;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
@ -40,6 +48,7 @@ import com.codahale.metrics.Timer;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrInfoBean;
import org.apache.solr.metrics.AggregateMetric;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -116,59 +125,42 @@ public class MetricUtils {
* A metric <em>must</em> match this filter to be included in the output.
* @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
* @param compact use compact representation for counters and gauges.
* @param metadata optional metadata. If not null and not empty then this map will be added under a
* {@code _metadata_} key.
* @return a {@link NamedList}
*/
public static NamedList toNamedList(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
MetricFilter mustMatchFilter, boolean skipHistograms,
boolean skipAggregateValues, boolean compact,
Map<String, Object> metadata) {
boolean skipAggregateValues, boolean compact) {
NamedList result = new SimpleOrderedMap();
toMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, compact, (k, v) -> {
toMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, compact, false, (k, v) -> {
result.add(k, v);
});
if (metadata != null && !metadata.isEmpty()) {
result.add("_metadata_", metadata);
}
return result;
}
/**
* Returns a representation of the given metric registry as a list of {@link SolrInputDocument}-s.
* Provides a representation of the given metric registry as {@link SolrInputDocument}-s.
Only those metrics
* are converted to NamedList which match at least one of the given MetricFilter instances.
* are converted which match at least one of the given MetricFilter instances.
*
* @param registry the {@link MetricRegistry} to be converted to NamedList
* @param registry the {@link MetricRegistry} to be converted
* @param shouldMatchFilters a list of {@link MetricFilter} instances.
* A metric must match <em>any one</em> of the filters from this list to be
* included in the output
* @param mustMatchFilter a {@link MetricFilter}.
* A metric <em>must</em> match this filter to be included in the output.
* @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
* @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
* @param compact use compact representation for counters and gauges.
* @param metadata optional metadata. If not null and not empty then this map will be added under a
* {@code _metadata_} key.
* @return a list of {@link SolrInputDocument}-s
* @param consumer consumer that accepts produced {@link SolrInputDocument}-s
*/
public static List<SolrInputDocument> toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
MetricFilter mustMatchFilter, boolean skipHistograms,
boolean skipAggregateValues, boolean compact,
Map<String, Object> metadata) {
List<SolrInputDocument> result = new LinkedList<>();
toSolrInputDocuments(registry, shouldMatchFilters, mustMatchFilter, skipHistograms,
skipAggregateValues, compact, metadata, doc -> {
result.add(doc);
});
return result;
}
public static void toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
MetricFilter mustMatchFilter, boolean skipHistograms,
boolean skipAggregateValues, boolean compact,
Map<String, Object> metadata, Consumer<SolrInputDocument> consumer) {
boolean addMetadata = metadata != null && !metadata.isEmpty();
toMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, compact, (k, v) -> {
toMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, compact, false, (k, v) -> {
SolrInputDocument doc = new SolrInputDocument();
doc.setField(METRIC_NAME, k);
toSolrInputDocument(null, doc, v);
@ -179,7 +171,13 @@ public class MetricUtils {
});
}
public static void toSolrInputDocument(String prefix, SolrInputDocument doc, Object o) {
/**
* Fill in a SolrInputDocument with values from a converted metric, recursively.
* @param prefix prefix to add to generated field names, or null if none.
* @param doc document to fill
* @param o an instance of converted metric, either a Map or a flat Object
*/
static void toSolrInputDocument(String prefix, SolrInputDocument doc, Object o) {
if (!(o instanceof Map)) {
String key = prefix != null ? prefix : VALUE;
doc.addField(key, o);
@ -196,77 +194,170 @@ public class MetricUtils {
}
}
public static void toMaps(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
/**
* Convert selected metrics to maps or to flattened objects.
* @param registry source of metrics
* @param shouldMatchFilters metrics must match any of these filters
* @param mustMatchFilter metrics must match this filter
* @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
* @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
* @param compact use compact representation for counters and gauges.
* @param simple use simplified representation for complex metrics - instead of a (name, map)
* only the selected (name "." key, value) pairs will be produced.
* @param consumer consumer that accepts produced objects
*/
static void toMaps(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
MetricFilter mustMatchFilter, boolean skipHistograms, boolean skipAggregateValues,
boolean compact,
boolean compact, boolean simple,
BiConsumer<String, Object> consumer) {
Map<String, Metric> metrics = registry.getMetrics();
SortedSet<String> names = registry.getNames();
final Map<String, Metric> metrics = registry.getMetrics();
final SortedSet<String> names = registry.getNames();
names.stream()
.filter(s -> shouldMatchFilters.stream().anyMatch(metricFilter -> metricFilter.matches(s, metrics.get(s))))
.filter(s -> mustMatchFilter.matches(s, metrics.get(s)))
.forEach(n -> {
Metric metric = metrics.get(n);
if (metric instanceof Counter) {
Counter counter = (Counter) metric;
consumer.accept(n, convertCounter(counter, compact));
} else if (metric instanceof Gauge) {
Gauge gauge = (Gauge) metric;
try {
consumer.accept(n, convertGauge(gauge, compact));
} catch (InternalError ie) {
if (n.startsWith("memory.") && ie.getMessage().contains("Memory Pool not found")) {
LOG.warn("Error converting gauge '" + n + "', possible JDK bug: SOLR-10362", ie);
consumer.accept(n, null);
} else {
throw ie;
}
}
} else if (metric instanceof Meter) {
Meter meter = (Meter) metric;
consumer.accept(n, convertMeter(meter));
} else if (metric instanceof Timer) {
Timer timer = (Timer) metric;
consumer.accept(n, convertTimer(timer, skipHistograms));
} else if (metric instanceof Histogram) {
if (!skipHistograms) {
Histogram histogram = (Histogram) metric;
consumer.accept(n, convertHistogram(histogram));
}
} else if (metric instanceof AggregateMetric) {
consumer.accept(n, convertAggregateMetric((AggregateMetric)metric, skipAggregateValues));
}
convertMetric(n, metric, skipHistograms, skipAggregateValues, compact, simple, consumer);
});
}
static Map<String, Object> convertAggregateMetric(AggregateMetric metric, boolean skipAggregateValues) {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", metric.size());
response.put(MAX, metric.getMax());
response.put(MIN, metric.getMin());
response.put(MEAN, metric.getMean());
response.put(STDDEV, metric.getStdDev());
response.put(SUM, metric.getSum());
if (!(metric.isEmpty() || skipAggregateValues)) {
Map<String, Object> values = new LinkedHashMap<>();
response.put(VALUES, values);
metric.getValues().forEach((k, v) -> {
Map<String, Object> map = new LinkedHashMap<>();
map.put("value", v.value);
map.put("updateCount", v.updateCount.get());
values.put(k, map);
});
}
return response;
/**
* Convert selected metrics from a registry into a map, with metrics in a compact AND simple format.
* @param registry registry
* @param names metric names
* @return map where keys are metric names (if they were present in the registry) and values are
* converted metrics in simplified format.
*/
public static Map<String, Object> convertMetrics(MetricRegistry registry, Collection<String> names) {
final Map<String, Object> metrics = new HashMap<>();
convertMetrics(registry, names, false, true, true, true, (k, v) -> metrics.put(k, v));
return metrics;
}
static Map<String, Object> convertHistogram(Histogram histogram) {
Map<String, Object> response = new LinkedHashMap<>();
/**
* Convert selected metrics from a registry into maps (when <code>compact==false</code>) or
* flattened objects.
* @param registry registry
* @param names metric names
* @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
* @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
* @param compact use compact representation for counters and gauges.
* @param simple use simplified representation for complex metrics - instead of a (name, map)
* only the selected (name "." key, value) pairs will be produced.
* @param consumer consumer that accepts produced objects
*/
public static void convertMetrics(MetricRegistry registry, Collection<String> names,
boolean skipHistograms, boolean skipAggregateValues,
boolean compact, boolean simple,
BiConsumer<String, Object> consumer) {
final Map<String, Metric> metrics = registry.getMetrics();
names.stream()
.forEach(n -> {
Metric metric = metrics.get(n);
convertMetric(n, metric, skipHistograms, skipAggregateValues, compact, simple, consumer);
});
}
/**
* Convert a single instance of metric into a map or flattened object.
* @param n metric name
* @param metric metric instance
* @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
* @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
* @param compact use compact representation for counters and gauges.
* @param simple use simplified representation for complex metrics - instead of a (name, map)
* only the selected (name "." key, value) pairs will be produced.
* @param consumer consumer that accepts produced objects
*/
static void convertMetric(String n, Metric metric, boolean skipHistograms, boolean skipAggregateValues,
boolean compact, boolean simple, BiConsumer<String, Object> consumer) {
if (metric instanceof Counter) {
Counter counter = (Counter) metric;
consumer.accept(n, convertCounter(counter, compact));
} else if (metric instanceof Gauge) {
Gauge gauge = (Gauge) metric;
try {
convertGauge(n, gauge, simple, compact, consumer);
} catch (InternalError ie) {
if (n.startsWith("memory.") && ie.getMessage().contains("Memory Pool not found")) {
LOG.warn("Error converting gauge '" + n + "', possible JDK bug: SOLR-10362", ie);
consumer.accept(n, null);
} else {
throw ie;
}
}
} else if (metric instanceof Meter) {
Meter meter = (Meter) metric;
convertMeter(n, meter, simple, consumer);
} else if (metric instanceof Timer) {
Timer timer = (Timer) metric;
convertTimer(n, timer, skipHistograms, simple, consumer);
} else if (metric instanceof Histogram) {
if (!skipHistograms) {
Histogram histogram = (Histogram) metric;
convertHistogram(n, histogram, simple, consumer);
}
} else if (metric instanceof AggregateMetric) {
convertAggregateMetric(n, (AggregateMetric)metric, skipAggregateValues, simple, consumer);
}
}
/**
* Convert an instance of {@link AggregateMetric}.
* @param name metric name
* @param metric an instance of {@link AggregateMetric}
* @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
* @param simple use simplified representation for complex metrics - instead of a (name, map)
* only the selected (name "." key, value) pairs will be produced.
* @param consumer consumer that accepts produced objects
*/
static void convertAggregateMetric(String name, AggregateMetric metric,
boolean skipAggregateValues, boolean simple, BiConsumer<String, Object> consumer) {
if (simple) {
consumer.accept(name + "." + MEAN, metric.getMean());
} else {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", metric.size());
response.put(MAX, metric.getMax());
response.put(MIN, metric.getMin());
response.put(MEAN, metric.getMean());
response.put(STDDEV, metric.getStdDev());
response.put(SUM, metric.getSum());
if (!(metric.isEmpty() || skipAggregateValues)) {
Map<String, Object> values = new LinkedHashMap<>();
response.put(VALUES, values);
metric.getValues().forEach((k, v) -> {
Map<String, Object> map = new LinkedHashMap<>();
map.put("value", v.value);
map.put("updateCount", v.updateCount.get());
values.put(k, map);
});
}
consumer.accept(name, response);
}
}
/**
 * Convert an instance of {@link Histogram}. NOTE: it's assumed that histogram contains non-time
 * based values that don't require unit conversion.
 * @param name metric name
 * @param histogram an instance of {@link Histogram}
 * @param simple use simplified representation for complex metrics - instead of a (name, map)
 *               only the selected (name "." key, value) pairs will be produced.
 * @param consumer consumer that accepts produced objects
 */
static void convertHistogram(String name, Histogram histogram,
    boolean simple, BiConsumer<String, Object> consumer) {
  Snapshot snapshot = histogram.getSnapshot();
  if (simple) {
    // simplified form: only the mean of the snapshot, as a flat (name ".mean", value) pair
    consumer.accept(name + "." + MEAN, snapshot.getMean());
  } else {
    Map<String, Object> response = new LinkedHashMap<>();
    response.put("count", histogram.getCount());
    // non-time based values, so no ns -> ms conversion in addSnapshot
    addSnapshot(response, snapshot, false);
    consumer.accept(name, response);
  }
}
// optionally convert ns to ms
@ -291,40 +382,91 @@ public class MetricUtils {
response.put((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
}
static Map<String,Object> convertTimer(Timer timer, boolean skipHistograms) {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", timer.getCount());
response.put("meanRate", timer.getMeanRate());
response.put("1minRate", timer.getOneMinuteRate());
response.put("5minRate", timer.getFiveMinuteRate());
response.put("15minRate", timer.getFifteenMinuteRate());
if (!skipHistograms) {
// time-based values in nanoseconds
addSnapshot(response, timer.getSnapshot(), true);
/**
* Convert a {@link Timer} to a map.
* @param name metric name
* @param timer timer instance
* @param skipHistograms if true then discard the histogram part of the timer.
* @param simple use simplified representation for complex metrics - instead of a (name, map)
* only the selected (name "." key, value) pairs will be produced.
* @param consumer consumer that accepts produced objects
*/
public static void convertTimer(String name, Timer timer, boolean skipHistograms,
boolean simple, BiConsumer<String, Object> consumer) {
if (simple) {
consumer.accept(name + ".meanRate", timer.getMeanRate());
} else {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", timer.getCount());
response.put("meanRate", timer.getMeanRate());
response.put("1minRate", timer.getOneMinuteRate());
response.put("5minRate", timer.getFiveMinuteRate());
response.put("15minRate", timer.getFifteenMinuteRate());
if (!skipHistograms) {
// time-based values in nanoseconds
addSnapshot(response, timer.getSnapshot(), true);
}
consumer.accept(name, response);
}
return response;
}
static Map<String, Object> convertMeter(Meter meter) {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", meter.getCount());
response.put("meanRate", meter.getMeanRate());
response.put("1minRate", meter.getOneMinuteRate());
response.put("5minRate", meter.getFiveMinuteRate());
response.put("15minRate", meter.getFifteenMinuteRate());
return response;
/**
* Convert a {@link Meter} to a map.
* @param name metric name
* @param meter meter instance
* @param simple use simplified representation for complex metrics - instead of a (name, map)
* only the selected (name "." key, value) pairs will be produced.
* @param consumer consumer that accepts produced objects
*/
static void convertMeter(String name, Meter meter, boolean simple, BiConsumer<String, Object> consumer) {
if (simple) {
consumer.accept(name + ".count", meter.getCount());
} else {
Map<String, Object> response = new LinkedHashMap<>();
response.put("count", meter.getCount());
response.put("meanRate", meter.getMeanRate());
response.put("1minRate", meter.getOneMinuteRate());
response.put("5minRate", meter.getFiveMinuteRate());
response.put("15minRate", meter.getFifteenMinuteRate());
consumer.accept(name, response);
}
}
static Object convertGauge(Gauge gauge, boolean compact) {
if (compact) {
return gauge.getValue();
/**
* Convert a {@link Gauge}.
* @param name metric name
* @param gauge gauge instance
* @param simple use simplified representation for complex metrics - instead of a (name, map)
* only the selected (name "." key, value) pairs will be produced.
* @param compact if true then only return {@link Gauge#getValue()}. If false
* then return a map with a "value" field.
* @param consumer consumer that accepts produced objects
*/
static void convertGauge(String name, Gauge gauge, boolean simple, boolean compact,
BiConsumer<String, Object> consumer) {
if (compact || simple) {
Object o = gauge.getValue();
if (simple && (o instanceof Map)) {
for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
consumer.accept(name + "." + entry.getKey().toString(), entry.getValue());
}
} else {
consumer.accept(name, o);
}
} else {
Map<String, Object> response = new LinkedHashMap<>();
response.put("value", gauge.getValue());
return response;
consumer.accept(name, response);
}
}
/**
* Convert a {@link Counter}
* @param counter counter instance
* @param compact if true then only return {@link Counter#getCount()}. If false
* then return a map with a "count" field.
* @return map or object
*/
static Object convertCounter(Counter counter, boolean compact) {
if (compact) {
return counter.getCount();
@ -338,7 +480,88 @@ public class MetricUtils {
/**
* Returns an instrumented wrapper over the given executor service.
*/
public static ExecutorService instrumentedExecutorService(ExecutorService delegate, MetricRegistry metricRegistry, String scope) {
public static ExecutorService instrumentedExecutorService(ExecutorService delegate, SolrInfoBean info, MetricRegistry metricRegistry, String scope) {
if (info != null && info.getMetricNames() != null) {
info.getMetricNames().add(MetricRegistry.name(scope, "submitted"));
info.getMetricNames().add(MetricRegistry.name(scope, "running"));
info.getMetricNames().add(MetricRegistry.name(scope, "completed"));
info.getMetricNames().add(MetricRegistry.name(scope, "duration"));
}
return new InstrumentedExecutorService(delegate, metricRegistry, scope);
}
/**
 * Creates a set of metrics (gauges) that correspond to available bean properties for the provided MXBean.
 * @param obj an instance of MXBean
 * @param intf MXBean interface, one of {@link PlatformManagedObject}-s
 * @param prefix optional prefix for metric names
 * @param consumer consumer for created names and metrics
 * @param <T> formal type
 */
public static <T extends PlatformManagedObject> void addMXBeanMetrics(T obj, Class<? extends T> intf,
    String prefix, BiConsumer<String, Metric> consumer) {
  if (!intf.isInstance(obj)) {
    return;
  }
  final BeanInfo beanInfo;
  try {
    // introspect only the properties declared on the MXBean interface itself
    beanInfo = Introspector.getBeanInfo(intf, intf.getSuperclass(), Introspector.IGNORE_ALL_BEANINFO);
  } catch (IntrospectionException e) {
    LOG.warn("Unable to fetch properties of MXBean " + obj.getClass().getName());
    return;
  }
  for (final PropertyDescriptor prop : beanInfo.getPropertyDescriptors()) {
    final String propName = prop.getName();
    try {
      // probe the getter once up front; only properties that work on this
      // platform are exposed as gauges
      prop.getReadMethod().invoke(obj);
      final Gauge<?> gauge = () -> {
        try {
          return prop.getReadMethod().invoke(obj);
        } catch (InvocationTargetException | IllegalAccessException e) {
          // some properties throw UOE at call time - report them as null
          return null;
        }
      };
      consumer.accept(MetricRegistry.name(prefix, propName), gauge);
    } catch (Exception e) {
      // getter is not functional here, skip this property
    }
  }
}
/**
 * These are well-known implementations of {@link java.lang.management.OperatingSystemMXBean}.
 * Some of them provide additional useful properties beyond those declared by the interface.
 * NOTE: the reference is final but the array contents are still mutable; callers must not
 * modify the elements.
 */
public static final String[] OS_MXBEAN_CLASSES = new String[] {
    OperatingSystemMXBean.class.getName(),
    "com.sun.management.OperatingSystemMXBean",
    "com.sun.management.UnixOperatingSystemMXBean",
    "com.ibm.lang.management.OperatingSystemMXBean"
};
/**
 * Creates a set of metrics (gauges) that correspond to available bean properties for the provided MXBean.
 * @param obj an instance of MXBean
 * @param interfaces interfaces that it may implement. Each interface will be tried in turn, and only
 *                   if it exists and if it contains unique properties then they will be added as metrics.
 * @param prefix optional prefix for metric names. NOTE(review): this argument is currently not
 *               forwarded - a {@code null} prefix is passed to the per-interface overload; confirm
 *               whether that is intentional.
 * @param consumer consumer for created names and metrics
 * @param <T> formal type
 */
public static <T extends PlatformManagedObject> void addMXBeanMetrics(T obj, String[] interfaces,
    String prefix, BiConsumer<String, Metric> consumer) {
  for (String className : interfaces) {
    final Class<? extends PlatformManagedObject> intf;
    try {
      intf = Class.forName(className).asSubclass(PlatformManagedObject.class);
    } catch (ClassNotFoundException e) {
      // platform-specific interface not present in this JVM - try the next one
      continue;
    }
    MetricUtils.addMXBeanMetrics(obj, intf, null, consumer);
  }
}
}

View File

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<solr>
<shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
<str name="urlScheme">${urlScheme:}</str>
<int name="socketTimeout">${socketTimeout:90000}</int>
<int name="connTimeout">${connTimeout:15000}</int>
</shardHandlerFactory>
<solrcloud>
<str name="host">127.0.0.1</str>
<int name="hostPort">${hostPort:8983}</int>
<str name="hostContext">${hostContext:solr}</str>
<int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
<bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
<int name="leaderVoteWait">${leaderVoteWait:10000}</int>
<int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
<int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
<int name="autoReplicaFailoverWaitAfterExpiration">${autoReplicaFailoverWaitAfterExpiration:10000}</int>
<int name="autoReplicaFailoverWorkLoopDelay">${autoReplicaFailoverWorkLoopDelay:10000}</int>
<int name="autoReplicaFailoverBadNodeExpiration">${autoReplicaFailoverBadNodeExpiration:60000}</int>
</solrcloud>
<metrics>
<reporter name="defaultJmx" class="org.apache.solr.metrics.reporters.SolrJmxReporter"/>
</metrics>
</solr>

Some files were not shown because too many files have changed in this diff Show More