mirror of https://github.com/apache/lucene.git
SOLR-6766: Expose HdfsDirectoryFactory Block Cache statistics via JMX.
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1649939 13f79535-47bb-0310-9956-ffa450edef68
parent 9b3d792bd8
commit 90dd4ab345
solr/CHANGES.txt
@@ -292,6 +292,9 @@ New Features
   distErr, d, geodist, score=distance|area|area2d. score now accepts these units as well. It does
   NOT affect distances embedded in WKT strings like BUFFER(POINT(200 10),0.2)).
   (Ishan Chattopadhyaya, David Smiley)
 
+* SOLR-6766: Expose HdfsDirectoryFactory Block Cache statistics via JMX.
+  (Mike Drob, Mark Miller)
+
 Bug Fixes
 ----------------------
solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
@@ -21,6 +21,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
 
 import java.io.IOException;
 import java.net.URI;
+import java.net.URL;
 import java.net.URLEncoder;
 import java.util.Locale;
 
@@ -51,7 +52,7 @@ import org.apache.solr.util.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class HdfsDirectoryFactory extends CachingDirectoryFactory {
+public class HdfsDirectoryFactory extends CachingDirectoryFactory implements SolrInfoMBean {
   public static Logger LOG = LoggerFactory
       .getLogger(HdfsDirectoryFactory.class);
 
@@ -86,6 +87,12 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory {
   public static Metrics metrics;
   private static Boolean kerberosInit;
 
+  private final static class MetricsHolder {
+    // [JCIP SE, Goetz, 16.6] Lazy initialization
+    // Won't load until MetricsHolder is referenced
+    public static final Metrics metrics = new Metrics();
+  }
+
   @Override
   public void init(NamedList args) {
     params = SolrParams.toSolrParams(args);
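Aside (not part of the commit): the MetricsHolder class added above is the initialization-on-demand holder idiom referenced in its comment (Java Concurrency in Practice, Goetz, 16.6). A minimal standalone sketch of the idiom, with hypothetical names:

public class LazyHolderSketch {

  static class ExpensiveResource {
    ExpensiveResource() {
      System.out.println("constructed once, on first use");
    }
  }

  private static final class Holder {
    // The JVM initializes Holder, and this field, lazily and exactly once,
    // the first time Holder is referenced, so no explicit locking is needed.
    static final ExpensiveResource INSTANCE = new ExpensiveResource();
  }

  public static ExpensiveResource instance() {
    return Holder.INSTANCE; // first call triggers Holder's class initialization
  }

  public static void main(String[] args) {
    System.out.println("before first use");
    instance(); // the constructor message prints here, not at class-load time
  }
}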
@@ -129,7 +136,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory {
     Configuration conf = getConf();
 
     if (metrics == null) {
-      metrics = new Metrics(conf);
+      metrics = MetricsHolder.metrics;
     }
 
     boolean blockCacheEnabled = params.getBool(BLOCKCACHE_ENABLED, true);
@@ -359,4 +366,45 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory {
       }
     }
   }
+
+  // SolrInfoMBean methods
+
+  @Override
+  public String getName() {
+    return getClass().getSimpleName() + "BlockCache";
+  }
+
+  @Override
+  public String getVersion() {
+    return SolrCore.version;
+  }
+
+  @Override
+  public String getDescription() {
+    return "Provides metrics for the HdfsDirectoryFactory BlockCache.";
+  }
+
+  @Override
+  public Category getCategory() {
+    return Category.CACHE;
+  }
+
+  @Override
+  public String getSource() {
+    return null;
+  }
+
+  @Override
+  public URL[] getDocs() {
+    return null;
+  }
+
+  @Override
+  public NamedList<?> getStatistics() {
+    if (metrics == null) {
+      return new NamedList<Object>();
+    }
+
+    return metrics.getStatistics();
+  }
 }
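Aside (illustrative, not from the commit): once HdfsDirectoryFactory implements SolrInfoMBean, it can be registered alongside Solr's other info beans and surfaced over JMX. A minimal sketch of how a caller might read the exposed statistics, using only the bean methods visible in the diff above; the printer class itself is hypothetical:

import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrInfoMBean;

public class BlockCacheStatsPrinter {

  // Dumps the name, category, description and current statistics of any
  // SolrInfoMBean, e.g. an HdfsDirectoryFactory with this commit applied.
  public static void print(SolrInfoMBean bean) {
    System.out.println(bean.getName() + " [" + bean.getCategory() + "] - " + bean.getDescription());
    NamedList<?> stats = bean.getStatistics();
    for (int i = 0; i < stats.size(); i++) {
      System.out.println("  " + stats.getName(i) + " = " + stats.getVal(i));
    }
  }
}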
solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
@@ -22,17 +22,16 @@ import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.jvm.JvmMetrics;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.search.SolrCacheBase;
 
+/**
+ * A {@link SolrInfoMBean} that provides metrics on block cache operations.
+ *
+ * @lucene.experimental
+ */
-public class Metrics implements Updater {
+public class Metrics extends SolrCacheBase {
 
   public static class MethodCall {
     public AtomicLong invokes = new AtomicLong();
@@ -60,14 +59,12 @@ public class Metrics implements Updater {
   public AtomicLong indexMemoryUsage = new AtomicLong(0);
   public AtomicLong segmentCount = new AtomicLong(0);
 
-  private MetricsRecord metricsRecord;
   private long previous = System.nanoTime();
 
   public static void main(String[] args) throws InterruptedException {
-    Configuration conf = new Configuration();
-    Metrics metrics = new Metrics(conf);
+    Metrics metrics = new Metrics();
     MethodCall methodCall = new MethodCall();
-    metrics.methodCalls.put("test",methodCall);
+    metrics.methodCalls.put("test", methodCall);
     for (int i = 0; i < 100; i++) {
       metrics.blockCacheHit.incrementAndGet();
       metrics.blockCacheMiss.incrementAndGet();
@@ -77,53 +74,50 @@ public class Metrics implements Updater {
     }
   }
 
-  public Metrics(Configuration conf) {
-    JvmMetrics.init("blockcache", Long.toString(System.currentTimeMillis()));
-    MetricsContext metricsContext = MetricsUtil.getContext("blockcache");
-    metricsRecord = MetricsUtil.createRecord(metricsContext, "metrics");
-    metricsContext.registerUpdater(this);
-  }
-
-  @Override
-  public void doUpdates(MetricsContext context) {
-    synchronized (this) {
-      long now = System.nanoTime();
-      float seconds = (now - previous) / 1000000000.0f;
-      metricsRecord.setMetric("blockcache.hit", getPerSecond(blockCacheHit.getAndSet(0), seconds));
-      metricsRecord.setMetric("blockcache.miss", getPerSecond(blockCacheMiss.getAndSet(0), seconds));
-      metricsRecord.setMetric("blockcache.eviction", getPerSecond(blockCacheEviction.getAndSet(0), seconds));
-      metricsRecord.setMetric("blockcache.size", blockCacheSize.get());
-      metricsRecord.setMetric("row.reads", getPerSecond(rowReads.getAndSet(0), seconds));
-      metricsRecord.setMetric("row.writes", getPerSecond(rowWrites.getAndSet(0), seconds));
-      metricsRecord.setMetric("record.reads", getPerSecond(recordReads.getAndSet(0), seconds));
-      metricsRecord.setMetric("record.writes", getPerSecond(recordWrites.getAndSet(0), seconds));
-      metricsRecord.setMetric("query.external", getPerSecond(queriesExternal.getAndSet(0), seconds));
-      metricsRecord.setMetric("query.internal", getPerSecond(queriesInternal.getAndSet(0), seconds));
-      metricsRecord.setMetric("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
-      metricsRecord.setMetric("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
-      for (Entry<String,MethodCall> entry : methodCalls.entrySet()) {
-        String key = entry.getKey();
-        MethodCall value = entry.getValue();
-        long invokes = value.invokes.getAndSet(0);
-        long times = value.times.getAndSet(0);
-
-        float avgTimes = (times / (float) invokes) / 1000000000.0f;
-        metricsRecord.setMetric("methodcalls." + key + ".count", getPerSecond(invokes, seconds));
-        metricsRecord.setMetric("methodcalls." + key + ".time", avgTimes);
-      }
-      metricsRecord.setMetric("tables", tableCount.get());
-      metricsRecord.setMetric("rows", rowCount.get());
-      metricsRecord.setMetric("records", recordCount.get());
-      metricsRecord.setMetric("index.count", indexCount.get());
-      metricsRecord.setMetric("index.memoryusage", indexMemoryUsage.get());
-      metricsRecord.setMetric("index.segments", segmentCount.get());
-      previous = now;
+  public NamedList<Number> getStatistics() {
+    NamedList<Number> stats = new NamedList<Number>();
+
+    long now = System.nanoTime();
+    float seconds = (now - previous) / 1000000000.0f;
+
+    long hits = blockCacheHit.getAndSet(0);
+    long lookups = hits + blockCacheMiss.getAndSet(0);
+
+    stats.add("lookups", getPerSecond(lookups, seconds));
+    stats.add("hits", getPerSecond(hits, seconds));
+    stats.add("hitratio", calcHitRatio(lookups, hits));
+    stats.add("evictions", getPerSecond(blockCacheEviction.getAndSet(0), seconds));
+    stats.add("size", blockCacheSize.get());
+    stats.add("row.reads", getPerSecond(rowReads.getAndSet(0), seconds));
+    stats.add("row.writes", getPerSecond(rowWrites.getAndSet(0), seconds));
+    stats.add("record.reads", getPerSecond(recordReads.getAndSet(0), seconds));
+    stats.add("record.writes", getPerSecond(recordWrites.getAndSet(0), seconds));
+    stats.add("query.external", getPerSecond(queriesExternal.getAndSet(0), seconds));
+    stats.add("query.internal", getPerSecond(queriesInternal.getAndSet(0), seconds));
+    stats.add("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
+    stats.add("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
+    for (Entry<String,MethodCall> entry : methodCalls.entrySet()) {
+      String key = entry.getKey();
+      MethodCall value = entry.getValue();
+      long invokes = value.invokes.getAndSet(0);
+      long times = value.times.getAndSet(0);
+
+      float avgTimes = (times / (float) invokes) / 1000000000.0f;
+      stats.add("methodcalls." + key + ".count", getPerSecond(invokes, seconds));
+      stats.add("methodcalls." + key + ".time", avgTimes);
     }
-    metricsRecord.update();
+    stats.add("tables", tableCount.get());
+    stats.add("rows", rowCount.get());
+    stats.add("records", recordCount.get());
+    stats.add("index.count", indexCount.get());
+    stats.add("index.memoryusage", indexMemoryUsage.get());
+    stats.add("index.segments", segmentCount.get());
+    previous = now;
+
+    return stats;
   }
 
   private float getPerSecond(long value, float seconds) {
     return (float) (value / seconds);
   }
 
 }
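Aside (illustrative, not from the commit): getStatistics() above reports rates rather than raw totals. Each counter is drained with AtomicLong.getAndSet(0) and divided by the seconds elapsed since the previous snapshot, so every call covers only the activity since the last call. A self-contained sketch of that pattern, with hypothetical names:

import java.util.concurrent.atomic.AtomicLong;

public class RateSnapshotDemo {

  private final AtomicLong hits = new AtomicLong();
  private long previous = System.nanoTime();

  public void recordHit() {
    hits.incrementAndGet();
  }

  // Drains the counter and converts it to a per-second rate over the window
  // since the previous snapshot, mirroring the structure of Metrics.getStatistics().
  public synchronized float hitsPerSecond() {
    long now = System.nanoTime();
    float seconds = (now - previous) / 1000000000.0f;
    previous = now;
    return hits.getAndSet(0) / seconds;
  }

  public static void main(String[] args) throws InterruptedException {
    RateSnapshotDemo demo = new RateSnapshotDemo();
    for (int i = 0; i < 1000; i++) {
      demo.recordHit();
    }
    Thread.sleep(100);
    System.out.println("hits/sec since last snapshot: " + demo.hitsPerSecond());
  }
}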
solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java
@@ -21,7 +21,6 @@ import java.util.Arrays;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
@@ -34,7 +33,7 @@ public class BlockCacheTest extends LuceneTestCase {
     int slabSize = blockSize * 4096;
     long totalMemory = 2 * slabSize;
 
-    BlockCache blockCache = new BlockCache(new Metrics(new Configuration()), true,totalMemory,slabSize,blockSize);
+    BlockCache blockCache = new BlockCache(new Metrics(), true, totalMemory, slabSize, blockSize);
     byte[] buffer = new byte[1024];
     Random random = random();
     byte[] newData = new byte[blockSize];
@@ -87,8 +86,7 @@ public class BlockCacheTest extends LuceneTestCase {
     int slabSize = blockSize * 1024;
     long totalMemory = 2 * slabSize;
 
-    BlockCache blockCache = new BlockCache(new Metrics(new Configuration()),
-        true, totalMemory, slabSize);
+    BlockCache blockCache = new BlockCache(new Metrics(), true, totalMemory, slabSize);
     BlockCacheKey blockCacheKey = new BlockCacheKey();
     blockCacheKey.setBlock(0);
     blockCacheKey.setFile(0);
solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java
@@ -0,0 +1,93 @@
+package org.apache.solr.store.blockcache;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.math.BigDecimal;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.common.util.NamedList;
+import org.junit.Before;
+import org.junit.Test;
+
+public class BufferStoreTest extends LuceneTestCase {
+  private final static int blockSize = 1024;
+
+  private Metrics metrics;
+
+  private Store store;
+
+  @Before
+  public void setup() {
+    metrics = new Metrics();
+    BufferStore.initNewBuffer(blockSize, blockSize, metrics);
+    store = BufferStore.instance(blockSize);
+  }
+
+  @Test
+  public void testBufferTakePut() {
+    byte[] b1 = store.takeBuffer(blockSize);
+
+    assertGaugeMetricsChanged(false, false);
+
+    byte[] b2 = store.takeBuffer(blockSize);
+    byte[] b3 = store.takeBuffer(blockSize);
+
+    assertRawMetricCounts(2, 0);
+    assertGaugeMetricsChanged(true, false);
+
+    store.putBuffer(b1);
+
+    assertGaugeMetricsChanged(false, false);
+
+    store.putBuffer(b2);
+    store.putBuffer(b3);
+
+    assertRawMetricCounts(0, 2);
+    assertGaugeMetricsChanged(false, true);
+  }
+
+  private void assertRawMetricCounts(int allocated, int lost) {
+    assertEquals("Buffer allocation count is wrong.", allocated,
+        metrics.shardBuffercacheAllocate.get());
+    assertEquals("Lost buffer count is wrong", lost,
+        metrics.shardBuffercacheLost.get());
+  }
+
+  /**
+   * Stateful method to verify whether the amount of buffers allocated and lost
+   * since the last call has changed.
+   *
+   * @param allocated
+   *          whether buffers should have been allocated since the last call
+   * @param lost
+   *          whether buffers should have been lost since the last call
+   */
+  private void assertGaugeMetricsChanged(boolean allocated, boolean lost) {
+    NamedList<Number> stats = metrics.getStatistics();
+
+    assertEquals("Buffer allocation metric not updating correctly.",
+        allocated, isMetricPositive(stats, "buffercache.allocations"));
+    assertEquals("Buffer lost metric not updating correctly.",
+        lost, isMetricPositive(stats, "buffercache.lost"));
+  }
+
+  private boolean isMetricPositive(NamedList<Number> stats, String metric) {
+    return new BigDecimal(stats.get(metric).toString()).compareTo(BigDecimal.ZERO) > 0;
+  }
+
+}