HBASE-14534 Bump yammer/coda/dropwizard metrics dependency version

Mikhail Antonov 2015-12-15 12:11:27 -08:00
parent 3e26063161
commit abe30b52a8
24 changed files with 311 additions and 186 deletions
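The upgrade is mechanical but touches every metrics call site: the Maven artifact moves from com.yammer.metrics to io.dropwizard.metrics, the Java package from com.yammer.metrics.* to com.codahale.metrics, and metrics are created through registry.timer()/histogram()/counter() keyed by MetricRegistry.name() instead of the old newTimer(Class, name, scope) overloads. A minimal before/after sketch against metrics 3.1.2 (the class name is illustrative, not part of this commit):

import static com.codahale.metrics.MetricRegistry.name;

import java.util.concurrent.TimeUnit;

import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;

public class MetricsMigrationSketch {
  public static void main(String[] args) {
    MetricRegistry registry = new MetricRegistry();   // 2.x: new MetricsRegistry()
    // 2.x: registry.newTimer(Example.class, "rpc", "scope")
    Timer rpc = registry.timer(name(MetricsMigrationSketch.class, "rpc", "scope"));
    // 2.x: registry.newHistogram(Example.class, "sizes")
    Histogram sizes = registry.histogram(name(MetricsMigrationSketch.class, "sizes"));
    // 2.x: registry.newCounter(Example.class, "hits")
    Counter hits = registry.counter(name(MetricsMigrationSketch.class, "hits"));

    rpc.update(5, TimeUnit.MILLISECONDS);
    sizes.update(42);
    hits.inc();
    // Accessors are renamed too: count() becomes getCount().
    System.out.println(hits.getCount());
  }
}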

View File

@@ -190,7 +190,7 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
   </dependencies>

View File

@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.client;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.Descriptors.MethodDescriptor;
 import com.google.protobuf.Message;
-import com.yammer.metrics.core.Counter;
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.core.Timer;
-import com.yammer.metrics.reporting.JmxReporter;
-import com.yammer.metrics.util.RatioGauge;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.Timer;
+import com.codahale.metrics.JmxReporter;
+import com.codahale.metrics.RatioGauge;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -40,11 +40,13 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
+import static com.codahale.metrics.MetricRegistry.name;
+
 /**
  * This class is for maintaining the various connection statistics and publishing them through
  * the metrics interfaces.
  *
- * This class manages its own {@link MetricsRegistry} and {@link JmxReporter} so as to not
+ * This class manages its own {@link MetricRegistry} and {@link JmxReporter} so as to not
  * conflict with other uses of Yammer Metrics within the client application. Instantiating
  * this class implicitly creates and "starts" instances of these classes; be sure to call
  * {@link #shutdown()} to terminate the thread pools they allocate.
@@ -109,18 +111,21 @@ public class MetricsConnection {
     @VisibleForTesting final Histogram reqHist;
     @VisibleForTesting final Histogram respHist;
 
-    private CallTracker(MetricsRegistry registry, String name, String subName, String scope) {
+    private CallTracker(MetricRegistry registry, String name, String subName, String scope) {
       StringBuilder sb = new StringBuilder(CLIENT_SVC).append("_").append(name);
       if (subName != null) {
        sb.append("(").append(subName).append(")");
       }
       this.name = sb.toString();
-      this.callTimer = registry.newTimer(MetricsConnection.class, DRTN_BASE + this.name, scope);
-      this.reqHist = registry.newHistogram(MetricsConnection.class, REQ_BASE + this.name, scope);
-      this.respHist = registry.newHistogram(MetricsConnection.class, RESP_BASE + this.name, scope);
+      this.callTimer = registry.timer(name(MetricsConnection.class,
+        DRTN_BASE + this.name, scope));
+      this.reqHist = registry.histogram(name(MetricsConnection.class,
+        REQ_BASE + this.name, scope));
+      this.respHist = registry.histogram(name(MetricsConnection.class,
+        RESP_BASE + this.name, scope));
     }
 
-    private CallTracker(MetricsRegistry registry, String name, String scope) {
+    private CallTracker(MetricRegistry registry, String name, String scope) {
       this(registry, name, null, scope);
     }
 
@@ -141,12 +146,12 @@ public class MetricsConnection {
     final Histogram memstoreLoadHist;
     final Histogram heapOccupancyHist;
 
-    public RegionStats(MetricsRegistry registry, String name) {
+    public RegionStats(MetricRegistry registry, String name) {
       this.name = name;
-      this.memstoreLoadHist = registry.newHistogram(MetricsConnection.class,
-          MEMLOAD_BASE + this.name);
-      this.heapOccupancyHist = registry.newHistogram(MetricsConnection.class,
-          HEAP_BASE + this.name);
+      this.memstoreLoadHist = registry.histogram(name(MetricsConnection.class,
+          MEMLOAD_BASE + this.name));
+      this.heapOccupancyHist = registry.histogram(name(MetricsConnection.class,
+          HEAP_BASE + this.name));
     }
 
     public void update(ClientProtos.RegionLoadStats regionStatistics) {
@@ -161,10 +166,13 @@ public class MetricsConnection {
     final Counter delayRunners;
     final Histogram delayIntevalHist;
 
-    public RunnerStats(MetricsRegistry registry) {
-      this.normalRunners = registry.newCounter(MetricsConnection.class, "normalRunnersCount");
-      this.delayRunners = registry.newCounter(MetricsConnection.class, "delayRunnersCount");
-      this.delayIntevalHist = registry.newHistogram(MetricsConnection.class, "delayIntervalHist");
+    public RunnerStats(MetricRegistry registry) {
+      this.normalRunners = registry.counter(
+          name(MetricsConnection.class, "normalRunnersCount"));
+      this.delayRunners = registry.counter(
+          name(MetricsConnection.class, "delayRunnersCount"));
+      this.delayIntevalHist = registry.histogram(
+          name(MetricsConnection.class, "delayIntervalHist"));
     }
 
     public void incrNormalRunners() {
@@ -233,19 +241,19 @@ public class MetricsConnection {
    */
   private static final int CONCURRENCY_LEVEL = 256;
 
-  private final MetricsRegistry registry;
+  private final MetricRegistry registry;
   private final JmxReporter reporter;
   private final String scope;
 
   private final NewMetric<Timer> timerFactory = new NewMetric<Timer>() {
     @Override public Timer newMetric(Class<?> clazz, String name, String scope) {
-      return registry.newTimer(clazz, name, scope);
+      return registry.timer(name(clazz, name, scope));
     }
   };
 
   private final NewMetric<Histogram> histogramFactory = new NewMetric<Histogram>() {
     @Override public Histogram newMetric(Class<?> clazz, String name, String scope) {
-      return registry.newHistogram(clazz, name, scope);
+      return registry.histogram(name(clazz, name, scope));
     }
   };
 
@@ -275,30 +283,26 @@ public class MetricsConnection {
   public MetricsConnection(final ConnectionImplementation conn) {
     this.scope = conn.toString();
-    this.registry = new MetricsRegistry();
+    this.registry = new MetricRegistry();
     final ThreadPoolExecutor batchPool = (ThreadPoolExecutor) conn.getCurrentBatchPool();
     final ThreadPoolExecutor metaPool = (ThreadPoolExecutor) conn.getCurrentMetaLookupPool();
 
-    this.registry.newGauge(this.getClass(), "executorPoolActiveThreads", scope,
+    this.registry.register(name(this.getClass(), "executorPoolActiveThreads", scope),
         new RatioGauge() {
-          @Override protected double getNumerator() {
-            return batchPool.getActiveCount();
-          }
-          @Override protected double getDenominator() {
-            return batchPool.getMaximumPoolSize();
+          @Override
+          protected Ratio getRatio() {
+            return Ratio.of(batchPool.getActiveCount(), batchPool.getMaximumPoolSize());
           }
         });
-    this.registry.newGauge(this.getClass(), "metaPoolActiveThreads", scope,
+    this.registry.register(name(this.getClass(), "metaPoolActiveThreads", scope),
         new RatioGauge() {
-          @Override protected double getNumerator() {
-            return metaPool.getActiveCount();
-          }
-          @Override protected double getDenominator() {
-            return metaPool.getMaximumPoolSize();
+          @Override
+          protected Ratio getRatio() {
+            return Ratio.of(metaPool.getActiveCount(), metaPool.getMaximumPoolSize());
           }
         });
-    this.metaCacheHits = registry.newCounter(this.getClass(), "metaCacheHits", scope);
-    this.metaCacheMisses = registry.newCounter(this.getClass(), "metaCacheMisses", scope);
+    this.metaCacheHits = registry.counter(name(this.getClass(), "metaCacheHits", scope));
+    this.metaCacheMisses = registry.counter(name(this.getClass(), "metaCacheMisses", scope));
     this.getTracker = new CallTracker(this.registry, "Get", scope);
     this.scanTracker = new CallTracker(this.registry, "Scan", scope);
     this.appendTracker = new CallTracker(this.registry, "Mutate", "Append", scope);
@@ -308,13 +312,12 @@ public class MetricsConnection {
     this.multiTracker = new CallTracker(this.registry, "Multi", scope);
     this.runnerStats = new RunnerStats(this.registry);
 
-    this.reporter = new JmxReporter(this.registry);
+    this.reporter = JmxReporter.forRegistry(this.registry).build();
     this.reporter.start();
   }
 
   public void shutdown() {
-    this.reporter.shutdown();
-    this.registry.shutdown();
+    this.reporter.stop();
   }
 
   /** Produce an instance of {@link CallStats} for clients to attach to RPCs. */
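
Two 3.x differences above are easy to miss: RatioGauge replaces the getNumerator()/getDenominator() pair with a single getRatio() returning RatioGauge.Ratio, and JmxReporter is now obtained from a builder and stop()ped, with no registry.shutdown() counterpart. A self-contained sketch of both, assuming metrics 3.1.2 (the pool and metric names are illustrative):

import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;

import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.RatioGauge;

public class RatioGaugeSketch {
  public static void main(String[] args) {
    MetricRegistry registry = new MetricRegistry();
    final ThreadPoolExecutor pool =
        (ThreadPoolExecutor) Executors.newFixedThreadPool(4);

    // One getRatio() instead of separate numerator/denominator overrides.
    registry.register("poolActiveThreads", new RatioGauge() {
      @Override
      protected Ratio getRatio() {
        return Ratio.of(pool.getActiveCount(), pool.getMaximumPoolSize());
      }
    });

    // Reporter lifecycle: build, start, stop.
    JmxReporter reporter = JmxReporter.forRegistry(registry).build();
    reporter.start();
    reporter.stop();
    pool.shutdown();
  }
}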

View File

@@ -112,9 +112,9 @@ public class TestMetricsConnection {
         METRICS.getTracker, METRICS.scanTracker, METRICS.multiTracker, METRICS.appendTracker,
         METRICS.deleteTracker, METRICS.incrementTracker, METRICS.putTracker
     }) {
-      Assert.assertEquals("Failed to invoke callTimer on " + t, loop, t.callTimer.count());
-      Assert.assertEquals("Failed to invoke reqHist on " + t, loop, t.reqHist.count());
-      Assert.assertEquals("Failed to invoke respHist on " + t, loop, t.respHist.count());
+      Assert.assertEquals("Failed to invoke callTimer on " + t, loop, t.callTimer.getCount());
+      Assert.assertEquals("Failed to invoke reqHist on " + t, loop, t.reqHist.getCount());
+      Assert.assertEquals("Failed to invoke respHist on " + t, loop, t.respHist.getCount());
     }
   }
 }

View File

@@ -182,7 +182,7 @@ limitations under the License.
       <version>${hadoop-two.version}</version>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -26,9 +26,9 @@ import org.apache.hadoop.metrics2.MetricHistogram;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
-import com.yammer.metrics.stats.ExponentiallyDecayingSample;
-import com.yammer.metrics.stats.Sample;
-import com.yammer.metrics.stats.Snapshot;
+import com.codahale.metrics.ExponentiallyDecayingReservoir;
+import com.codahale.metrics.Reservoir;
+import com.codahale.metrics.Snapshot;
 
 /**
  * A histogram implementation that runs in constant space, and exports to hadoop2's metrics2 system.
@@ -43,7 +43,7 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
   protected final String name;
   protected final String desc;
-  private final Sample sample;
+  private final Reservoir reservoir;
   private final AtomicLong min;
   private final AtomicLong max;
   private final AtomicLong sum;
@@ -56,7 +56,7 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
   public MutableHistogram(String name, String description) {
     this.name = StringUtils.capitalize(name);
     this.desc = StringUtils.uncapitalize(description);
-    sample = new ExponentiallyDecayingSample(DEFAULT_SAMPLE_SIZE, DEFAULT_ALPHA);
+    reservoir = new ExponentiallyDecayingReservoir(DEFAULT_SAMPLE_SIZE, DEFAULT_ALPHA);
     count = new AtomicLong();
     min = new AtomicLong(Long.MAX_VALUE);
     max = new AtomicLong(Long.MIN_VALUE);
@@ -66,7 +66,7 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
   public void add(final long val) {
     setChanged();
     count.incrementAndGet();
-    sample.update(val);
+    reservoir.update(val);
     setMax(val);
     setMin(val);
     sum.getAndAdd(val);
@@ -119,9 +119,9 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
       updateSnapshotMetrics(metricsRecordBuilder);
     }
   }
 
   public void updateSnapshotMetrics(MetricsRecordBuilder metricsRecordBuilder) {
-    final Snapshot s = sample.getSnapshot();
+    final Snapshot s = reservoir.getSnapshot();
     metricsRecordBuilder.addCounter(Interns.info(name + NUM_OPS_METRIC_NAME, desc), count.get());
     metricsRecordBuilder.addGauge(Interns.info(name + MIN_METRIC_NAME, desc), getMin());
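
The Sample interface was renamed to Reservoir in 3.x with the same update()/getSnapshot() contract, so the histogram internals above change only in type names. A sketch of the renamed API; the size/alpha values below are the library defaults, standing in for HBase's DEFAULT_SAMPLE_SIZE and DEFAULT_ALPHA constants:

import com.codahale.metrics.ExponentiallyDecayingReservoir;
import com.codahale.metrics.Reservoir;
import com.codahale.metrics.Snapshot;

public class ReservoirSketch {
  public static void main(String[] args) {
    // 2.x: new ExponentiallyDecayingSample(size, alpha)
    Reservoir reservoir = new ExponentiallyDecayingReservoir(1028, 0.015);
    for (long v = 0; v < 1000; v++) {
      reservoir.update(v);
    }
    Snapshot s = reservoir.getSnapshot();
    System.out.println("median=" + s.getMedian()
        + ", 99th=" + s.get99thPercentile());
  }
}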

View File

@@ -234,7 +234,7 @@
       <version>${jersey.version}</version>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import com.google.common.base.Objects;
 import com.google.common.collect.Sets;
-import com.yammer.metrics.core.Histogram;
+import com.codahale.metrics.Histogram;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -73,12 +73,12 @@ public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase {
   private static final String NUM_RS_KEY = "numRs";
   private static final String NUM_RS_DEFAULT = "" + 3;
 
-  /** Extract a descriptive statistic from a {@link com.yammer.metrics.core.Histogram}. */
+  /** Extract a descriptive statistic from a {@link com.codahale.metrics.Histogram}. */
   private enum Stat {
     STDEV {
       @Override
       double apply(Histogram hist) {
-        return hist.stdDev();
+        return hist.getSnapshot().getStdDev();
       }
     },
     FOUR_9S {

View File

@@ -219,7 +219,7 @@ under the License.
   </supplement>
   <supplement>
     <project>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
       <licenses>

View File

@@ -435,7 +435,7 @@
       <optional>true</optional>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -37,7 +37,7 @@ org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator.Bucket;
 org.apache.hadoop.util.StringUtils;
-com.yammer.metrics.stats.Snapshot;
+com.codahale.metrics.Snapshot;
 </%import>
 <%java>
   BlockCache bc = cacheConfig == null ? null : cacheConfig.getBlockCache();

View File

@@ -32,7 +32,7 @@ org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo;
 org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
 org.apache.hadoop.hbase.util.DirectMemoryUtils;
 org.apache.hadoop.util.StringUtils;
-com.yammer.metrics.stats.Snapshot;
+com.codahale.metrics.Snapshot;
 java.lang.management.ManagementFactory;
 </%import>
 <div class="tabbable">

View File

@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.hfile;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.stats.Snapshot;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Snapshot;
 
 /**
  * Snapshot of block cache age in cache.
@@ -28,11 +28,9 @@ import com.codahale.metrics.Snapshot;
  */
 @JsonIgnoreProperties({"ageHistogram", "snapshot"})
 public class AgeSnapshot {
-  private final Histogram ageHistogram;
   private final Snapshot snapshot;
 
   AgeSnapshot(final Histogram ageHistogram) {
-    this.ageHistogram = ageHistogram;
     this.snapshot = ageHistogram.getSnapshot();
   }
 
@@ -57,18 +55,18 @@ public class AgeSnapshot {
   }
 
   public double getMean() {
-    return this.ageHistogram.mean();
+    return this.snapshot.getMean();
   }
 
   public double getMax() {
-    return ageHistogram.max();
+    return snapshot.getMax();
   }
 
   public double getMin() {
-    return ageHistogram.min();
+    return snapshot.getMin();
   }
 
   public double getStdDev() {
-    return ageHistogram.stdDev();
+    return snapshot.getStdDev();
   }
 }

View File

@@ -31,9 +31,11 @@ import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.SerializationConfig;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.stats.Snapshot;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.Snapshot;
+
+import static com.codahale.metrics.MetricRegistry.name;
 
 /**
  * Utilty for aggregating counts in CachedBlocks and toString/toJSON CachedBlocks and BlockCaches.
@@ -44,7 +46,7 @@ public class BlockCacheUtil {
   /**
    * Needed making histograms.
    */
-  private static final MetricsRegistry METRICS = new MetricsRegistry();
+  private static final MetricRegistry METRICS = new MetricRegistry();
 
   /**
    * Needed generating JSON.
@@ -189,7 +191,7 @@ public class BlockCacheUtil {
     private final long now = System.nanoTime();
     private final int max;
     public static final int DEFAULT_MAX = 100000;
 
     CachedBlocksByFile() {
       this(null);
     }
@@ -204,7 +206,7 @@ public class BlockCacheUtil {
      */
     private NavigableMap<String, NavigableSet<CachedBlock>> cachedBlockByFile =
         new ConcurrentSkipListMap<String, NavigableSet<CachedBlock>>();
-    Histogram age = METRICS.newHistogram(CachedBlocksByFile.class, "age");
+    Histogram age = METRICS.histogram(name(CachedBlocksByFile.class, "age"));
 
     /**
      * @param cb
@@ -274,11 +276,11 @@ public class BlockCacheUtil {
 
     @Override
     public String toString() {
-      Snapshot snapshot = this.age.getSnapshot();
-      return "count=" + count + ", dataBlockCount=" + this.dataBlockCount + ", size=" + size +
+      Snapshot snapshot = age.getSnapshot();
+      return "count=" + count + ", dataBlockCount=" + dataBlockCount + ", size=" + size +
         ", dataSize=" + getDataSize() +
-        ", mean age=" + this.age.mean() + ", stddev age=" + this.age.stdDev() +
-        ", min age=" + this.age.min() + ", max age=" + this.age.max() +
+        ", mean age=" + snapshot.getMean() + ", stddev age=" + snapshot.getStdDev() +
+        ", min age=" + snapshot.getMin() + ", max age=" + snapshot.getMax() +
         ", 95th percentile age=" + snapshot.get95thPercentile() +
         ", 99th percentile age=" + snapshot.get99thPercentile();
     }
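
The pattern here — a static per-class MetricRegistry, names built with the static MetricRegistry.name() helper, and every descriptive statistic read from a Snapshot rather than from the Histogram itself — repeats in CacheStats in the next file. Condensed into a sketch (the class is illustrative):

import static com.codahale.metrics.MetricRegistry.name;

import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;

public class AgeStatsSketch {
  private static final MetricRegistry METRICS = new MetricRegistry();

  private final Histogram age = METRICS.histogram(name(AgeStatsSketch.class, "age"));

  void observe(long ageNanos) {
    age.update(ageNanos);
  }

  @Override
  public String toString() {
    // 3.x moves mean/stddev/min/max from Histogram onto its Snapshot.
    Snapshot s = age.getSnapshot();
    return "mean age=" + s.getMean() + ", stddev age=" + s.getStdDev()
        + ", min age=" + s.getMin() + ", max age=" + s.getMax();
  }
}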

View File

@@ -22,8 +22,10 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.MetricsRegistry;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.MetricRegistry;
+
+import static com.codahale.metrics.MetricRegistry.name;
 
 /**
  * Class that implements cache metrics.
@@ -33,7 +35,7 @@ public class CacheStats {
   /**
    * Needed making histograms.
    */
-  private static final MetricsRegistry METRICS = new MetricsRegistry();
+  private static final MetricRegistry METRICS = new MetricRegistry();
 
   /** Sliding window statistics. The number of metric periods to include in
    * sliding window hit ratio calculations.
@@ -113,7 +115,7 @@ public class CacheStats {
     this.hitCachingCounts = initializeZeros(numPeriodsInWindow);
     this.requestCounts = initializeZeros(numPeriodsInWindow);
     this.requestCachingCounts = initializeZeros(numPeriodsInWindow);
-    this.ageAtEviction = METRICS.newHistogram(CacheStats.class, name + ".ageAtEviction");
+    this.ageAtEviction = METRICS.histogram(name(CacheStats.class, name + ".ageAtEviction"));
   }
 
   @Override
@Override @Override

View File

@@ -23,6 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.text.DateFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -32,6 +33,8 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.SortedMap;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -41,6 +44,7 @@ import org.apache.commons.cli.OptionGroup;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -73,12 +77,18 @@ import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.Metric;
-import com.yammer.metrics.core.MetricName;
-import com.yammer.metrics.core.MetricPredicate;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.reporting.ConsoleReporter;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.ConsoleReporter;
+import com.codahale.metrics.ScheduledReporter;
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.Timer;
+
+import static com.codahale.metrics.MetricRegistry.name;
 
 /**
  * Implements pretty-printing functionality for {@link HFile}s.
@@ -544,13 +554,17 @@ public class HFilePrettyPrinter extends Configured implements Tool {
   }
 
   private static class KeyValueStatsCollector {
-    private final MetricsRegistry metricsRegistry = new MetricsRegistry();
+    private final MetricRegistry metricsRegistry = new MetricRegistry();
     private final ByteArrayOutputStream metricsOutput = new ByteArrayOutputStream();
-    private final SimpleReporter simpleReporter = new SimpleReporter(metricsRegistry, new PrintStream(metricsOutput));
-    Histogram keyLen = metricsRegistry.newHistogram(HFilePrettyPrinter.class, "Key length");
-    Histogram valLen = metricsRegistry.newHistogram(HFilePrettyPrinter.class, "Val length");
-    Histogram rowSizeBytes = metricsRegistry.newHistogram(HFilePrettyPrinter.class, "Row size (bytes)");
-    Histogram rowSizeCols = metricsRegistry.newHistogram(HFilePrettyPrinter.class, "Row size (columns)");
+    private final SimpleReporter simpleReporter = SimpleReporter.forRegistry(metricsRegistry).
+        outputTo(new PrintStream(metricsOutput)).filter(MetricFilter.ALL).build();
+
+    Histogram keyLen = metricsRegistry.histogram(name(HFilePrettyPrinter.class, "Key length"));
+    Histogram valLen = metricsRegistry.histogram(name(HFilePrettyPrinter.class, "Val length"));
+    Histogram rowSizeBytes = metricsRegistry.histogram(
+        name(HFilePrettyPrinter.class, "Row size (bytes)"));
+    Histogram rowSizeCols = metricsRegistry.histogram(
+        name(HFilePrettyPrinter.class, "Row size (columns)"));
 
     long curRowBytes = 0;
     long curRowCols = 0;
@@ -600,9 +614,8 @@ public class HFilePrettyPrinter extends Configured implements Tool {
         return "no data available for statistics";
 
       // Dump the metrics to the output stream
-      simpleReporter.shutdown();
-      simpleReporter.run();
-      metricsRegistry.shutdown();
+      simpleReporter.stop();
+      simpleReporter.report();
 
       return
           metricsOutput.toString() +
@@ -610,35 +623,137 @@ public class HFilePrettyPrinter extends Configured implements Tool {
     }
   }
 
-  private static class SimpleReporter extends ConsoleReporter {
-    private final PrintStream out;
-
-    public SimpleReporter(MetricsRegistry metricsRegistry, PrintStream out) {
-      super(metricsRegistry, out, MetricPredicate.ALL);
-      this.out = out;
-    }
-
-    @Override
-    public void run() {
-      for (Map.Entry<String, SortedMap<MetricName, Metric>> entry : getMetricsRegistry().groupedMetrics(
-          MetricPredicate.ALL).entrySet()) {
-        try {
-          for (Map.Entry<MetricName, Metric> subEntry : entry.getValue().entrySet()) {
-            out.print("   " + subEntry.getKey().getName());
-            out.println(':');
-
-            subEntry.getValue().processWith(this, subEntry.getKey(), out);
-          }
-        } catch (Exception e) {
-          e.printStackTrace(out);
-        }
-      }
-    }
-
-    @Override
-    public void processHistogram(MetricName name, Histogram histogram, PrintStream stream) {
-      super.processHistogram(name, histogram, stream);
-      stream.printf(Locale.getDefault(), "             count = %d%n", histogram.count());
-    }
-  }
+  /**
+   * Almost identical to ConsoleReporter, but extending ScheduledReporter,
+   * as extending ConsoleReporter in this version of dropwizard is now too much trouble.
+   */
+  private static class SimpleReporter extends ScheduledReporter {
+    /**
+     * Returns a new {@link Builder} for {@link ConsoleReporter}.
+     *
+     * @param registry the registry to report
+     * @return a {@link Builder} instance for a {@link ConsoleReporter}
+     */
+    public static Builder forRegistry(MetricRegistry registry) {
+      return new Builder(registry);
+    }
+
+    /**
+     * A builder for {@link SimpleReporter} instances. Defaults to using the default locale and
+     * time zone, writing to {@code System.out}, converting rates to events/second, converting
+     * durations to milliseconds, and not filtering metrics.
+     */
+    public static class Builder {
+      private final MetricRegistry registry;
+      private PrintStream output;
+      private Locale locale;
+      private TimeZone timeZone;
+      private TimeUnit rateUnit;
+      private TimeUnit durationUnit;
+      private MetricFilter filter;
+
+      private Builder(MetricRegistry registry) {
+        this.registry = registry;
+        this.output = System.out;
+        this.locale = Locale.getDefault();
+        this.timeZone = TimeZone.getDefault();
+        this.rateUnit = TimeUnit.SECONDS;
+        this.durationUnit = TimeUnit.MILLISECONDS;
+        this.filter = MetricFilter.ALL;
+      }
+
+      /**
+       * Write to the given {@link PrintStream}.
+       *
+       * @param output a {@link PrintStream} instance.
+       * @return {@code this}
+       */
+      public Builder outputTo(PrintStream output) {
+        this.output = output;
+        return this;
+      }
+
+      /**
+       * Only report metrics which match the given filter.
+       *
+       * @param filter a {@link MetricFilter}
+       * @return {@code this}
+       */
+      public Builder filter(MetricFilter filter) {
+        this.filter = filter;
+        return this;
+      }
+
+      /**
+       * Builds a {@link ConsoleReporter} with the given properties.
+       *
+       * @return a {@link ConsoleReporter}
+       */
+      public SimpleReporter build() {
+        return new SimpleReporter(registry,
+                                  output,
+                                  locale,
+                                  timeZone,
+                                  rateUnit,
+                                  durationUnit,
+                                  filter);
+      }
+    }
+
+    private final PrintStream output;
+    private final Locale locale;
+    private final DateFormat dateFormat;
+
+    private SimpleReporter(MetricRegistry registry,
+                           PrintStream output,
+                           Locale locale,
+                           TimeZone timeZone,
+                           TimeUnit rateUnit,
+                           TimeUnit durationUnit,
+                           MetricFilter filter) {
+      super(registry, "simple-reporter", filter, rateUnit, durationUnit);
+      this.output = output;
+      this.locale = locale;
+
+      this.dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT,
+                                                       DateFormat.MEDIUM,
+                                                       locale);
+      dateFormat.setTimeZone(timeZone);
+    }
+
+    @Override
+    public void report(SortedMap<String, Gauge> gauges,
+                       SortedMap<String, Counter> counters,
+                       SortedMap<String, Histogram> histograms,
+                       SortedMap<String, Meter> meters,
+                       SortedMap<String, Timer> timers) {
+      // we know we only have histograms
+      if (!histograms.isEmpty()) {
+        for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
+          output.print("   " + StringUtils.substringAfterLast(entry.getKey(), "."));
+          output.println(':');
+          printHistogram(entry.getValue());
+        }
+        output.println();
+      }
+
+      output.println();
+      output.flush();
+    }
+
+    private void printHistogram(Histogram histogram) {
+      Snapshot snapshot = histogram.getSnapshot();
+      output.printf(locale, "               min = %d%n", snapshot.getMin());
+      output.printf(locale, "               max = %d%n", snapshot.getMax());
+      output.printf(locale, "              mean = %2.2f%n", snapshot.getMean());
+      output.printf(locale, "            stddev = %2.2f%n", snapshot.getStdDev());
+      output.printf(locale, "            median = %2.2f%n", snapshot.getMedian());
+      output.printf(locale, "              75%% <= %2.2f%n", snapshot.get75thPercentile());
+      output.printf(locale, "              95%% <= %2.2f%n", snapshot.get95thPercentile());
+      output.printf(locale, "              98%% <= %2.2f%n", snapshot.get98thPercentile());
+      output.printf(locale, "              99%% <= %2.2f%n", snapshot.get99thPercentile());
+      output.printf(locale, "            99.9%% <= %2.2f%n", snapshot.get999thPercentile());
+      output.printf(locale, "             count = %d%n", histogram.getCount());
+    }
   }
 }
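
SimpleReporter follows the standard 3.x reporter contract: forRegistry() returns a builder, build() produces the reporter, and callers either start(period, unit) it for scheduled output or invoke report() for a one-shot dump before stop(). The same flow with the stock ConsoleReporter, as a sketch:

import java.util.concurrent.TimeUnit;

import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;

public class ReporterLifecycleSketch {
  public static void main(String[] args) {
    MetricRegistry registry = new MetricRegistry();
    registry.histogram("keyLen").update(17);

    ConsoleReporter reporter = ConsoleReporter.forRegistry(registry)
        .outputTo(System.out)
        .filter(MetricFilter.ALL)
        .convertRatesTo(TimeUnit.SECONDS)
        .convertDurationsTo(TimeUnit.MILLISECONDS)
        .build();

    reporter.report();  // one-shot dump, as HFilePrettyPrinter now does
    reporter.stop();    // replaces the 2.x shutdown() calls
  }
}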

View File

@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import com.google.protobuf.InvalidProtocolBufferException;
-import com.yammer.metrics.core.MetricsRegistry;
+import com.codahale.metrics.MetricRegistry;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -339,7 +339,7 @@ public class TableMapReduceUtil {
 
     if (addDependencyJars) {
       addDependencyJars(job);
-      addDependencyJars(job.getConfiguration(), MetricsRegistry.class);
+      addDependencyJars(job.getConfiguration(), MetricRegistry.class);
     }
 
     resetCacheConfig(job.getConfiguration());
@@ -785,7 +785,7 @@ public class TableMapReduceUtil {
       com.google.protobuf.Message.class,
       com.google.common.collect.Lists.class,
       org.apache.htrace.Trace.class,
-      com.yammer.metrics.core.MetricsRegistry.class);
+      com.codahale.metrics.MetricRegistry.class);
   }
 
   /**
/** /**

View File

@@ -18,9 +18,9 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.stats.Sample;
-import com.yammer.metrics.stats.Snapshot;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Reservoir;
+import com.codahale.metrics.Snapshot;
 
 import java.lang.reflect.Constructor;
 import java.text.DecimalFormat;
@@ -37,13 +37,13 @@ public final class YammerHistogramUtils {
   private static DecimalFormat DOUBLE_FORMAT = new DecimalFormat("#0.00");
 
   /**
-   * Create a new {@link com.yammer.metrics.core.Histogram} instance. These constructors are
+   * Create a new {@link com.codahale.metrics.Histogram} instance. These constructors are
    * not public in 2.2.0, so we use reflection to find them.
    */
-  public static Histogram newHistogram(Sample sample) {
+  public static Histogram newHistogram(Reservoir sample) {
     try {
       Constructor<?> ctor =
-          Histogram.class.getDeclaredConstructor(Sample.class);
+          Histogram.class.getDeclaredConstructor(Reservoir.class);
       ctor.setAccessible(true);
       return (Histogram) ctor.newInstance(sample);
     } catch (Exception e) {
@@ -54,10 +54,10 @@ public final class YammerHistogramUtils {
   /** @return an abbreviated summary of {@code hist}. */
   public static String getShortHistogramReport(final Histogram hist) {
     Snapshot sn = hist.getSnapshot();
-    return "mean=" + DOUBLE_FORMAT.format(hist.mean()) +
-        ", min=" + DOUBLE_FORMAT.format(hist.min()) +
-        ", max=" + DOUBLE_FORMAT.format(hist.max()) +
-        ", stdDev=" + DOUBLE_FORMAT.format(hist.stdDev()) +
+    return "mean=" + DOUBLE_FORMAT.format(sn.getMean()) +
+        ", min=" + DOUBLE_FORMAT.format(sn.getMin()) +
+        ", max=" + DOUBLE_FORMAT.format(sn.getMax()) +
+        ", stdDev=" + DOUBLE_FORMAT.format(sn.getStdDev()) +
        ", 95th=" + DOUBLE_FORMAT.format(sn.get95thPercentile()) +
        ", 99th=" + DOUBLE_FORMAT.format(sn.get99thPercentile());
   }
@@ -65,10 +65,10 @@ public final class YammerHistogramUtils {
   /** @return a summary of {@code hist}. */
   public static String getHistogramReport(final Histogram hist) {
     Snapshot sn = hist.getSnapshot();
-    return ", mean=" + DOUBLE_FORMAT.format(hist.mean()) +
-        ", min=" + DOUBLE_FORMAT.format(hist.min()) +
-        ", max=" + DOUBLE_FORMAT.format(hist.max()) +
-        ", stdDev=" + DOUBLE_FORMAT.format(hist.stdDev()) +
+    return ", mean=" + DOUBLE_FORMAT.format(sn.getMean()) +
+        ", min=" + DOUBLE_FORMAT.format(sn.getMin()) +
+        ", max=" + DOUBLE_FORMAT.format(sn.getMax()) +
+        ", stdDev=" + DOUBLE_FORMAT.format(sn.getStdDev()) +
         ", 50th=" + DOUBLE_FORMAT.format(sn.getMedian()) +
         ", 75th=" + DOUBLE_FORMAT.format(sn.get75thPercentile()) +
         ", 95th=" + DOUBLE_FORMAT.format(sn.get95thPercentile()) +

View File

@@ -93,9 +93,9 @@ import org.apache.htrace.impl.ProbabilitySampler;
 import com.google.common.base.Objects;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.stats.Snapshot;
-import com.yammer.metrics.stats.UniformSample;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.UniformReservoir;
 
 /**
  * Script used evaluating HBase performance and scalability. Runs a HBase
@@ -1054,8 +1054,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
         this.connection = ConnectionFactory.createConnection(conf);
       }
       onStartup();
-      latency = YammerHistogramUtils.newHistogram(new UniformSample(1024 * 500));
-      valueSize = YammerHistogramUtils.newHistogram(new UniformSample(1024 * 500));
+      latency = YammerHistogramUtils.newHistogram(new UniformReservoir(1024 * 500));
+      valueSize = YammerHistogramUtils.newHistogram(new UniformReservoir(1024 * 500));
     }
 
     abstract void onStartup() throws IOException;
@@ -1121,21 +1121,21 @@ public class PerformanceEvaluation extends Configured implements Tool {
      */
     private void reportLatency() throws IOException {
       status.setStatus(testName + " latency log (microseconds), on " +
-          latency.count() + " measures");
+          latency.getCount() + " measures");
       reportHistogram(this.latency);
     }
 
     private void reportValueSize() throws IOException {
       status.setStatus(testName + " valueSize after " +
-          valueSize.count() + " measures");
+          valueSize.getCount() + " measures");
       reportHistogram(this.valueSize);
     }
 
     private void reportHistogram(final Histogram h) throws IOException {
       Snapshot sn = h.getSnapshot();
-      status.setStatus(testName + " Min = " + h.min());
-      status.setStatus(testName + " Avg = " + h.mean());
-      status.setStatus(testName + " StdDev = " + h.stdDev());
+      status.setStatus(testName + " Min = " + sn.getMin());
+      status.setStatus(testName + " Avg = " + sn.getMean());
+      status.setStatus(testName + " StdDev = " + sn.getStdDev());
       status.setStatus(testName + " 50th = " + sn.getMedian());
       status.setStatus(testName + " 75th = " + sn.get75thPercentile());
       status.setStatus(testName + " 95th = " + sn.get95thPercentile());
@@ -1143,7 +1143,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
       status.setStatus(testName + " 99.9th = " + sn.get999thPercentile());
       status.setStatus(testName + " 99.99th = " + sn.getValue(0.9999));
       status.setStatus(testName + " 99.999th = " + sn.getValue(0.99999));
-      status.setStatus(testName + " Max = " + h.max());
+      status.setStatus(testName + " Max = " + sn.getMax());
     }
 
     /**
/** /**

View File

@@ -40,9 +40,9 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.stats.Snapshot;
-import com.yammer.metrics.stats.UniformSample;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.UniformReservoir;
 
 @Category({MiscTests.class, SmallTests.class})
 public class TestPerformanceEvaluation {
@@ -125,16 +125,16 @@ public class TestPerformanceEvaluation {
     opts.setValueSize(valueSize);
     RandomReadTest rrt = new RandomReadTest(null, opts, null);
     Constructor<?> ctor =
-      Histogram.class.getDeclaredConstructor(com.yammer.metrics.stats.Sample.class);
+      Histogram.class.getDeclaredConstructor(com.codahale.metrics.Reservoir.class);
     ctor.setAccessible(true);
-    Histogram histogram = (Histogram)ctor.newInstance(new UniformSample(1024 * 500));
+    Histogram histogram = (Histogram)ctor.newInstance(new UniformReservoir(1024 * 500));
     for (int i = 0; i < 100; i++) {
       histogram.update(rrt.getValueLength(null));
     }
-    double stddev = histogram.stdDev();
-    assertTrue(stddev != 0 && stddev != 1.0);
-    assertTrue(histogram.stdDev() != 0);
     Snapshot snapshot = histogram.getSnapshot();
+    double stddev = snapshot.getStdDev();
+    assertTrue(stddev != 0 && stddev != 1.0);
+    assertTrue(snapshot.getStdDev() != 0);
     double median = snapshot.getMedian();
     assertTrue(median != 0 && median != 1 && median != valueSize);
   }

View File

@@ -150,16 +150,17 @@ public class TestClientPushback {
     MetricsConnection.RegionStats rsStats = conn.getConnectionMetrics().
         serverStats.get(server).get(regionName);
     assertEquals(name, rsStats.name);
-    assertEquals(rsStats.heapOccupancyHist.mean(),
+    assertEquals(rsStats.heapOccupancyHist.getSnapshot().getMean(),
         (double)regionStats.getHeapOccupancyPercent(), 0.1 );
-    assertEquals(rsStats.memstoreLoadHist.mean(),
+    assertEquals(rsStats.memstoreLoadHist.getSnapshot().getMean(),
         (double)regionStats.getMemstoreLoadPercent(), 0.1);
 
     MetricsConnection.RunnerStats runnerStats = conn.getConnectionMetrics().runnerStats;
-    assertEquals(runnerStats.delayRunners.count(), 1);
-    assertEquals(runnerStats.normalRunners.count(), 1);
-    assertEquals("", runnerStats.delayIntevalHist.mean(), (double)backoffTime, 0.1);
+    assertEquals(runnerStats.delayRunners.getCount(), 1);
+    assertEquals(runnerStats.normalRunners.getCount(), 1);
+    assertEquals("", runnerStats.delayIntevalHist.getSnapshot().getMean(),
+        (double)backoffTime, 0.1);
 
     latch.await(backoffTime * 2, TimeUnit.MILLISECONDS);
     assertNotEquals("AsyncProcess did not submit the work time", endTime.get(), 0);

View File

@@ -62,10 +62,11 @@ import org.apache.htrace.Trace;
 import org.apache.htrace.TraceScope;
 import org.apache.htrace.impl.ProbabilitySampler;
 
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.Meter;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.reporting.ConsoleReporter;
+import com.codahale.metrics.ConsoleReporter;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.MetricFilter;
 
 // imports for things that haven't moved from regionserver.wal yet.
 import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
@@ -73,6 +74,8 @@ import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 
+import static com.codahale.metrics.MetricRegistry.name;
+
 /**
  * This class runs performance benchmarks for {@link WAL}.
  * See usage for this tool by running:
@@ -81,20 +84,18 @@ import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 @InterfaceAudience.Private
 public final class WALPerformanceEvaluation extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(WALPerformanceEvaluation.class.getName());
-  private final MetricsRegistry metrics = new MetricsRegistry();
+
+  private final MetricRegistry metrics = new MetricRegistry();
   private final Meter syncMeter =
-    metrics.newMeter(WALPerformanceEvaluation.class, "syncMeter", "syncs", TimeUnit.MILLISECONDS);
-  private final Histogram syncHistogram =
-    metrics.newHistogram(WALPerformanceEvaluation.class, "syncHistogram", "nanos-between-syncs",
-      true);
-  private final Histogram syncCountHistogram =
-    metrics.newHistogram(WALPerformanceEvaluation.class, "syncCountHistogram", "countPerSync",
-      true);
-  private final Meter appendMeter =
-    metrics.newMeter(WALPerformanceEvaluation.class, "appendMeter", "bytes",
-      TimeUnit.MILLISECONDS);
+    metrics.meter(name(WALPerformanceEvaluation.class, "syncMeter", "syncs"));
+
+  private final Histogram syncHistogram = metrics.histogram(
+    name(WALPerformanceEvaluation.class, "syncHistogram", "nanos-between-syncs"));
+  private final Histogram syncCountHistogram = metrics.histogram(
+    name(WALPerformanceEvaluation.class, "syncCountHistogram", "countPerSync"));
+  private final Meter appendMeter = metrics.meter(
+    name(WALPerformanceEvaluation.class, "appendMeter", "bytes"));
   private final Histogram latencyHistogram =
-    metrics.newHistogram(WALPerformanceEvaluation.class, "latencyHistogram", "nanos", true);
+    metrics.histogram(name(WALPerformanceEvaluation.class, "latencyHistogram", "nanos"));
 
   private final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
@@ -333,7 +334,10 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
       benchmarks[i] = Trace.wrap(new WALPutBenchmark(regions[i], htd, numIterations, noSync,
           syncInterval, traceFreq));
     }
-    ConsoleReporter.enable(this.metrics, 30, TimeUnit.SECONDS);
+    ConsoleReporter reporter = ConsoleReporter.forRegistry(metrics).
+      outputTo(System.out).convertRatesTo(TimeUnit.SECONDS).filter(MetricFilter.ALL).build();
+    reporter.start(30, TimeUnit.SECONDS);
+
     long putTime = runBenchmark(benchmarks, numThreads);
     logBenchmarkResult("Summary: threads=" + numThreads + ", iterations=" + numIterations +
         ", syncInterval=" + syncInterval, numIterations * numThreads, putTime);

View File

@@ -154,8 +154,8 @@
                   <shadedPattern>org.apache.hadoop.hbase.shaded.com.lmax</shadedPattern>
                 </relocation>
                 <relocation>
-                  <pattern>com.yammer</pattern>
-                  <shadedPattern>org.apache.hadoop.hbase.shaded.com.yammer</shadedPattern>
+                  <pattern>com.dropwizard</pattern>
+                  <shadedPattern>org.apache.hadoop.hbase.shaded.com.dropwizard</shadedPattern>
                 </relocation>
                 <!-- top level io -->

View File

@@ -235,7 +235,7 @@
     </dependency>
     <!-- General dependencies -->
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -1181,7 +1181,7 @@
     <!-- Do not use versions earlier than 3.2.2 due to a security vulnerability -->
     <collections.version>3.2.2</collections.version>
     <httpclient.version>3.1</httpclient.version>
-    <metrics-core.version>2.2.0</metrics-core.version>
+    <metrics-core.version>3.1.2</metrics-core.version>
     <guava.version>12.0.1</guava.version>
     <jsr305.version>1.3.9</jsr305.version>
     <jackson.version>1.9.13</jackson.version>
@@ -1429,14 +1429,14 @@
       <version>${log4j.version}</version>
     </dependency>
     <!--This is not used by hbase directly. Used by thrift,
-        yammer and zk.-->
+        dropwizard and zk.-->
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
       <version>${slf4j.version}</version>
     </dependency>
     <dependency>
-      <groupId>com.yammer.metrics</groupId>
+      <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
       <version>${metrics-core.version}</version>
     </dependency>