HBASE-12067 Remove deprecated metrics classes.

Elliott Clark 2014-09-23 11:11:54 -07:00
parent 564b3d8bc2
commit d94f24b901
8 changed files with 0 additions and 1041 deletions

View File

@@ -30,7 +30,6 @@ org.apache.hadoop.hbase.HBaseConfiguration;
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo;
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
org.apache.hadoop.hbase.metrics.histogram.MetricsHistogram;
org.apache.hadoop.hbase.util.DirectMemoryUtils;
org.apache.hadoop.util.StringUtils;
com.yammer.metrics.stats.Snapshot;

View File

@@ -1,155 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hadoop.hbase.util.Counter;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import com.google.common.collect.Lists;
@Deprecated
public class ExactCounterMetric extends MetricsBase {
private static final int DEFAULT_TOP_N = 5;
// only publish stats on the topN items (defaults to DEFAULT_TOP_N)
private final int topN;
private final ConcurrentMap<String, Counter> counts = new ConcurrentHashMap<String, Counter>();
// all access to the 'counts' map should use this lock.
// take a write lock iff you want to guarantee exclusive access
// (the map stripes locks internally, so it's already thread safe -
// this lock is just so you can take a consistent snapshot of data)
private final ReadWriteLock lock;
/**
* Constructor to create a new counter metric
* @param nam the name to publish this metric under
* @param registry where the metrics object will be registered
* @param description metrics description
* @param topN how many 'keys' to publish metrics on
*/
public ExactCounterMetric(final String nam, final MetricsRegistry registry,
final String description, int topN) {
super(nam, description);
this.lock = new ReentrantReadWriteLock();
this.topN = topN;
if (registry != null) {
registry.add(nam, this);
}
}
/**
* Constructor creates a new ExactCounterMetric
* @param nam the name of the metrics to be used to publish the metric
* @param registry where the metrics object will be registered
*/
public ExactCounterMetric(final String nam, MetricsRegistry registry) {
this(nam, registry, NO_DESCRIPTION, DEFAULT_TOP_N);
}
/**
* Relies on an external lock on {@link #lock} for thread safety.
*/
private Counter getOrCreateCounter(String type){
Counter cnt = counts.get(type);
if (cnt == null){
cnt = new Counter();
counts.put(type, cnt);
}
return cnt;
}
public void update(String type) {
this.lock.readLock().lock();
try {
getOrCreateCounter(type).increment();
} finally {
this.lock.readLock().unlock();
}
}
public void update(String type, long count) {
this.lock.readLock().lock();
try {
getOrCreateCounter(type).add(count);
} finally {
this.lock.readLock().unlock();
}
}
public List<Pair<String, Long>> getTop(int n) {
final List<Pair<String, Long>> countsSnapshot =
Lists.newArrayListWithCapacity(this.counts.size());
// no updates are allowed while I'm holding this lock, so move fast
this.lock.writeLock().lock();
try {
for(Entry<String, Counter> entry : this.counts.entrySet()) {
countsSnapshot.add(Pair.newPair(entry.getKey(),
entry.getValue().get()));
}
} finally {
this.lock.writeLock().unlock();
}
Collections.sort(countsSnapshot, new Comparator<Pair<String, Long>>() {
@Override
public int compare(Pair<String, Long> a, Pair<String, Long> b) {
return b.getSecond().compareTo(a.getSecond());
}
});
return countsSnapshot.subList(0, Math.min(n, countsSnapshot.size()));
}
@Override
public void pushMetric(MetricsRecord mr) {
final List<Pair<String, Long>> topKeys = getTop(Integer.MAX_VALUE);
int sum = 0;
int counter = 0;
for (Pair<String, Long> keyCount : topKeys) {
counter++;
// only push stats on the topN keys
if (counter <= this.topN) {
mr.setMetric(getName() + "_" + keyCount.getFirst(),
keyCount.getSecond());
}
sum += keyCount.getSecond();
}
mr.setMetric(getName() + "_map_size", this.counts.size());
mr.setMetric(getName() + "_total_count", sum);
}
}
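The locking comments above describe an inverted reader/writer pattern: every update takes the shared read lock (the ConcurrentHashMap is already thread safe), while a consistent snapshot takes the exclusive write lock. A minimal standalone sketch of that pattern, using java.util.concurrent.atomic.AtomicLong in place of HBase's Counter; the class and method names here are illustrative, not part of this commit:

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class SnapshottableCounters {
  private final ConcurrentMap<String, AtomicLong> counts =
      new ConcurrentHashMap<String, AtomicLong>();
  // The map is already thread safe; the lock only coordinates "many updaters"
  // (shared read lock) against "one consistent snapshot" (exclusive write lock).
  private final ReadWriteLock lock = new ReentrantReadWriteLock();

  public void update(String key, long delta) {
    lock.readLock().lock();   // shared: any number of updates may run concurrently
    try {
      AtomicLong cnt = counts.get(key);
      if (cnt == null) {
        // putIfAbsent avoids the lost-update race of a plain get/put pair
        AtomicLong created = new AtomicLong();
        AtomicLong existing = counts.putIfAbsent(key, created);
        cnt = (existing == null) ? created : existing;
      }
      cnt.addAndGet(delta);
    } finally {
      lock.readLock().unlock();
    }
  }

  public Map<String, Long> snapshot() {
    lock.writeLock().lock();  // exclusive: no updates while the copy is taken
    try {
      Map<String, Long> copy = new HashMap<String, Long>();
      for (Map.Entry<String, AtomicLong> e : counts.entrySet()) {
        copy.put(e.getKey(), e.getValue().get());
      }
      return copy;
    } finally {
      lock.writeLock().unlock();
    }
  }
}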

View File

@@ -1,243 +0,0 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.management.AttributeNotFoundException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.ReflectionException;
import com.yammer.metrics.stats.Snapshot;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.histogram.MetricsHistogram;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
/**
* Extends the Hadoop MetricsDynamicMBeanBase class to provide JMX support for
* custom HBase MetricsBase implementations. MetricsDynamicMBeanBase ignores
* registered MetricsBase instances that are not instances of one of the
* org.apache.hadoop.metrics.util implementations.
*
*/
@Deprecated
@InterfaceAudience.Private
public class MetricsMBeanBase extends MetricsDynamicMBeanBase {
private static final Log LOG = LogFactory.getLog("org.apache.hadoop.hbase.metrics");
protected final MetricsRegistry registry;
protected final String description;
protected int registryLength;
/** HBase MetricsBase implementations that MetricsDynamicMBeanBase does
* not understand
*/
protected Map<String, MetricsBase> extendedAttributes =
new ConcurrentHashMap<String, MetricsBase>();
protected MBeanInfo extendedInfo;
protected MetricsMBeanBase( MetricsRegistry mr, String description ) {
super(copyMinusHBaseMetrics(mr), description);
this.registry = mr;
this.description = description;
this.init();
}
/*
* @param mr MetricsRegistry.
* @return A copy of the passed MetricsRegistry minus the hbase metrics
*/
private static MetricsRegistry copyMinusHBaseMetrics(final MetricsRegistry mr) {
MetricsRegistry copy = new MetricsRegistry();
for (MetricsBase metric : mr.getMetricsList()) {
if (metric instanceof MetricsRate || metric instanceof MetricsString ||
metric instanceof MetricsHistogram || metric instanceof ExactCounterMetric) {
continue;
}
copy.add(metric.getName(), metric);
}
return copy;
}
protected void init() {
List<MBeanAttributeInfo> attributes = new ArrayList<MBeanAttributeInfo>();
MBeanInfo parentInfo = super.getMBeanInfo();
List<String> parentAttributes = new ArrayList<String>();
for (MBeanAttributeInfo attr : parentInfo.getAttributes()) {
attributes.add(attr);
parentAttributes.add(attr.getName());
}
this.registryLength = this.registry.getMetricsList().size();
for (MetricsBase metric : this.registry.getMetricsList()) {
if (metric.getName() == null || parentAttributes.contains(metric.getName()))
continue;
// add on custom HBase metric types
if (metric instanceof MetricsRate) {
attributes.add( new MBeanAttributeInfo(metric.getName(),
"java.lang.Float", metric.getDescription(), true, false, false) );
extendedAttributes.put(metric.getName(), metric);
} else if (metric instanceof MetricsString) {
attributes.add( new MBeanAttributeInfo(metric.getName(),
"java.lang.String", metric.getDescription(), true, false, false) );
extendedAttributes.put(metric.getName(), metric);
LOG.info("MetricsString added: " + metric.getName());
} else if (metric instanceof MetricsHistogram) {
String metricName = metric.getName() + MetricsHistogram.NUM_OPS_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Long", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
metricName = metric.getName() + MetricsHistogram.MIN_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Long", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
metricName = metric.getName() + MetricsHistogram.MAX_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Long", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
metricName = metric.getName() + MetricsHistogram.MEAN_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Float", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
metricName = metric.getName() + MetricsHistogram.STD_DEV_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Float", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
metricName = metric.getName() + MetricsHistogram.MEDIAN_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Float", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
metricName = metric.getName() + MetricsHistogram.SEVENTY_FIFTH_PERCENTILE_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Float", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
metricName = metric.getName() + MetricsHistogram.NINETY_FIFTH_PERCENTILE_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Float", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
metricName = metric.getName() + MetricsHistogram.NINETY_NINETH_PERCENTILE_METRIC_NAME;
attributes.add(new MBeanAttributeInfo(metricName,
"java.lang.Float", metric.getDescription(), true, false, false));
extendedAttributes.put(metricName, metric);
}
// else, it's probably a Hadoop metric that's already registered. Skip it.
}
LOG.info("new MBeanInfo");
this.extendedInfo = new MBeanInfo( this.getClass().getName(),
this.description, attributes.toArray(new MBeanAttributeInfo[attributes.size()]),
parentInfo.getConstructors(), parentInfo.getOperations(),
parentInfo.getNotifications() );
}
private void checkAndUpdateAttributes() {
if (this.registryLength != this.registry.getMetricsList().size())
this.init();
}
@Override
public Object getAttribute( String name )
throws AttributeNotFoundException, MBeanException,
ReflectionException {
if (name == null) {
throw new IllegalArgumentException("Attribute name is NULL");
}
/*
* Ugly. Since the MetricsDynamicMBeanBase implementation is private,
* we need to first check the parent class for the attribute.
* In case the MetricsRegistry contents have changed, this will
* allow the parent to update its internal structures (which we rely on
* to update our own).
*/
try {
return super.getAttribute(name);
} catch (AttributeNotFoundException ex) {
checkAndUpdateAttributes();
MetricsBase metric = this.extendedAttributes.get(name);
if (metric != null) {
if (metric instanceof MetricsRate) {
return ((MetricsRate) metric).getPreviousIntervalValue();
} else if (metric instanceof MetricsString) {
return ((MetricsString)metric).getValue();
} else if (metric instanceof MetricsHistogram) {
MetricsHistogram hist = (MetricsHistogram) metric;
if (name.endsWith(MetricsHistogram.NUM_OPS_METRIC_NAME)) {
return hist.getCount();
} else if (name.endsWith(MetricsHistogram.MIN_METRIC_NAME)) {
return hist.getMin();
} else if (name.endsWith(MetricsHistogram.MAX_METRIC_NAME)) {
return hist.getMax();
} else if (name.endsWith(MetricsHistogram.MEAN_METRIC_NAME)) {
return (float) hist.getMean();
} else if (name.endsWith(MetricsHistogram.STD_DEV_METRIC_NAME)) {
return (float) hist.getStdDev();
} else if (name.endsWith(MetricsHistogram.MEDIAN_METRIC_NAME)) {
Snapshot s = hist.getSnapshot();
return (float) s.getMedian();
} else if (name.endsWith(MetricsHistogram.SEVENTY_FIFTH_PERCENTILE_METRIC_NAME)) {
Snapshot s = hist.getSnapshot();
return (float) s.get75thPercentile();
} else if (name.endsWith(MetricsHistogram.NINETY_FIFTH_PERCENTILE_METRIC_NAME)) {
Snapshot s = hist.getSnapshot();
return (float) s.get95thPercentile();
} else if (name.endsWith(MetricsHistogram.NINETY_NINETH_PERCENTILE_METRIC_NAME)) {
Snapshot s = hist.getSnapshot();
return (float) s.get99thPercentile();
}
} else {
LOG.warn( String.format("unknown metrics type %s for attribute %s",
metric.getClass().getName(), name) );
}
}
}
throw new AttributeNotFoundException();
}
@Override
public MBeanInfo getMBeanInfo() {
return this.extendedInfo;
}
}
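init() and getAttribute() above expand each MetricsHistogram into one JMX attribute per statistic, named metric name plus a MetricsHistogram suffix (for example _num_ops or _99th_percentile). A minimal sketch of reading such expanded attributes through the standard JMX API; the ObjectName and attribute names here are illustrative, not taken from this commit:

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class ReadHistogramAttributes {
  public static void main(String[] args) throws Exception {
    MBeanServer server = ManagementFactory.getPlatformMBeanServer();
    // Illustrative ObjectName; the actual domain and keys depend on how the bean was registered.
    ObjectName name = new ObjectName("hadoop:service=RegionServer,name=RegionServerStatistics");
    // Each histogram statistic is exposed as its own read-only attribute:
    // <metric name> + a MetricsHistogram suffix such as "_99th_percentile".
    Float p99 = (Float) server.getAttribute(name, "fsReadLatencyHistogram_99th_percentile");
    Long numOps = (Long) server.getAttribute(name, "fsReadLatencyHistogram_num_ops");
    System.out.println("p99=" + p99 + " numOps=" + numOps);
  }
}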

View File

@@ -1,89 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.util.StringUtils;
/**
* Publishes a rate based on a counter: increment the counter each time
* an event occurs (e.g. an RPC call) and the per-second rate over the
* polling interval is published.
*/
@Deprecated
@InterfaceAudience.Private
public class MetricsRate extends MetricsBase {
private static final Log LOG = LogFactory.getLog("org.apache.hadoop.hbase.metrics");
private int value;
private float prevRate;
private long ts;
public MetricsRate(final String name, final MetricsRegistry registry,
final String description) {
super(name, description);
this.value = 0;
this.prevRate = 0;
this.ts = System.currentTimeMillis();
registry.add(name, this);
}
public MetricsRate(final String name, final MetricsRegistry registry) {
this(name, registry, NO_DESCRIPTION);
}
public synchronized void inc(final int incr) {
value += incr;
}
public synchronized void inc() {
value++;
}
public synchronized void intervalHeartBeat() {
long now = System.currentTimeMillis();
long diff = (now-ts) / 1000;
if (diff < 1){
// ignore heartbeats less than a second apart so they don't skew the average
return;
}
this.prevRate = (float)value / diff;
this.value = 0;
this.ts = now;
}
@Override
public synchronized void pushMetric(final MetricsRecord mr) {
intervalHeartBeat();
try {
mr.setMetric(getName(), getPreviousIntervalValue());
} catch (Exception e) {
LOG.info("pushMetric failed for " + getName() + "\n" +
StringUtils.stringifyException(e));
}
}
public synchronized float getPreviousIntervalValue() {
return this.prevRate;
}
}
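A minimal usage sketch for the class above (the metric name, counts, and timings are illustrative): increment on every event, call intervalHeartBeat() once per polling interval, and read the events-per-second figure from getPreviousIntervalValue().

import org.apache.hadoop.hbase.metrics.MetricsRate;
import org.apache.hadoop.metrics.util.MetricsRegistry;

public class MetricsRateExample {
  public static void main(String[] args) throws InterruptedException {
    MetricsRegistry registry = new MetricsRegistry();
    MetricsRate rpcRate = new MetricsRate("rpcCalls", registry, "RPC calls per second");

    for (int i = 0; i < 500; i++) {
      rpcRate.inc();                       // one increment per handled event
    }
    Thread.sleep(1500);                    // intervals shorter than one second are ignored

    rpcRate.intervalHeartBeat();           // closes the interval and computes the rate
    System.out.println(rpcRate.getPreviousIntervalValue() + " calls/sec");
  }
}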

View File

@@ -1,59 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
/**
* Publishes a string to the metrics collector
*/
@Deprecated
@InterfaceAudience.Private
public class MetricsString extends MetricsBase {
private static final Log LOG = LogFactory.getLog("org.apache.hadoop.hbase.metrics");
private String value;
public MetricsString(final String name, final MetricsRegistry registry,
final String value) {
super(name, NO_DESCRIPTION);
this.value = value;
registry.add(name, this);
}
public MetricsString(final String name, final String description,
final MetricsRegistry registry, final String value) {
super(name, description);
this.value = value;
registry.add(name, this);
}
public String getValue() {
return this.value;
}
@Override
public synchronized void pushMetric(final MetricsRecord mr) {
// NOOP
// MetricsMBeanBase.getAttribute is where we actually fill the data
}
}

View File

@@ -1,141 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
import org.apache.hadoop.util.StringUtils;
/**
* This class extends MetricsTimeVaryingRate to let the metrics
* persist past a pushMetric() call
*/
@Deprecated
@InterfaceAudience.Private
public class PersistentMetricsTimeVaryingRate extends MetricsTimeVaryingRate {
protected static final Log LOG =
LogFactory.getLog("org.apache.hadoop.hbase.metrics");
protected boolean reset = false;
protected long lastOper = 0;
protected long totalOps = 0;
/**
* Constructor - create a new metric
* @param nam the name of the metrics to be used to publish the metric
* @param registry - where the metrics object will be registered
* @param description metrics description
*/
public PersistentMetricsTimeVaryingRate(final String nam,
final MetricsRegistry registry,
final String description) {
super(nam, registry, description);
}
/**
* Constructor - create a new metric
* @param nam the name of the metrics to be used to publish the metric
* @param registry - where the metrics object will be registered
*/
public PersistentMetricsTimeVaryingRate(final String nam,
MetricsRegistry registry) {
this(nam, registry, NO_DESCRIPTION);
}
/**
* Push updated metrics to the mr.
*
* Note this does NOT push to JMX
* (JMX gets the info via {@link #getPreviousIntervalAverageTime()} and
* {@link #getPreviousIntervalNumOps()}).
*
* @param mr the MetricsRecord to push the updated metrics to
*/
@Override
public synchronized void pushMetric(final MetricsRecord mr) {
// this will reset the currentInterval & num_ops += prevInterval()
super.pushMetric(mr);
// since we're retaining prevInterval(), we don't want to do the incr
// instead, we want to set that value because we have absolute ops
try {
mr.setMetric(getName() + "_num_ops", totalOps);
} catch (Exception e) {
LOG.info("pushMetric failed for " + getName() + "\n" +
StringUtils.stringifyException(e));
}
if (reset) {
// use the previous avg as our starting min/max/avg
super.inc(getPreviousIntervalAverageTime());
reset = false;
} else {
// maintain the stats that pushMetric() cleared
maintainStats();
}
}
/**
* Increment the metrics for numOps operations
* @param numOps - number of operations
* @param time - time for numOps operations
*/
@Override
public synchronized void inc(final int numOps, final long time) {
super.inc(numOps, time);
totalOps += numOps;
}
/**
* Increment the metrics for numOps operations
* @param time - time for numOps operations
*/
@Override
public synchronized void inc(final long time) {
super.inc(time);
++totalOps;
}
/**
* Roll over to a new interval.
* NOTE: does not reset numOps; it is an absolute value.
*/
public synchronized void resetMinMaxAvg() {
reset = true;
}
/* MetricsTimeVaryingRate will reset every time pushMetric() is called.
* This is annoying for long-running stats that might not get a single
* operation in the polling period. This function ensures that values
* for those stat entries don't get reset.
*/
protected void maintainStats() {
int curOps = this.getPreviousIntervalNumOps();
if (curOps > 0) {
long curTime = this.getPreviousIntervalAverageTime();
long totalTime = curTime * curOps;
if (curTime == 0 || totalTime / curTime == curOps) {
super.inc(curOps, totalTime);
} else {
LOG.info("Stats for " + this.getName() + " overflowed! resetting");
}
}
}
}
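pushMetric() above re-publishes the absolute operation count, and maintainStats() re-feeds the previous interval's aggregate (ops × average time) into the parent so the cleared stats persist; it guards the multiplication against long overflow by dividing the product back. A standalone sketch of that guard, assuming non-negative operands as in the metric code:

public class OverflowGuard {
  // a * b overflowed iff dividing the product by a non-zero operand
  // does not recover the other operand (valid here because both are >= 0).
  static boolean multiplyOverflows(long a, long b) {
    long product = a * b;
    return a != 0 && product / a != b;
  }

  public static void main(String[] args) {
    System.out.println(multiplyOverflows(4, 40));              // false: 160 fits in a long
    System.out.println(multiplyOverflows(Long.MAX_VALUE, 3));   // true: the product wraps around
    // On Java 8+ the same check can be done with Math.multiplyExact,
    // which throws ArithmeticException on overflow instead.
  }
}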

View File

@@ -1,240 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics.histogram;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import com.yammer.metrics.stats.Sample;
import com.yammer.metrics.stats.Snapshot;
import com.yammer.metrics.stats.UniformSample;
import com.yammer.metrics.stats.ExponentiallyDecayingSample;
@Deprecated
public class MetricsHistogram extends MetricsBase {
// 1028 items implies 99.9% CI w/ 5% margin of error
// (assuming a normal distribution on the underlying data)
private static final int DEFAULT_SAMPLE_SIZE = 1028;
// the bias towards sampling from more recent data.
// Per Cormode et al. an alpha of 0.015 strongly biases to the last 5 minutes
private static final double DEFAULT_ALPHA = 0.015;
public static final String NUM_OPS_METRIC_NAME = "_num_ops";
public static final String MIN_METRIC_NAME = "_min";
public static final String MAX_METRIC_NAME = "_max";
public static final String MEAN_METRIC_NAME = "_mean";
public static final String STD_DEV_METRIC_NAME = "_std_dev";
public static final String MEDIAN_METRIC_NAME = "_median";
public static final String SEVENTY_FIFTH_PERCENTILE_METRIC_NAME = "_75th_percentile";
public static final String NINETY_FIFTH_PERCENTILE_METRIC_NAME = "_95th_percentile";
public static final String NINETY_NINETH_PERCENTILE_METRIC_NAME = "_99th_percentile";
/**
* Constructor to create a new histogram metric
* @param nam the name to publish the metric under
* @param registry where the metrics object will be registered
* @param description the metric's description
* @param forwardBiased true if you want this histogram to give more
* weight to recent data,
* false if you want all data to have uniform weight
*/
public MetricsHistogram(final String nam, final MetricsRegistry registry,
final String description, boolean forwardBiased) {
super(nam, description);
this.min = new AtomicLong();
this.max = new AtomicLong();
this.sum = new AtomicLong();
this.sample = forwardBiased ?
new ExponentiallyDecayingSample(DEFAULT_SAMPLE_SIZE, DEFAULT_ALPHA)
: new UniformSample(DEFAULT_SAMPLE_SIZE);
this.variance = new AtomicReference<double[]>(new double[]{-1, 0});
this.count = new AtomicLong();
this.clear();
if (registry != null) {
registry.add(nam, this);
}
}
/**
* Constructor to create a new (forward biased) histogram metric
* @param nam the name to publish the metric under
* @param registry where the metrics object will be registered
* @param description the metric's description
*/
public MetricsHistogram(final String nam, MetricsRegistry registry,
final String description) {
this(nam, registry, description, true);
}
/**
* Constructor - create a new (forward biased) histogram metric
* @param nam the name of the metrics to be used to publish the metric
* @param registry - where the metrics object will be registered
*/
public MetricsHistogram(final String nam, MetricsRegistry registry) {
this(nam, registry, NO_DESCRIPTION);
}
private final Sample sample;
private final AtomicLong min;
private final AtomicLong max;
private final AtomicLong sum;
// running variance state for Welford's algorithm, which avoids
// letting floating point errors accumulate
private final AtomicReference<double[]> variance;
private final AtomicLong count;
/**
* Clears all recorded values.
*/
public void clear() {
this.sample.clear();
this.count.set(0);
this.max.set(Long.MIN_VALUE);
this.min.set(Long.MAX_VALUE);
this.sum.set(0);
variance.set(new double[]{-1, 0});
}
public void update(int val) {
update((long) val);
}
public void update(final long val) {
count.incrementAndGet();
sample.update(val);
setMax(val);
setMin(val);
sum.getAndAdd(val);
updateVariance(val);
}
private void setMax(final long potentialMax) {
boolean done = false;
while (!done) {
final long currentMax = max.get();
done = currentMax >= potentialMax
|| max.compareAndSet(currentMax, potentialMax);
}
}
private void setMin(long potentialMin) {
boolean done = false;
while (!done) {
final long currentMin = min.get();
done = currentMin <= potentialMin
|| min.compareAndSet(currentMin, potentialMin);
}
}
private void updateVariance(long value) {
boolean done = false;
while (!done) {
final double[] oldValues = variance.get();
final double[] newValues = new double[2];
if (oldValues[0] == -1) {
newValues[0] = value;
newValues[1] = 0;
} else {
final double oldM = oldValues[0];
final double oldS = oldValues[1];
final double newM = oldM + ((value - oldM) / getCount());
final double newS = oldS + ((value - oldM) * (value - newM));
newValues[0] = newM;
newValues[1] = newS;
}
done = variance.compareAndSet(oldValues, newValues);
}
}
public long getCount() {
return count.get();
}
public long getMax() {
if (getCount() > 0) {
return max.get();
}
return 0L;
}
public long getMin() {
if (getCount() > 0) {
return min.get();
}
return 0L;
}
public double getMean() {
if (getCount() > 0) {
return sum.get() / (double) getCount();
}
return 0.0;
}
public double getStdDev() {
if (getCount() > 0) {
return Math.sqrt(getVariance());
}
return 0.0;
}
public Snapshot getSnapshot() {
return sample.getSnapshot();
}
private double getVariance() {
if (getCount() <= 1) {
return 0.0;
}
return variance.get()[1] / (getCount() - 1);
}
@Override
public void pushMetric(MetricsRecord mr) {
final Snapshot s = this.getSnapshot();
mr.setMetric(getName() + NUM_OPS_METRIC_NAME, this.getCount());
mr.setMetric(getName() + MIN_METRIC_NAME, this.getMin());
mr.setMetric(getName() + MAX_METRIC_NAME, this.getMax());
mr.setMetric(getName() + MEAN_METRIC_NAME, (float) this.getMean());
mr.setMetric(getName() + STD_DEV_METRIC_NAME, (float) this.getStdDev());
mr.setMetric(getName() + MEDIAN_METRIC_NAME, (float) s.getMedian());
mr.setMetric(getName() + SEVENTY_FIFTH_PERCENTILE_METRIC_NAME,
(float) s.get75thPercentile());
mr.setMetric(getName() + NINETY_FIFTH_PERCENTILE_METRIC_NAME,
(float) s.get95thPercentile());
mr.setMetric(getName() + NINETY_NINETH_PERCENTILE_METRIC_NAME,
(float) s.get99thPercentile());
}
}
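updateVariance() above is Welford's online algorithm: the two-element array holds the running mean M and the running sum of squared deviations S, and the reported variance is S / (n - 1). A minimal single-threaded sketch of the same update (dropping the CAS loop), as a reference for the formulas:

public class WelfordVariance {
  private long n;      // number of samples
  private double mean; // running mean (M)
  private double s;    // running sum of squared deviations (S)

  public void update(double x) {
    n++;
    double oldMean = mean;
    mean = oldMean + (x - oldMean) / n;   // M_n = M_{n-1} + (x - M_{n-1}) / n
    s = s + (x - oldMean) * (x - mean);   // S_n = S_{n-1} + (x - M_{n-1}) * (x - M_n)
  }

  public double variance() {
    return n > 1 ? s / (n - 1) : 0.0;     // unbiased sample variance
  }

  public static void main(String[] args) {
    WelfordVariance w = new WelfordVariance();
    for (double x : new double[] {2, 4, 4, 4, 5, 5, 7, 9}) {
      w.update(x);
    }
    System.out.println(w.variance());     // 4.571..., the sample variance of the eight values
  }
}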

View File

@@ -1,113 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import static org.mockito.Matchers.anyFloat;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import java.util.Random;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.metrics.histogram.MetricsHistogram;
import org.apache.hadoop.metrics.MetricsRecord;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.yammer.metrics.stats.Snapshot;
@SuppressWarnings("deprecation")
@Category({MiscTests.class, SmallTests.class})
public class TestMetricsHistogram {
@Test
public void testBasicUniform() {
MetricsHistogram h = new MetricsHistogram("testHistogram", null);
for (int i = 0; i < 100; i++) {
h.update(i);
}
Assert.assertEquals(100, h.getCount());
Assert.assertEquals(0, h.getMin());
Assert.assertEquals(99, h.getMax());
Assert.assertEquals(49.5d, h.getMean(), 0.01);
}
@Test
public void testSnapshotPercentiles() {
final MetricsHistogram h = new MetricsHistogram("testHistogram", null);
final long[] data = genRandomData(h);
final Snapshot s = h.getSnapshot();
assertPercentile(data, 50, s.getMedian());
assertPercentile(data, 75, s.get75thPercentile());
assertPercentile(data, 95, s.get95thPercentile());
assertPercentile(data, 98, s.get98thPercentile());
assertPercentile(data, 99, s.get99thPercentile());
assertPercentile(data, 99.9, s.get999thPercentile());
}
@Test
public void testPushMetric() {
final MetricsHistogram h = new MetricsHistogram("testHistogram", null);
genRandomData(h);
MetricsRecord mr = mock(MetricsRecord.class);
h.pushMetric(mr);
verify(mr).setMetric("testHistogram_num_ops", 10000L);
verify(mr).setMetric(eq("testHistogram_min"), anyLong());
verify(mr).setMetric(eq("testHistogram_max"), anyLong());
verify(mr).setMetric(eq("testHistogram_mean"), anyFloat());
verify(mr).setMetric(eq("testHistogram_std_dev"), anyFloat());
verify(mr).setMetric(eq("testHistogram_median"), anyFloat());
verify(mr).setMetric(eq("testHistogram_75th_percentile"), anyFloat());
verify(mr).setMetric(eq("testHistogram_95th_percentile"), anyFloat());
verify(mr).setMetric(eq("testHistogram_99th_percentile"), anyFloat());
}
private void assertPercentile(long[] data, double percentile, double value) {
int count = 0;
for (long v : data) {
if (v < value) {
count++;
}
}
Assert.assertEquals("Wrong " + percentile + " percentile",
(int)(percentile / 100), count / data.length);
}
private long[] genRandomData(final MetricsHistogram h) {
final Random r = new Random();
final long[] data = new long[10000];
for (int i = 0; i < data.length; i++) {
data[i] = (long) (r.nextGaussian() * 10000);
h.update(data[i]);
}
return data;
}
}
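assertPercentile() checks that roughly percentile% of the raw samples fall below the histogram's estimate; since the estimate comes from a 1028-entry reservoir rather than the full data set, it can only be expected to be close, not exact. A small sketch of the same check against an exact percentile taken from a sorted copy of the data; the tolerance and sample generation mirror the test above and are illustrative:

import java.util.Arrays;
import java.util.Random;

public class PercentileCheck {
  // Fraction of samples strictly below the estimate; should be close to percentile / 100.
  static double fractionBelow(long[] data, double estimate) {
    int count = 0;
    for (long v : data) {
      if (v < estimate) {
        count++;
      }
    }
    return (double) count / data.length;
  }

  public static void main(String[] args) {
    Random r = new Random();
    long[] data = new long[10000];
    for (int i = 0; i < data.length; i++) {
      data[i] = (long) (r.nextGaussian() * 10000);
    }
    // Exact 95th percentile from the sorted data, for comparison.
    long[] sorted = data.clone();
    Arrays.sort(sorted);
    long exactP95 = sorted[(int) Math.ceil(0.95 * sorted.length) - 1];
    System.out.println("fraction below exact p95 = " + fractionBelow(data, exactP95)); // ~0.95
  }
}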