HBASE-15742 Reduce allocation of objects in metrics (Phil Yang)

This commit is contained in:
tedyu 2016-05-03 09:13:38 -07:00
parent a7b31f74db
commit 8f1deac1f8
14 changed files with 225 additions and 21 deletions

View File

@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricHistogram;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MutableFastCounter;
@InterfaceAudience.Private

View File

@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.master;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
/**
* Hadoop2 implementation of MetricsMasterSource.

View File

@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.master;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MutableFastCounter;
/**

View File

@ -23,9 +23,9 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
@InterfaceAudience.Private
public class MetricsStochasticBalancerSourceImpl extends MetricsBalancerSourceImpl implements

View File

@ -0,0 +1,104 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsTag;
/**
 * Helpers to create interned metrics info.
 * <p>
 * Interning keeps a single shared {@link MetricsInfo} / {@link MetricsTag}
 * instance per distinct key so that repeated metrics snapshots do not allocate
 * duplicate objects on every collection cycle.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class Interns {

  // Outer cache keyed by metric name; entries expire a day after last access
  // so metrics that stop being reported do not accumulate forever.
  private static LoadingCache<String, ConcurrentHashMap<String, MetricsInfo>> infoCache =
      CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.DAYS)
      .build(new CacheLoader<String, ConcurrentHashMap<String, MetricsInfo>>() {
        public ConcurrentHashMap<String, MetricsInfo> load(String key) {
          return new ConcurrentHashMap<String, MetricsInfo>();
        }
      });

  // Tags are cached per tag-info object, keyed by the tag value.
  private static LoadingCache<MetricsInfo, ConcurrentHashMap<String, MetricsTag>> tagCache =
      CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.DAYS)
      .build(new CacheLoader<MetricsInfo, ConcurrentHashMap<String, MetricsTag>>() {
        public ConcurrentHashMap<String, MetricsTag> load(MetricsInfo key) {
          return new ConcurrentHashMap<String, MetricsTag>();
        }
      });

  private Interns(){}

  /**
   * Get a metric info object.
   *
   * @param name of the metric
   * @param description of the metric
   * @return an interned metric info object
   */
  public static MetricsInfo info(String name, String description) {
    ConcurrentHashMap<String, MetricsInfo> map = infoCache.getUnchecked(name);
    MetricsInfo info = map.get(description);
    if (info == null) {
      // putIfAbsent rather than put: under a race two threads may both build
      // a candidate, but every caller ends up with the one interned instance.
      info = new MetricsInfoImpl(name, description);
      MetricsInfo existing = map.putIfAbsent(description, info);
      if (existing != null) {
        info = existing;
      }
    }
    return info;
  }

  /**
   * Get a metrics tag.
   *
   * @param info of the tag
   * @param value of the tag
   * @return an interned metrics tag
   */
  public static MetricsTag tag(MetricsInfo info, String value) {
    ConcurrentHashMap<String, MetricsTag> map = tagCache.getUnchecked(info);
    MetricsTag tag = map.get(value);
    if (tag == null) {
      // Same race-safe interning as info(): first writer wins, losers adopt
      // the already-published tag.
      tag = new MetricsTag(info, value);
      MetricsTag existing = map.putIfAbsent(value, tag);
      if (existing != null) {
        tag = existing;
      }
    }
    return tag;
  }

  /**
   * Get a metrics tag.
   *
   * @param name of the tag
   * @param description of the tag
   * @param value of the tag
   * @return an interned metrics tag
   */
  public static MetricsTag tag(String name, String description, String value) {
    return tag(info(name, description), value);
  }
}

View File

@ -0,0 +1,65 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsInfo;
/**
 * Simple immutable {@link MetricsInfo} implementation: a (name, description)
 * pair with value-based equality against any other {@link MetricsInfo}.
 */
@InterfaceAudience.Private
class MetricsInfoImpl implements MetricsInfo {
  private final String name;
  private final String description;

  MetricsInfoImpl(String name, String description) {
    // Both parts are mandatory; fail fast on construction.
    this.name = Preconditions.checkNotNull(name, "name");
    this.description = Preconditions.checkNotNull(description, "description");
  }

  @Override public String name() {
    return name;
  }

  @Override public String description() {
    return description;
  }

  @Override public boolean equals(Object obj) {
    // Compare against the interface so any MetricsInfo with matching
    // name/description is considered equal, not just this implementation.
    if (!(obj instanceof MetricsInfo)) {
      return false;
    }
    MetricsInfo that = (MetricsInfo) obj;
    return Objects.equal(name, that.name())
        && Objects.equal(description, that.description());
  }

  @Override public int hashCode() {
    return Objects.hashCode(name, description);
  }

  @Override public String toString() {
    return Objects.toStringHelper(this)
        .add("name", name)
        .add("description", description)
        .toString();
  }
}

View File

@ -27,10 +27,10 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.impl.JmxCacheBuster;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
@InterfaceAudience.Private

View File

@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricHistogram;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MutableFastCounter;
/**

View File

@ -23,10 +23,10 @@ import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricHistogram;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MutableFastCounter;
@InterfaceAudience.Private

View File

@ -24,9 +24,9 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
@InterfaceAudience.Private
public class MetricsTableAggregateSourceImpl extends BaseSourceImpl

View File

@ -24,9 +24,9 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
import org.apache.hadoop.metrics2.lib.Interns;
@InterfaceAudience.Private
public class MetricsTableSourceImpl implements MetricsTableSource {

View File

@ -24,6 +24,7 @@ import java.util.concurrent.ConcurrentMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.metrics2.lib;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.hbase.util.Counter;
import org.apache.hadoop.hbase.util.FastLongHistogram;
import org.apache.hadoop.metrics2.MetricHistogram;
@ -41,6 +42,20 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
protected final Counter counter = new Counter(0);
private boolean metricsInfoStringInited = false;
private String NUM_OPS_METRIC;
private String MIN_METRIC;
private String MAX_METRIC;
private String MEAN_METRIC;
private String MEDIAN_METRIC;
private String TWENTY_FIFTH_PERCENTILE_METRIC;
private String SEVENTY_FIFTH_PERCENTILE_METRIC;
private String NINETIETH_PERCENTILE_METRIC;
private String NINETY_FIFTH_PERCENTILE_METRIC;
private String NINETY_EIGHTH_PERCENTILE_METRIC;
private String NINETY_NINETH_PERCENTILE_METRIC;
private String NINETY_NINE_POINT_NINETH_PERCENTILE_METRIC;
public MutableHistogram(MetricsInfo info) {
this(info.name(), info.description());
}
@ -72,29 +87,47 @@ public class MutableHistogram extends MutableMetric implements MetricHistogram {
protected void updateSnapshotMetrics(MetricsRecordBuilder metricsRecordBuilder,
FastLongHistogram histo) {
metricsRecordBuilder.addCounter(Interns.info(name + NUM_OPS_METRIC_NAME, desc), counter.get());
metricsRecordBuilder.addGauge(Interns.info(name + MIN_METRIC_NAME, desc), histo.getMin());
metricsRecordBuilder.addGauge(Interns.info(name + MAX_METRIC_NAME, desc), histo.getMax());
metricsRecordBuilder.addGauge(Interns.info(name + MEAN_METRIC_NAME, desc), histo.getMean());
if (!metricsInfoStringInited) {
NUM_OPS_METRIC = name + NUM_OPS_METRIC_NAME;
MIN_METRIC = name + MIN_METRIC_NAME;
MAX_METRIC = name + MAX_METRIC_NAME;
MEAN_METRIC = name + MEAN_METRIC_NAME;
MEDIAN_METRIC = name + MEDIAN_METRIC_NAME;
TWENTY_FIFTH_PERCENTILE_METRIC = name + TWENTY_FIFTH_PERCENTILE_METRIC_NAME;
SEVENTY_FIFTH_PERCENTILE_METRIC = name + SEVENTY_FIFTH_PERCENTILE_METRIC_NAME;
NINETIETH_PERCENTILE_METRIC = name + NINETIETH_PERCENTILE_METRIC_NAME;
NINETY_FIFTH_PERCENTILE_METRIC = name + NINETY_FIFTH_PERCENTILE_METRIC_NAME;
NINETY_EIGHTH_PERCENTILE_METRIC = name + NINETY_EIGHTH_PERCENTILE_METRIC_NAME;
NINETY_NINETH_PERCENTILE_METRIC = name + NINETY_NINETH_PERCENTILE_METRIC_NAME;
NINETY_NINE_POINT_NINETH_PERCENTILE_METRIC = name +
NINETY_NINE_POINT_NINETH_PERCENTILE_METRIC_NAME;
metricsInfoStringInited = true;
}
metricsRecordBuilder.addCounter(Interns.info(NUM_OPS_METRIC, desc), counter.get());
metricsRecordBuilder.addGauge(Interns.info(MIN_METRIC, desc), histo.getMin());
metricsRecordBuilder.addGauge(Interns.info(MAX_METRIC, desc), histo.getMax());
metricsRecordBuilder.addGauge(Interns.info(MEAN_METRIC, desc), histo.getMean());
long[] percentiles = histo.getQuantiles();
metricsRecordBuilder.addGauge(Interns.info(name + TWENTY_FIFTH_PERCENTILE_METRIC_NAME, desc),
metricsRecordBuilder.addGauge(Interns.info(TWENTY_FIFTH_PERCENTILE_METRIC, desc),
percentiles[0]);
metricsRecordBuilder.addGauge(Interns.info(name + MEDIAN_METRIC_NAME, desc),
metricsRecordBuilder.addGauge(Interns.info(MEDIAN_METRIC, desc),
percentiles[1]);
metricsRecordBuilder.addGauge(Interns.info(name + SEVENTY_FIFTH_PERCENTILE_METRIC_NAME, desc),
metricsRecordBuilder.addGauge(Interns.info(SEVENTY_FIFTH_PERCENTILE_METRIC, desc),
percentiles[2]);
metricsRecordBuilder.addGauge(Interns.info(name + NINETIETH_PERCENTILE_METRIC_NAME, desc),
metricsRecordBuilder.addGauge(Interns.info(NINETIETH_PERCENTILE_METRIC, desc),
percentiles[3]);
metricsRecordBuilder.addGauge(Interns.info(name + NINETY_FIFTH_PERCENTILE_METRIC_NAME, desc),
metricsRecordBuilder.addGauge(Interns.info(NINETY_FIFTH_PERCENTILE_METRIC, desc),
percentiles[4]);
metricsRecordBuilder.addGauge(Interns.info(name + NINETY_EIGHTH_PERCENTILE_METRIC_NAME, desc),
metricsRecordBuilder.addGauge(Interns.info(NINETY_EIGHTH_PERCENTILE_METRIC, desc),
percentiles[5]);
metricsRecordBuilder.addGauge(Interns.info(name + NINETY_NINETH_PERCENTILE_METRIC_NAME, desc),
metricsRecordBuilder.addGauge(Interns.info(NINETY_NINETH_PERCENTILE_METRIC, desc),
percentiles[6]);
metricsRecordBuilder.addGauge(
Interns.info(name + NINETY_NINE_POINT_NINETH_PERCENTILE_METRIC_NAME, desc),
Interns.info(NINETY_NINE_POINT_NINETH_PERCENTILE_METRIC, desc),
percentiles[7]);
}
}

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.hbase.util.FastLongHistogram;
import org.apache.hadoop.metrics2.MetricHistogram;
import org.apache.hadoop.metrics2.MetricsInfo;