HBASE-6411 Move Master Metrics to metrics 2 (Alex Baranau)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1368598 13f79535-47bb-0310-9956-ffa450edef68
Zhihong Yu 2012-08-02 16:58:35 +00:00
parent 6f542792e3
commit efe471f74b
37 changed files with 1511 additions and 490 deletions

View File

@ -0,0 +1,11 @@
# syntax: [prefix].[source|sink].[instance].[options]
# See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
# default sampling period
*.period=10
# syntax: [prefix].[source|sink|jmx].[instance].[options]
# See package.html for org.apache.hadoop.metrics2 for details
*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
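The properties above only set the sink class and the default sampling period; to actually emit anything the sink still needs an output target. A minimal sketch of what that could look like, assuming the metrics system is initialized under the "hbase" prefix (HBASE_METRICS_SYSTEM_NAME in BaseMetricsSourceImpl below) and that the standard FileSink "filename" option is available; the file name here is purely illustrative:

# hypothetical example: write everything collected under the "hbase" prefix
# to a local file, sampled every *.period seconds
hbase.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
hbase.sink.file.filename=hbase-metrics.out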

View File

@ -58,6 +58,11 @@
</build>
<dependencies>
<!-- General dependencies -->
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,83 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.master.metrics.MasterMetricsSource;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.ServiceLoader;
/**
* Factory for classes supplied by hadoop compatibility modules.
*/
public class CompatibilitySingletonFactory {
private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class);
public static final String EXCEPTION_START = "Could not create ";
public static final String EXCEPTION_END = " Is the hadoop compatibility jar on the classpath?";
private static final Map<Class, Object> instances = new HashMap<Class, Object>();
/**
* Get the singleton instance of any class supplied by the compatibility jars.
*
* @return the singleton
*/
public static synchronized <T> T getInstance(Class<T> klass) {
T instance = (T) instances.get(klass);
if (instance == null) {
try {
ServiceLoader<T> loader = ServiceLoader.load(klass);
Iterator<T> it = loader.iterator();
instance = it.next();
if (it.hasNext()) {
StringBuilder msg = new StringBuilder();
msg.append("ServiceLoader provided more than one implementation for class: ")
.append(klass)
.append(", using implementation: ").append(instance.getClass())
.append(", other implementations: {");
while (it.hasNext()) {
msg.append(it.next()).append(" ");
}
msg.append("}");
LOG.warn(msg);
}
} catch (Exception e) {
throw new RuntimeException(createExceptionString(klass), e);
} catch (Error e) {
throw new RuntimeException(createExceptionString(klass), e);
}
// If there was nothing returned and no exception then throw an exception.
if (instance == null) {
throw new RuntimeException(createExceptionString(klass));
}
instances.put(klass, instance);
}
return instance;
}
private static String createExceptionString(Class klass) {
return EXCEPTION_START + klass.toString() + EXCEPTION_END;
}
}
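As a usage sketch (not part of this change): callers ask the factory for the interface type and get back whichever implementation the compatibility jar on the classpath registered through META-INF/services, for example:

// hedged example: obtaining the master metrics source through the factory
MasterMetricsSource source =
    CompatibilitySingletonFactory.getInstance(MasterMetricsSource.class);
source.incRequests(1);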

View File

@ -0,0 +1,67 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
import org.apache.hadoop.hbase.metrics.BaseMetricsSource;
/**
* Interface that classes that expose metrics about the master will implement.
*/
public interface MasterMetricsSource extends BaseMetricsSource {
/**
* The name of the metrics
*/
public static final String METRICS_NAME = "HMaster";
/**
* The name of the metrics context that metrics will be under.
*/
public static final String METRICS_CONTEXT = "HMaster,sub=Dynamic";
/**
* Description
*/
public static final String METRICS_DESCRIPTION = "Metrics about HBase master server";
/**
* Increment the number of requests the cluster has seen.
* @param inc Amount to increment the total by.
*/
public void incRequests(final int inc);
/**
* Set the number of regions in transition.
* @param ritCount count of the regions in transition.
*/
public void setRIT(int ritCount);
/**
* Set the count of the number of regions that have been in transition over the threshold time.
* @param ritCountOverThreshold number of regions in transition for longer than threshold.
*/
public void setRITCountOverThreshold(int ritCountOverThreshold);
/**
* Set the oldest region in transition.
* @param age age of the oldest RIT.
*/
public void setRITOldestAge(long age);
}
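A hedged sketch of how the master side might drive these setters; the helper below is hypothetical and the region-in-transition numbers would come from the master's assignment tracking, which is not part of this diff:

// hypothetical caller; ritCount, ritOverThreshold and oldestRITAgeMs are assumed inputs
void updateRITMetrics(MasterMetricsSource source,
                      int ritCount, int ritOverThreshold, long oldestRITAgeMs) {
  source.setRIT(ritCount);
  source.setRITCountOverThreshold(ritOverThreshold);
  source.setRITOldestAge(oldestRITAgeMs);
}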

View File

@ -0,0 +1,38 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import javax.management.ObjectName;
/**
* Object that will register an mbean with the underlying metrics implementation.
*/
public interface MBeanSource {
/**
* Register an mbean with the underlying metrics system
* @param serviceName Metrics service/system name
* @param metricsName name of the metrics object to expose
* @param theMbean the actual MBean
* @return ObjectName from jmx
*/
public ObjectName register(String serviceName, String metricsName,
Object theMbean);
}
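A short hedged example of a caller of this interface, again going through CompatibilitySingletonFactory; the service/metrics names and the MBean instance are stand-ins:

// hypothetical example: expose an arbitrary MBean through the compat layer
MBeanSource mbeanSource = CompatibilitySingletonFactory.getInstance(MBeanSource.class);
ObjectName registered = mbeanSource.register("HBase", "Master", theMbeanInstance);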

View File

@ -25,6 +25,19 @@ import org.apache.hadoop.hbase.metrics.BaseMetricsSource;
* hadoop2's metrics2 classes and publishing.
*/
public interface ReplicationMetricsSource extends BaseMetricsSource {
//Empty interface so that ServiceLoader can find the right implementation.
/**
* The name of the metrics
*/
public static final String METRICS_NAME = "ReplicationMetrics";
/**
* The name of the metrics context that metrics will be under.
*/
public static final String METRICS_CONTEXT = "replicationmetrics";
/**
* A description.
*/
public static final String METRICS_DESCRIPTION = "Metrics about HBase replication";
}

View File

@ -1,55 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver.metrics;
import java.util.ServiceLoader;
/**
* Class to load ReplicationMetricsSource from the class path. Will only return a singleton
* instance.
*/
public class ReplicationMetricsSourceFactory {
private static ReplicationMetricsSource rms = null;
public static final String EXCEPTION_STRING = "Could not create a Replication metrics source. " +
"Is the hadoop compatibility jar on the classpath?";
/**
* Get the singleton instance of ReplicationMetricsSource
*
* @return the singleton
*/
public static synchronized ReplicationMetricsSource getInstance() {
if (rms == null) {
try {
rms = ServiceLoader.load(ReplicationMetricsSource.class).iterator().next();
} catch (Exception e) {
throw new RuntimeException(EXCEPTION_STRING, e);
} catch (Error e) {
throw new RuntimeException(EXCEPTION_STRING, e);
}
// If there was nothing returned and no exception then throw an exception.
if (rms == null) {
throw new RuntimeException(EXCEPTION_STRING);
}
}
return rms;
}
}

View File

@ -0,0 +1,35 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.junit.Test;
/**
* Test for the CompatibilitySingletonFactory and building MasterMetricsSource
*/
public class MasterMetricsSourceFactoryTest {
@Test(expected=RuntimeException.class)
public void testGetInstanceNoHadoopCompat() throws Exception {
//This should throw an exception because there is no compat lib on the class path.
CompatibilitySingletonFactory.getInstance(MasterMetricsSource.class);
}
}

View File

@ -18,17 +18,17 @@
package org.apache.hadoop.hbase.replication.regionserver.metrics;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.junit.Test;
/**
* Test for the ReplicationMetricsSourceFactory
* Test for the CompatibilitySingletonFactory and building ReplicationMetricsSource
*/
public class ReplicationMetricsSourceFactoryTest {
@Test(expected=RuntimeException.class)
public void testGetInstanceNoHadoopCompat() throws Exception {
//This should throw an exception because there is no compat lib on the class path.
ReplicationMetricsSourceFactory.getInstance();
CompatibilitySingletonFactory.getInstance(ReplicationMetricsSource.class);
}
}

View File

@ -0,0 +1,67 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
import org.apache.hadoop.hbase.metrics.BaseMetricsSourceImpl;
import org.apache.hadoop.metrics2.lib.MetricMutableCounterLong;
import org.apache.hadoop.metrics2.lib.MetricMutableGaugeLong;
/**
* Hadoop1 implementation of MasterMetricsSource.
*/
public class MasterMetricsSourceImpl
extends BaseMetricsSourceImpl implements MasterMetricsSource {
MetricMutableCounterLong clusterRequestsCounter;
MetricMutableGaugeLong ritGauge;
MetricMutableGaugeLong ritCountOverThresholdGauge;
MetricMutableGaugeLong ritOldestAgeGauge;
public MasterMetricsSourceImpl() {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT);
}
public MasterMetricsSourceImpl(String metricsName,
String metricsDescription,
String metricsContext) {
super(metricsName, metricsDescription, metricsContext);
clusterRequestsCounter = getLongCounter("cluster_requests", 0);
ritGauge = getLongGauge("ritCount", 0);
ritCountOverThresholdGauge = getLongGauge("ritCountOverThreshold", 0);
ritOldestAgeGauge = getLongGauge("ritOldestAge", 0);
}
public void incRequests(final int inc) {
this.clusterRequestsCounter.incr(inc);
}
public void setRIT(int ritCount) {
ritGauge.set(ritCount);
}
public void setRITCountOverThreshold(int ritCount) {
ritCountOverThresholdGauge.set(ritCount);
}
public void setRITOldestAge(long age) {
ritOldestAgeGauge.set(age);
}
}

View File

@ -19,49 +19,73 @@
package org.apache.hadoop.hbase.metrics;
import org.apache.hadoop.metrics2.MetricsBuilder;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
import org.apache.hadoop.metrics2.lib.MetricMutable;
import org.apache.hadoop.metrics2.lib.MetricMutableCounterLong;
import org.apache.hadoop.metrics2.lib.MetricMutableGaugeLong;
import org.apache.hadoop.metrics2.source.JvmMetricsSource;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* Hadoop 1 implementation of BaseMetricsSource
* Hadoop 1 implementation of BaseMetricsSource (using metrics2 framework)
*/
public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
private static boolean defaultMetricsSystemInited = false;
public static final String HBASE_METRICS_SYSTEM_NAME = "hbase";
public ConcurrentMap<String, MetricMutableGaugeLong>
gauges = new ConcurrentHashMap<String, MetricMutableGaugeLong>();
public ConcurrentMap<String, MetricMutableCounterLong> counters =
new ConcurrentHashMap<String, MetricMutableCounterLong>();
final DynamicMetricsRegistry metricsRegistry;
protected String metricsContext;
protected String metricsName;
protected String metricsDescription;
private JvmMetricsSource jvmMetricsSource;
public BaseMetricsSourceImpl(
String metricsName,
String metricsDescription,
String metricsContext) {
this.metricsContext = metricsContext;
this.metricsName = metricsName;
this.metricsDescription = metricsDescription;
metricsRegistry = new DynamicMetricsRegistry(metricsName).setContext(metricsContext);
if (!defaultMetricsSystemInited) {
//Not too worried about multi-threading here as all it does is spam the logs.
defaultMetricsSystemInited = true;
DefaultMetricsSystem.initialize(HBASE_METRICS_SYSTEM_NAME);
//If this is the first time through register a jvm source.
jvmMetricsSource = JvmMetricsSource.create(metricsName, "");
}
//Register this instance.
DefaultMetricsSystem.registerSource(this.metricsContext, this.metricsDescription, this);
DefaultMetricsSystem.INSTANCE.registerSource(metricsContext, metricsDescription, this);
}
/**
* Get a MetricMutableGaugeLong from the storage. If it is not there atomically put it.
*
* @param gaugeName name of the gauge to create or get.
* @param potentialStartingValue value of the new gauge if we have to create it.
* @return a metric object
*/
protected MetricMutableGaugeLong getLongGauge(String gaugeName, long potentialStartingValue) {
return metricsRegistry.getLongGauge(gaugeName, potentialStartingValue);
}
/**
* Get a MetricMutableCounterLong from the storage. If it is not there atomically put it.
*
* @param counterName Name of the counter to get
* @param potentialStartingValue starting value if we have to create a new counter
* @return a metric object
*/
protected MetricMutableCounterLong getLongCounter(String counterName,
long potentialStartingValue) {
return metricsRegistry.getLongCounter(counterName, potentialStartingValue);
}
/**
@ -71,7 +95,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param value the new value of the gauge.
*/
public void setGauge(String gaugeName, long value) {
MetricMutableGaugeLong gaugeInt = getLongGauge(gaugeName, value);
MetricMutableGaugeLong gaugeInt = metricsRegistry.getLongGauge(gaugeName, value);
gaugeInt.set(value);
}
@ -82,7 +106,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param delta The amount to increment the gauge by.
*/
public void incGauge(String gaugeName, long delta) {
MetricMutableGaugeLong gaugeInt = getLongGauge(gaugeName, 0l);
MetricMutableGaugeLong gaugeInt = metricsRegistry.getLongGauge(gaugeName, 0l);
gaugeInt.incr(delta);
}
@ -93,7 +117,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param delta the amount to subtract from a gauge value.
*/
public void decGauge(String gaugeName, long delta) {
MetricMutableGaugeLong gaugeInt = getLongGauge(gaugeName, 0l);
MetricMutableGaugeLong gaugeInt = metricsRegistry.getLongGauge(gaugeName, 0l);
gaugeInt.decr(delta);
}
@ -104,7 +128,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param delta the amount to increment
*/
public void incCounters(String key, long delta) {
MetricMutableCounterLong counter = getLongCounter(key, 0l);
MetricMutableCounterLong counter = metricsRegistry.getLongCounter(key, 0l);
counter.incr(delta);
}
@ -115,7 +139,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param key
*/
public void removeGauge(String key) {
gauges.remove(key);
metricsRegistry.removeMetric(key);
}
/**
@ -124,7 +148,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param key
*/
public void removeCounter(String key) {
counters.remove(key);
metricsRegistry.removeMetric(key);
}
/**
@ -135,67 +159,6 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
*/
@Override
public void getMetrics(MetricsBuilder metricsBuilder, boolean all) {
MetricsRecordBuilder rb = metricsBuilder.addRecord(metricsName).setContext(metricsContext);
for (Map.Entry<String, MetricMutableCounterLong> entry : counters.entrySet()) {
entry.getValue().snapshot(rb, all);
}
for (Map.Entry<String, MetricMutableGaugeLong> entry : gauges.entrySet()) {
entry.getValue().snapshot(rb, all);
}
}
/**
* Get a MetricMutableGaugeLong from the storage. If it is not there atomically put it.
*
* @param gaugeName name of the gauge to create or get.
* @param potentialStartingValue value of the new counter if we have to create it.
* @return
*/
private MetricMutableGaugeLong getLongGauge(String gaugeName, long potentialStartingValue) {
//Try and get the gauge.
MetricMutableGaugeLong gauge = gauges.get(gaugeName);
//If it's not there then try and put a new one in the storage.
if (gauge == null) {
//Create the potential new gauge.
MetricMutableGaugeLong newGauge = new MetricMutableGaugeLong(gaugeName, "",
potentialStartingValue);
// Try and put the gauge in. This is atomic.
gauge = gauges.putIfAbsent(gaugeName, newGauge);
//If the value we get back is null then the put was successful and we will return that.
//otherwise gaugeLong should contain the thing that was in before the put could be completed.
if (gauge == null) {
gauge = newGauge;
metricsRegistry.snapshot(metricsBuilder.addRecord(metricsRegistry.name()), all);
}
}
return gauge;
}
/**
* Get a MetricMutableCounterLong from the storage. If it is not there atomically put it.
*
* @param counterName Name of the counter to get
* @param potentialStartingValue starting value if we have to create a new counter
* @return
*/
private MetricMutableCounterLong getLongCounter(String counterName, long potentialStartingValue) {
//See getLongGauge for description on how this works.
MetricMutableCounterLong counter = counters.get(counterName);
if (counter == null) {
MetricMutableCounterLong newCounter =
new MetricMutableCounterLong(counterName, "", potentialStartingValue);
counter = counters.putIfAbsent(counterName, newCounter);
if (counter == null) {
counter = newCounter;
}
}
return counter;
}
}

View File

@ -0,0 +1,41 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import org.apache.hadoop.metrics2.util.MBeans;
import javax.management.ObjectName;
/**
* Hadoop1 metrics2 implementation of an object that registers MBeans.
*/
public class MBeanSourceImpl implements MBeanSource {
/**
* Register an mbean with the underlying metrics system
* @param serviceName Metrics service/system name
* @param metricsName name of the metrics object to expose
* @param theMbean the actual MBean
* @return ObjectName from jmx
*/
@Override
public ObjectName register(String serviceName, String metricsName, Object theMbean) {
return MBeans.register(serviceName, metricsName, theMbean);
}
}

View File

@ -28,9 +28,7 @@ import org.apache.hadoop.metrics2.MetricsSource;
public class ReplicationMetricsSourceImpl extends BaseMetricsSourceImpl implements
ReplicationMetricsSource {
public static final String METRICS_NAME = "ReplicationMetrics";
public static final String METRICS_CONTEXT = "replicationmetrics";
public static final String METRICS_DESCRIPTION = "Metrics about HBase replication";
public ReplicationMetricsSourceImpl() {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT);

View File

@ -0,0 +1,355 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsTag;
/**
* An optional metrics registry class for creating and maintaining a
* collection of MetricsMutables, making writing metrics source easier.
* NOTE: this is a copy of org.apache.hadoop.metrics2.lib.MetricsRegistry with one added
* feature: metrics can be removed. When HADOOP-8313 is fixed, usages of this class
* should be substituted with org.apache.hadoop.metrics2.lib.MetricsRegistry.
* This implementation also provides handy methods for creating metrics dynamically.
* Another difference is that the metricsMap and tagsMap implementations are substituted with
* concurrent maps, as we allow dynamic metric additions/removals.
*/
public class DynamicMetricsRegistry {
/** key for the context tag */
public static final String CONTEXT_KEY = "context";
/** description for the context tag */
public static final String CONTEXT_DESC = "Metrics context";
private final ConcurrentMap<String, MetricMutable> metricsMap =
new ConcurrentHashMap<String, MetricMutable>();
private final ConcurrentMap<String, MetricsTag> tagsMap =
new ConcurrentHashMap<String, MetricsTag>();
private final String name;
private final MetricMutableFactory mf;
/**
* Construct the registry with a record name
* @param name of the record of the metrics
*/
public DynamicMetricsRegistry(String name) {
this.name = name;
this.mf = new MetricMutableFactory();
}
/**
* Construct the registry with a name and a metric factory
* @param name of the record of the metrics
* @param factory for creating new mutable metrics
*/
public DynamicMetricsRegistry(String name, MetricMutableFactory factory) {
this.name = name;
this.mf = factory;
}
/**
* @return the name of the metrics registry
*/
public String name() {
return name;
}
/**
* Get a metric by name
* @param name of the metric
* @return the metric object
*/
public MetricMutable get(String name) {
return metricsMap.get(name);
}
/**
* Create a mutable integer counter
* @param name of the metric
* @param description of the metric
* @param initValue of the metric
* @return a new counter object
*/
public MetricMutableCounterInt
newCounter(String name, String description, int initValue) {
MetricMutableCounterInt ret = mf.newCounter(name, description, initValue);
return addNewMetricIfAbsent(name, ret, MetricMutableCounterInt.class);
}
/**
* Create a mutable long integer counter
* @param name of the metric
* @param description of the metric
* @param initValue of the metric
* @return a new counter object
*/
public MetricMutableCounterLong
newCounter(String name, String description, long initValue) {
MetricMutableCounterLong ret = mf.newCounter(name, description, initValue);
return addNewMetricIfAbsent(name, ret, MetricMutableCounterLong.class);
}
/**
* Create a mutable integer gauge
* @param name of the metric
* @param description of the metric
* @param initValue of the metric
* @return a new gauge object
*/
public MetricMutableGaugeInt
newGauge(String name, String description, int initValue) {
MetricMutableGaugeInt ret = mf.newGauge(name, description, initValue);
return addNewMetricIfAbsent(name, ret, MetricMutableGaugeInt.class);
}
/**
* Create a mutable long integer gauge
* @param name of the metric
* @param description of the metric
* @param initValue of the metric
* @return a new gauge object
*/
public MetricMutableGaugeLong
newGauge(String name, String description, long initValue) {
MetricMutableGaugeLong ret = mf.newGauge(name, description, initValue);
return addNewMetricIfAbsent(name, ret, MetricMutableGaugeLong.class);
}
/**
* Create a mutable metric with stats
* @param name of the metric
* @param description of the metric
* @param sampleName of the metric (e.g., "ops")
* @param valueName of the metric (e.g., "time" or "latency")
* @param extended produce extended stat (stdev, min/max etc.) if true.
* @return a new metric object
*/
public MetricMutableStat newStat(String name, String description,
String sampleName, String valueName,
boolean extended) {
MetricMutableStat ret =
mf.newStat(name, description, sampleName, valueName, extended);
return addNewMetricIfAbsent(name, ret, MetricMutableStat.class);
}
/**
* Create a mutable metric with stats
* @param name of the metric
* @param description of the metric
* @param sampleName of the metric (e.g., "ops")
* @param valueName of the metric (e.g., "time" or "latency")
* @return a new metric object
*/
public MetricMutableStat newStat(String name, String description,
String sampleName, String valueName) {
return newStat(name, description, sampleName, valueName, false);
}
/**
* Create a mutable metric with stats using the name only
* @param name of the metric
* @return a new metric object
*/
public MetricMutableStat newStat(String name) {
return newStat(name, "", "ops", "time", false);
}
/**
* Set the metrics context tag
* @param name of the context
* @return the registry itself as a convenience
*/
public DynamicMetricsRegistry setContext(String name) {
return tag(CONTEXT_KEY, CONTEXT_DESC, name);
}
/**
* Add a tag to the metrics
* @param name of the tag
* @param description of the tag
* @param value of the tag
* @return the registry (for keep adding tags)
*/
public DynamicMetricsRegistry tag(String name, String description, String value) {
return tag(name, description, value, false);
}
/**
* Add a tag to the metrics
* @param name of the tag
* @param description of the tag
* @param value of the tag
* @param override existing tag if true
* @return the registry (for keep adding tags)
*/
public DynamicMetricsRegistry tag(String name, String description, String value,
boolean override) {
MetricsTag tag = new MetricsTag(name, description, value);
if (!override) {
MetricsTag existing = tagsMap.putIfAbsent(name, tag);
if (existing != null) {
throw new MetricsException("Tag "+ name +" already exists!");
}
return this;
}
tagsMap.put(name, tag);
return this;
}
/**
* Get the tags
* @return the tags set
*/
public Set<Entry<String, MetricsTag>> tags() {
return tagsMap.entrySet();
}
/**
* Get the metrics
* @return the metrics set
*/
public Set<Entry<String, MetricMutable>> metrics() {
return metricsMap.entrySet();
}
/**
* Sample all the mutable metrics and put the snapshot in the builder
* @param builder to contain the metrics snapshot
* @param all get all the metrics even if the values are not changed.
*/
public void snapshot(MetricsRecordBuilder builder, boolean all) {
for (Entry<String, MetricsTag> entry : tags()) {
builder.add(entry.getValue());
}
for (Entry<String, MetricMutable> entry : metrics()) {
entry.getValue().snapshot(builder, all);
}
}
/**
* Removes metric by name
* @param name name of the metric to remove
*/
public void removeMetric(String name) {
metricsMap.remove(name);
}
/**
* Get a MetricMutableGaugeLong from the storage. If it is not there
* atomically put it.
*
* @param gaugeName name of the gauge to create or get.
* @param potentialStartingValue value of the new counter if we have to create it.
* @return a metric object
*/
public MetricMutableGaugeLong getLongGauge(String gaugeName,
long potentialStartingValue) {
//Try and get the gauge.
MetricMutable metric = metricsMap.get(gaugeName);
//If it's not there then try and put a new one in the storage.
if (metric == null) {
//Create the potential new gauge.
MetricMutableGaugeLong newGauge = new MetricMutableGaugeLong(gaugeName, "",
potentialStartingValue);
// Try and put the gauge in. This is atomic.
metric = metricsMap.putIfAbsent(gaugeName, newGauge);
//If the value we get back is null then the put was successful and we will
// return the new gauge. Otherwise the returned metric is whatever was in the map
// before the put could be completed.
if (metric == null) {
return newGauge;
}
}
if (!(metric instanceof MetricMutableGaugeLong)) {
throw new MetricsException("Metric already exists in registry for metric name: " +
name + " and not of type MetricMutableGaugeLong");
}
return (MetricMutableGaugeLong) metric;
}
/**
* Get a MetricMutableCounterLong from the storage. If it is not there
* atomically put it.
*
* @param counterName Name of the counter to get
* @param potentialStartingValue starting value if we have to create a new counter
* @return a metric object
*/
public MetricMutableCounterLong getLongCounter(String counterName,
long potentialStartingValue) {
//See getLongGauge for description on how this works.
MetricMutable counter = metricsMap.get(counterName);
if (counter == null) {
MetricMutableCounterLong newCounter =
new MetricMutableCounterLong(counterName, "", potentialStartingValue);
counter = metricsMap.putIfAbsent(counterName, newCounter);
if (counter == null) {
return newCounter;
}
}
if (!(counter instanceof MetricMutableCounterLong)) {
throw new MetricsException("Metric already exists in registry for metric name: " +
name + "and not of type MetricMutableCounterLong");
}
return (MetricMutableCounterLong) counter;
}
private<T extends MetricMutable> T
addNewMetricIfAbsent(String name,
T ret,
Class<T> metricClass) {
//If the value we get back is null then the put was successful and we will
// return that. Otherwise metric should contain the thing that was in
// before the put could be completed.
MetricMutable metric = metricsMap.putIfAbsent(name, ret);
if (metric == null) {
return ret;
}
return returnExistingWithCast(metric, metricClass, name);
}
private<T> T returnExistingWithCast(MetricMutable metric,
Class<T> metricClass, String name) {
if (!metricClass.isAssignableFrom(metric.getClass())) {
throw new MetricsException("Metric already exists in registry for metric name: " +
name + " and not of type " + metricClass);
}
return (T) metric;
}
}
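A minimal usage sketch of the registry above, assuming it is driven from a metrics source whose getMetrics call supplies the MetricsBuilder; the record and metric names are illustrative:

// hedged sketch of the create-or-get pattern this registry provides
DynamicMetricsRegistry registry = new DynamicMetricsRegistry("ExampleRecord").setContext("example");
MetricMutableCounterLong requests = registry.getLongCounter("requests", 0);
requests.incr();
registry.getLongGauge("queueSize", 0).set(42);
// inside MetricsSource.getMetrics(MetricsBuilder builder, boolean all):
//   registry.snapshot(builder.addRecord(registry.name()), all);
registry.removeMetric("queueSize");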

View File

@ -0,0 +1 @@
org.apache.hadoop.hbase.master.metrics.MasterMetricsSourceImpl

View File

@ -0,0 +1 @@
org.apache.hadoop.hbase.metrics.MBeanSourceImpl
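For the ServiceLoader lookup in CompatibilitySingletonFactory to find these implementations, each of the two one-line files above must live under META-INF/services/ in the compat jar and be named after the fully qualified interface it implements (the file paths themselves are not shown in this view); that is the standard java.util.ServiceLoader contract.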

View File

@ -0,0 +1,40 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.junit.Test;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
/**
* Test for MasterMetricsSourceImpl
*/
public class MasterMetricsSourceImplTest {
@Test
public void testGetInstance() throws Exception {
MasterMetricsSource rms = CompatibilitySingletonFactory
.getInstance(MasterMetricsSource.class);
assertTrue(rms instanceof MasterMetricsSourceImpl);
assertSame(rms, CompatibilitySingletonFactory.getInstance(MasterMetricsSource.class));
}
}

View File

@ -43,10 +43,10 @@ public class BaseMetricsSourceImplTest {
public void testSetGauge() throws Exception {
String key = "testset";
bmsi.setGauge(key, 100);
MetricMutableGaugeLong g = bmsi.gauges.get(key);
MetricMutableGaugeLong g = (MetricMutableGaugeLong) bmsi.metricsRegistry.get(key);
assertEquals(key, g.name);
bmsi.setGauge(key, 110);
assertSame(g, bmsi.gauges.get(key));
assertSame(g, bmsi.metricsRegistry.get(key));
}
@ -54,37 +54,37 @@ public class BaseMetricsSourceImplTest {
public void testIncGauge() throws Exception {
String key = "testincgauge";
bmsi.incGauge(key, 100);
MetricMutableGaugeLong g = bmsi.gauges.get(key);
MetricMutableGaugeLong g = (MetricMutableGaugeLong) bmsi.metricsRegistry.get(key);
assertEquals(key, g.name);
bmsi.incGauge(key, 10);
assertSame(g, bmsi.gauges.get(key));
assertSame(g, bmsi.metricsRegistry.get(key));
}
@Test
public void testDecGauge() throws Exception {
String key = "testdec";
bmsi.decGauge(key, 100);
MetricMutableGaugeLong g = bmsi.gauges.get(key);
MetricMutableGaugeLong g = (MetricMutableGaugeLong) bmsi.metricsRegistry.get(key);
assertEquals(key, g.name);
bmsi.decGauge(key, 100);
assertSame(g, bmsi.gauges.get(key));
assertSame(g, bmsi.metricsRegistry.get(key));
}
@Test
public void testIncCounters() throws Exception {
String key = "testinccounter";
bmsi.incCounters(key, 100);
MetricMutableCounterLong c = bmsi.counters.get(key);
MetricMutableCounterLong c = (MetricMutableCounterLong) bmsi.metricsRegistry.get(key);
assertEquals(key, c.name);
bmsi.incCounters(key, 100);
assertSame(c, bmsi.counters.get(key));
assertSame(c, bmsi.metricsRegistry.get(key));
}
@Test
public void testRemoveGauge() throws Exception {
bmsi.setGauge("testrm", 100);
bmsi.removeGauge("testrm");
assertNull(bmsi.gauges.get("testrm"));
assertNull(bmsi.metricsRegistry.get("testrm"));
}
@ -92,6 +92,6 @@ public class BaseMetricsSourceImplTest {
public void testRemoveCounter() throws Exception {
bmsi.incCounters("testrm", 100);
bmsi.removeCounter("testrm");
assertNull(bmsi.counters.get("testrm"));
assertNull(bmsi.metricsRegistry.get("testrm"));
}
}

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.replication.regionserver.metrics;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
@ -29,7 +30,8 @@ public class ReplicationMetricsSourceImplTest {
@Test
public void testGetInstance() throws Exception {
ReplicationMetricsSource rms = ReplicationMetricsSourceFactory.getInstance();
ReplicationMetricsSource rms = CompatibilitySingletonFactory
.getInstance(ReplicationMetricsSource.class);
assertTrue(rms instanceof ReplicationMetricsSourceImpl);
}
}

View File

@ -0,0 +1,66 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
import org.apache.hadoop.hbase.metrics.BaseMetricsSourceImpl;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
/**
* Hadoop2 implementation of MasterMetricsSource.
*/
public class MasterMetricsSourceImpl
extends BaseMetricsSourceImpl implements MasterMetricsSource {
MutableCounterLong clusterRequestsCounter;
MutableGaugeLong ritGauge;
MutableGaugeLong ritCountOverThresholdGauge;
MutableGaugeLong ritOldestAgeGauge;
public MasterMetricsSourceImpl() {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT);
}
public MasterMetricsSourceImpl(String metricsName,
String metricsDescription,
String metricsContext) {
super(metricsName, metricsDescription, metricsContext);
clusterRequestsCounter = getLongCounter("cluster_requests", 0);
ritGauge = getLongGauge("ritCount", 0);
ritCountOverThresholdGauge = getLongGauge("ritCountOverThreshold", 0);
ritOldestAgeGauge = getLongGauge("ritOldestAge", 0);
}
public void incRequests(final int inc) {
this.clusterRequestsCounter.incr(inc);
}
public void setRIT(int ritCount) {
ritGauge.set(ritCount);
}
public void setRITCountOverThreshold(int ritCount) {
ritCountOverThresholdGauge.set(ritCount);
}
public void setRITOldestAge(long age) {
ritOldestAgeGauge.set(age);
}
}

View File

@ -19,45 +19,37 @@
package org.apache.hadoop.hbase.metrics;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.HBaseMetricsFactory;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
import org.apache.hadoop.metrics2.source.JvmMetrics;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/** Hadoop 2 implementation of BaseMetricsSource for */
/**
* Hadoop 2 implementation of BaseMetricsSource (using metrics2 framework)
*/
public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
private static boolean defaultMetricsSystemInited = false;
public static final String HBASE_METRICS_SYSTEM_NAME = "hbase";
public ConcurrentMap<String, MutableGaugeLong>
gauges = new ConcurrentHashMap<String, MutableGaugeLong>();
public ConcurrentMap<String, MutableCounterLong> counters =
new ConcurrentHashMap<String, MutableCounterLong>();
final DynamicMetricsRegistry metricsRegistry;
protected String metricsContext;
protected String metricsName;
protected String metricsDescription;
private JvmMetrics jvmMetricsSource;
public BaseMetricsSourceImpl(String metricsName,
String metricsDescription,
String metricsContext) {
this.metricsContext = metricsContext;
this.metricsName = metricsName;
this.metricsDescription = metricsDescription;
metricsRegistry = new DynamicMetricsRegistry(metricsName).setContext(metricsContext);
if (!defaultMetricsSystemInited) {
//Not too worried about multithreading here as all it does is spam the logs.
defaultMetricsSystemInited = true;
DefaultMetricsSystem.initialize(HBASE_METRICS_SYSTEM_NAME);
jvmMetricsSource = JvmMetrics.create(metricsName, "", DefaultMetricsSystem.instance());
}
DefaultMetricsSystem.instance().register(this.metricsContext, this.metricsDescription, this);
DefaultMetricsSystem.instance().register(metricsContext, metricsDescription, this);
}
@ -112,7 +104,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param key
*/
public void removeGauge(String key) {
gauges.remove(key);
metricsRegistry.removeMetric(key);
}
/**
@ -121,21 +113,12 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param key
*/
public void removeCounter(String key) {
counters.remove(key);
metricsRegistry.removeMetric(key);
}
@Override
public void getMetrics(MetricsCollector metricsCollector, boolean all) {
MetricsRecordBuilder rb =
metricsCollector.addRecord(this.metricsName).setContext(metricsContext);
for (Map.Entry<String, MutableCounterLong> entry : counters.entrySet()) {
entry.getValue().snapshot(rb, all);
}
for (Map.Entry<String, MutableGaugeLong> entry : gauges.entrySet()) {
entry.getValue().snapshot(rb, all);
}
metricsRegistry.snapshot(metricsCollector.addRecord(metricsRegistry.info()), all);
}
/**
@ -145,28 +128,8 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param potentialStartingValue value of the new counter if we have to create it.
* @return
*/
private MutableGaugeLong getLongGauge(String gaugeName, long potentialStartingValue) {
//Try and get the gauge.
MutableGaugeLong gaugeInt = gauges.get(gaugeName);
//If it's not there then try and put a new one in the storage.
if (gaugeInt == null) {
//Create the potential new gauge.
MutableGaugeLong newGauge = HBaseMetricsFactory.newGauge(gaugeName,
"",
potentialStartingValue);
// Try and put the gauge in. This is atomic.
gaugeInt = gauges.putIfAbsent(gaugeName, newGauge);
//If the value we get back is null then the put was successful and we will return that.
//otherwise gaugeInt should contain the thing that was in before the put could be completed.
if (gaugeInt == null) {
gaugeInt = newGauge;
}
}
return gaugeInt;
protected MutableGaugeLong getLongGauge(String gaugeName, long potentialStartingValue) {
return metricsRegistry.getLongGauge(gaugeName, potentialStartingValue);
}
/**
@ -176,18 +139,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
* @param potentialStartingValue starting value if we have to create a new counter
* @return
*/
private MutableCounterLong getLongCounter(String counterName, long potentialStartingValue) {
//See getLongGauge for description on how this works.
MutableCounterLong counter = counters.get(counterName);
if (counter == null) {
MutableCounterLong newCounter =
HBaseMetricsFactory.newCounter(counterName, "", potentialStartingValue);
counter = counters.putIfAbsent(counterName, newCounter);
if (counter == null) {
counter = newCounter;
protected MutableCounterLong getLongCounter(String counterName, long potentialStartingValue) {
return metricsRegistry.getLongCounter(counterName, potentialStartingValue);
}
}
return counter;
}
}

View File

@ -0,0 +1,41 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import org.apache.hadoop.metrics2.util.MBeans;
import javax.management.ObjectName;
/**
* Hadoop2 metrics2 implementation of an object that registers MBeans.
*/
public class MBeanSourceImpl implements MBeanSource {
/**
* Register an mbean with the underlying metrics system
* @param serviceName Metrics service/system name
* @param metricsName name of the metrics object to expose
* @param theMbean the actual MBean
* @return ObjectName from jmx
*/
@Override
public ObjectName register(String serviceName, String metricsName, Object theMbean) {
return MBeans.register(serviceName, metricsName, theMbean);
}
}

View File

@ -28,10 +28,6 @@ import org.apache.hadoop.metrics2.MetricsSource;
public class ReplicationMetricsSourceImpl extends BaseMetricsSourceImpl implements
ReplicationMetricsSource {
public static final String METRICS_NAME = "ReplicationMetrics";
public static final String METRICS_CONTEXT = "replicationmetrics";
public static final String METRICS_DESCRIPTION = "Metrics about HBase replication";
public ReplicationMetricsSourceImpl() {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT);
}

View File

@ -0,0 +1,468 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.util.Collection;
import java.util.concurrent.ConcurrentMap;
import com.google.common.base.Objects;
import com.google.common.collect.Maps;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.impl.MsInfo;
/**
* An optional metrics registry class for creating and maintaining a
* collection of MetricsMutables, making writing metrics source easier.
* NOTE: this is a copy of org.apache.hadoop.metrics2.lib.MetricsRegistry with one added
* feature: metrics can be removed. When HADOOP-8313 is fixed, usages of this class
* should be substituted with org.apache.hadoop.metrics2.lib.MetricsRegistry.
* This implementation also provides handy methods for creating metrics
* dynamically.
* Another difference is that the metricsMap implementation is substituted with a
* thread-safe map, as we allow dynamic metric additions/removals.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class DynamicMetricsRegistry {
private final ConcurrentMap<String, MutableMetric> metricsMap =
Maps.newConcurrentMap();
private final ConcurrentMap<String, MetricsTag> tagsMap =
Maps.newConcurrentMap();
private final MetricsInfo metricsInfo;
/**
* Construct the registry with a record name
* @param name of the record of the metrics
*/
public DynamicMetricsRegistry(String name) {
metricsInfo = Interns.info(name, name);
}
/**
* Construct the registry with a metadata object
* @param info the info object for the metrics record/group
*/
public DynamicMetricsRegistry(MetricsInfo info) {
metricsInfo = info;
}
/**
* @return the info object of the metrics registry
*/
public MetricsInfo info() {
return metricsInfo;
}
/**
* Get a metric by name
* @param name of the metric
* @return the metric object
*/
public MutableMetric get(String name) {
return metricsMap.get(name);
}
/**
* Get a tag by name
* @param name of the tag
* @return the tag object
*/
public MetricsTag getTag(String name) {
return tagsMap.get(name);
}
/**
* Create a mutable integer counter
* @param name of the metric
* @param desc metric description
* @param iVal initial value
* @return a new counter object
*/
public MutableCounterInt newCounter(String name, String desc, int iVal) {
return newCounter(Interns.info(name, desc), iVal);
}
/**
* Create a mutable integer counter
* @param info metadata of the metric
* @param iVal initial value
* @return a new counter object
*/
public MutableCounterInt newCounter(MetricsInfo info, int iVal) {
MutableCounterInt ret = new MutableCounterInt(info, iVal);
return addNewMetricIfAbsent(info.name(), ret, MutableCounterInt.class);
}
/**
* Create a mutable long integer counter
* @param name of the metric
* @param desc metric description
* @param iVal initial value
* @return a new counter object
*/
public MutableCounterLong newCounter(String name, String desc, long iVal) {
return newCounter(Interns.info(name, desc), iVal);
}
/**
* Create a mutable long integer counter
* @param info metadata of the metric
* @param iVal initial value
* @return a new counter object
*/
public MutableCounterLong newCounter(MetricsInfo info, long iVal) {
MutableCounterLong ret = new MutableCounterLong(info, iVal);
return addNewMetricIfAbsent(info.name(), ret, MutableCounterLong.class);
}
/**
* Create a mutable integer gauge
* @param name of the metric
* @param desc metric description
* @param iVal initial value
* @return a new gauge object
*/
public MutableGaugeInt newGauge(String name, String desc, int iVal) {
return newGauge(Interns.info(name, desc), iVal);
}
/**
* Create a mutable integer gauge
* @param info metadata of the metric
* @param iVal initial value
* @return a new gauge object
*/
public MutableGaugeInt newGauge(MetricsInfo info, int iVal) {
MutableGaugeInt ret = new MutableGaugeInt(info, iVal);
return addNewMetricIfAbsent(info.name(), ret, MutableGaugeInt.class);
}
/**
* Create a mutable long integer gauge
* @param name of the metric
* @param desc metric description
* @param iVal initial value
* @return a new gauge object
*/
public MutableGaugeLong newGauge(String name, String desc, long iVal) {
return newGauge(Interns.info(name, desc), iVal);
}
/**
* Create a mutable long integer gauge
* @param info metadata of the metric
* @param iVal initial value
* @return a new gauge object
*/
public MutableGaugeLong newGauge(MetricsInfo info, long iVal) {
MutableGaugeLong ret = new MutableGaugeLong(info, iVal);
return addNewMetricIfAbsent(info.name(), ret, MutableGaugeLong.class);
}
/**
* Create a mutable metric with stats
* @param name of the metric
* @param desc metric description
* @param sampleName of the metric (e.g., "Ops")
* @param valueName of the metric (e.g., "Time" or "Latency")
* @param extended produce extended stat (stdev, min/max etc.) if true.
* @return a new mutable stat metric object
*/
public MutableStat newStat(String name, String desc,
String sampleName, String valueName, boolean extended) {
MutableStat ret =
new MutableStat(name, desc, sampleName, valueName, extended);
return addNewMetricIfAbsent(name, ret, MutableStat.class);
}
/**
* Create a mutable metric with stats
* @param name of the metric
* @param desc metric description
* @param sampleName of the metric (e.g., "Ops")
* @param valueName of the metric (e.g., "Time" or "Latency")
* @return a new mutable metric object
*/
public MutableStat newStat(String name, String desc,
String sampleName, String valueName) {
return newStat(name, desc, sampleName, valueName, false);
}
/**
* Create a mutable rate metric
* @param name of the metric
* @return a new mutable metric object
*/
public MutableRate newRate(String name) {
return newRate(name, name, false);
}
/**
* Create a mutable rate metric
* @param name of the metric
* @param description of the metric
* @return a new mutable rate metric object
*/
public MutableRate newRate(String name, String description) {
return newRate(name, description, false);
}
/**
* Create a mutable rate metric (for throughput measurement)
* @param name of the metric
* @param desc description
* @param extended produce extended stat (stdev/min/max etc.) if true
* @return a new mutable rate metric object
*/
public MutableRate newRate(String name, String desc, boolean extended) {
return newRate(name, desc, extended, true);
}
@InterfaceAudience.Private
public MutableRate newRate(String name, String desc,
boolean extended, boolean returnExisting) {
if (returnExisting) {
MutableMetric rate = metricsMap.get(name);
if (rate != null) {
if (rate instanceof MutableRate) return (MutableRate) rate;
throw new MetricsException("Unexpected metrics type "+ rate.getClass()
+" for "+ name);
}
}
MutableRate ret = new MutableRate(name, desc, extended);
metricsMap.put(name, ret);
return ret;
}
synchronized void add(String name, MutableMetric metric) {
addNewMetricIfAbsent(name, metric, MutableMetric.class);
}
/**
* Add sample to a stat metric by name.
* @param name of the metric
* @param value of the snapshot to add
*/
public void add(String name, long value) {
MutableMetric m = metricsMap.get(name);
if (m != null) {
if (m instanceof MutableStat) {
((MutableStat) m).add(value);
}
else {
throw new MetricsException("Unsupported add(value) for metric "+ name);
}
}
else {
metricsMap.put(name, newRate(name)); // default is a rate metric
add(name, value);
}
}
/**
* Set the metrics context tag
* @param name of the context
* @return the registry itself as a convenience
*/
public DynamicMetricsRegistry setContext(String name) {
return tag(MsInfo.Context, name, true);
}
/**
* Add a tag to the metrics
* @param name of the tag
* @param description of the tag
* @param value of the tag
* @return the registry (for keep adding tags)
*/
public DynamicMetricsRegistry tag(String name, String description, String value) {
return tag(name, description, value, false);
}
/**
* Add a tag to the metrics
* @param name of the tag
* @param description of the tag
* @param value of the tag
* @param override existing tag if true
* @return the registry (for keep adding tags)
*/
public DynamicMetricsRegistry tag(String name, String description, String value,
boolean override) {
return tag(Interns.info(name, description), value, override);
}
/**
* Add a tag to the metrics
* @param info metadata of the tag
* @param value of the tag
* @param override existing tag if true
* @return the registry (for keep adding tags etc.)
*/
public DynamicMetricsRegistry tag(MetricsInfo info, String value, boolean override) {
MetricsTag tag = Interns.tag(info, value);
if (!override) {
MetricsTag existing = tagsMap.putIfAbsent(info.name(), tag);
if (existing != null) {
throw new MetricsException("Tag "+ info.name() +" already exists!");
}
return this;
}
tagsMap.put(info.name(), tag);
return this;
}
public DynamicMetricsRegistry tag(MetricsInfo info, String value) {
return tag(info, value, false);
}
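// Editor's sketch, not part of this patch: the tag methods above return the registry, so tags
// can be chained; the tag name and values here are invented for illustration.
void tagSketch(DynamicMetricsRegistry registry) {
registry.setContext("master")
.tag("serverName", "Name of this master", "master-1.example.com");
// re-adding the same tag without override throws MetricsException; with override it replaces it
registry.tag("serverName", "Name of this master", "master-2.example.com", true);
}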
Collection<MetricsTag> tags() {
return tagsMap.values();
}
Collection<MutableMetric> metrics() {
return metricsMap.values();
}
/**
* Sample all the mutable metrics and put the snapshot in the builder
* @param builder to contain the metrics snapshot
* @param all if true, include all metrics even if their values have not changed.
*/
public void snapshot(MetricsRecordBuilder builder, boolean all) {
for (MetricsTag tag : tags()) {
builder.add(tag);
}
for (MutableMetric metric : metrics()) {
metric.snapshot(builder, all);
}
}
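// Editor's sketch, not part of this patch: how a metrics2 source built on this registry might
// drive snapshot(). The class, record name and context are assumptions; only snapshot(),
// MetricsCollector.addRecord() and MetricsRecordBuilder.setContext() are taken as given.
class ExampleRegistryBackedSource implements org.apache.hadoop.metrics2.MetricsSource {
private final DynamicMetricsRegistry registry;
ExampleRegistryBackedSource(DynamicMetricsRegistry registry) {
this.registry = registry;
}
@Override
public void getMetrics(org.apache.hadoop.metrics2.MetricsCollector collector, boolean all) {
// one record per collection; the registry emits its tags and mutable metrics into it
registry.snapshot(collector.addRecord("ExampleRecord").setContext("example"), all);
}
}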
@Override public String toString() {
return Objects.toStringHelper(this)
.add("info", metricsInfo).add("tags", tags()).add("metrics", metrics())
.toString();
}
/**
* Removes metric by name
* @param name name of the metric to remove
*/
public void removeMetric(String name) {
metricsMap.remove(name);
}
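// Editor's sketch, not part of this patch: removal is the point of this registry (the stock
// MetricsRegistry cannot remove metrics, as the deleted HBaseMetricsFactory below notes).
// The key mirrors the replication source keys later in this diff.
void removeMetricSketch(DynamicMetricsRegistry registry) {
registry.getLongCounter("source.1.shippedOps", 0).incr();
registry.removeMetric("source.1.shippedOps");      // e.g. when a replication source goes away
registry.getLongCounter("source.1.shippedOps", 0); // the same name can later be re-created
}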
/**
* Get a MutableGaugeLong from the registry. If it is not there, atomically create and register it.
*
* @param gaugeName name of the gauge to create or get.
* @param potentialStartingValue value of the new gauge if we have to create it.
* @return the existing gauge, or the newly created gauge if none was registered under that name
*/
public MutableGaugeLong getLongGauge(String gaugeName, long potentialStartingValue) {
// Try to get the gauge.
MutableMetric metric = metricsMap.get(gaugeName);
// If it's not there, try to put a new one into the registry.
if (metric == null) {
//Create the potential new gauge.
MutableGaugeLong newGauge = new MutableGaugeLong(Interns.info(gaugeName, ""),
potentialStartingValue);
// Try and put the gauge in. This is atomic.
metric = metricsMap.putIfAbsent(gaugeName, newGauge);
// If putIfAbsent returned null the put succeeded, so return the gauge we just created.
// Otherwise metric now holds whatever was registered before our put could complete.
if (metric == null) {
return newGauge;
}
}
if (!(metric instanceof MutableGaugeLong)) {
throw new MetricsException("Metric already exists in registry for metric name: " + gaugeName +
" and not of type MetricMutableGaugeLong");
}
return (MutableGaugeLong) metric;
}
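// Editor's sketch, not part of this patch: callers racing to create the same gauge all get the
// same instance back; "numRegions" is an invented name.
void longGaugeSketch(DynamicMetricsRegistry registry) {
MutableGaugeLong gauge = registry.getLongGauge("numRegions", 0);
gauge.set(42);
// a second lookup, even with a different starting value, returns the existing gauge
MutableGaugeLong same = registry.getLongGauge("numRegions", 100);
assert same == gauge && same.value() == 42;
}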
/**
* Get a MutableCounterLong from the registry. If it is not there, atomically create and register it.
*
* @param counterName Name of the counter to get
* @param potentialStartingValue starting value if we have to create a new counter
* @return the existing counter, or the newly created counter if none was registered under that name
*/
public MutableCounterLong getLongCounter(String counterName, long potentialStartingValue) {
//See getLongGauge for description on how this works.
MutableMetric counter = metricsMap.get(counterName);
if (counter == null) {
MutableCounterLong newCounter =
new MutableCounterLong(Interns.info(counterName, ""), potentialStartingValue);
counter = metricsMap.putIfAbsent(counterName, newCounter);
if (counter == null) {
return newCounter;
}
}
if (!(counter instanceof MutableCounterLong)) {
throw new MetricsException("Metric already exists in registry for metric name: " +
counterName + " and not of type MetricMutableCounterLong");
}
return (MutableCounterLong) counter;
}
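// Editor's sketch, not part of this patch: one plausible shape of the incCounters()/setGauge()
// helpers exercised by BaseMetricsSourceImplTest further down (metricsRegistry being the
// DynamicMetricsRegistry field the tests read); the real method bodies are not in this hunk,
// so treat these as assumptions.
public void incCounters(String key, long delta) {
metricsRegistry.getLongCounter(key, 0).incr(delta);
}
public void setGauge(String gaugeName, long value) {
metricsRegistry.getLongGauge(gaugeName, value).set(value);
}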
private<T extends MutableMetric> T
addNewMetricIfAbsent(String name,
T ret,
Class<T> metricClass) {
//If the value we get back is null then the put was successful and we will
// return that. Otherwise metric should contain the thing that was in
// before the put could be completed.
MutableMetric metric = metricsMap.putIfAbsent(name, ret);
if (metric == null) {
return ret;
}
return returnExistingWithCast(metric, metricClass, name);
}
private<T> T returnExistingWithCast(MutableMetric metric,
Class<T> metricClass, String name) {
if (!metricClass.isAssignableFrom(metric.getClass())) {
throw new MetricsException("Metric already exists in registry for metric name: " +
name + " and not of type " + metricClass +
" but instead of type " + metric.getClass());
}
return (T) metric;
}
}

View File

@ -1,50 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
/**
* Factory providing static methods to create MutableMetric instances.
* HBase uses this class rather than MetricsRegistry because MetricsRegistry does not
* allow metrics to be removed.
*/
public class HBaseMetricsFactory {
/**
* Create a new gauge
* @param name Name of the gauge
* @param desc Description of the gauge
* @param startingValue The starting value
* @return a new MutableGaugeLong that has a starting value.
*/
public static MutableGaugeLong newGauge(String name, String desc, long startingValue) {
return new MutableGaugeLong(Interns.info(name, desc), startingValue);
}
/**
* Create a new counter.
* @param name Name of the counter.
* @param desc Description of the counter.
* @param startingValue The starting value.
* @return a new MutableCounterLong that has a starting value.
*/
public static MutableCounterLong newCounter(String name, String desc, long startingValue) {
return new MutableCounterLong(Interns.info(name, desc), startingValue);
}
}

View File

@ -0,0 +1 @@
org.apache.hadoop.hbase.master.metrics.MasterMetricsSourceImpl

View File

@ -0,0 +1 @@
org.apache.hadoop.hbase.metrics.MBeanSourceImpl

View File

@ -0,0 +1,40 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.junit.Test;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
/**
* Test for MasterMetricsSourceImpl
*/
public class MasterMetricsSourceImplTest {
@Test
public void testGetInstance() throws Exception {
MasterMetricsSource rms = CompatibilitySingletonFactory
.getInstance(MasterMetricsSource.class);
assertTrue(rms instanceof MasterMetricsSourceImpl);
assertSame(rms, CompatibilitySingletonFactory.getInstance(MasterMetricsSource.class));
}
}

View File

@ -18,6 +18,8 @@
package org.apache.hadoop.hbase.metrics;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
import org.junit.BeforeClass;
import org.junit.Test;
@ -39,36 +41,36 @@ public class BaseMetricsSourceImplTest {
@Test
public void testSetGauge() throws Exception {
bmsi.setGauge("testset", 100);
assertEquals(100, bmsi.gauges.get("testset").value());
assertEquals(100, ((MutableGaugeLong) bmsi.metricsRegistry.get("testset")).value());
bmsi.setGauge("testset", 300);
assertEquals(300, bmsi.gauges.get("testset").value());
assertEquals(300, ((MutableGaugeLong) bmsi.metricsRegistry.get("testset")).value());
}
@Test
public void testIncGauge() throws Exception {
bmsi.incGauge("testincgauge", 100);
assertEquals(100, bmsi.gauges.get("testincgauge").value());
assertEquals(100, ((MutableGaugeLong) bmsi.metricsRegistry.get("testincgauge")).value());
bmsi.incGauge("testincgauge", 100);
assertEquals(200, bmsi.gauges.get("testincgauge").value());
assertEquals(200, ((MutableGaugeLong) bmsi.metricsRegistry.get("testincgauge")).value());
}
@Test
public void testDecGauge() throws Exception {
bmsi.decGauge("testdec", 100);
assertEquals(-100, bmsi.gauges.get("testdec").value());
assertEquals(-100, ((MutableGaugeLong) bmsi.metricsRegistry.get("testdec")).value());
bmsi.decGauge("testdec", 100);
assertEquals(-200, bmsi.gauges.get("testdec").value());
assertEquals(-200, ((MutableGaugeLong) bmsi.metricsRegistry.get("testdec")).value());
}
@Test
public void testIncCounters() throws Exception {
bmsi.incCounters("testinccounter", 100);
assertEquals(100, bmsi.counters.get("testinccounter").value());
assertEquals(100, ((MutableCounterLong) bmsi.metricsRegistry.get("testinccounter")).value());
bmsi.incCounters("testinccounter", 100);
assertEquals(200, bmsi.counters.get("testinccounter").value());
assertEquals(200, ((MutableCounterLong) bmsi.metricsRegistry.get("testinccounter")).value());
}
@ -76,13 +78,13 @@ public class BaseMetricsSourceImplTest {
public void testRemoveGauge() throws Exception {
bmsi.setGauge("testrmgauge", 100);
bmsi.removeGauge("testrmgauge");
assertNull(bmsi.gauges.get("testrmgauge"));
assertNull(bmsi.metricsRegistry.get("testrmgauge"));
}
@Test
public void testRemoveCounter() throws Exception {
bmsi.incCounters("testrmcounter", 100);
bmsi.removeCounter("testrmcounter");
assertNull(bmsi.counters.get("testrmcounter"));
assertNull(bmsi.metricsRegistry.get("testrmcounter"));
}
}

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.replication.regionserver.metrics;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
@ -27,7 +28,8 @@ public class ReplicationMetricsSourceImplTest {
@Test
public void testGetInstance() throws Exception {
ReplicationMetricsSource rms = ReplicationMetricsSourceFactory.getInstance();
ReplicationMetricsSource rms = CompatibilitySingletonFactory
.getInstance(ReplicationMetricsSource.class);
assertTrue(rms instanceof ReplicationMetricsSourceImpl);
}
}

View File

@ -49,6 +49,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.DeserializationException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -80,6 +81,8 @@ import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorType;
import org.apache.hadoop.hbase.ipc.HBaseRPC;
import org.apache.hadoop.hbase.ipc.HBaseServer;
import org.apache.hadoop.hbase.master.metrics.MXBeanImpl;
import org.apache.hadoop.hbase.metrics.MBeanSource;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.ipc.ProtocolSignature;
@ -399,6 +402,10 @@ Server {
}
MasterMetrics getMetrics() {
return metrics;
}
/**
* Main processing loop for the HMaster.
* <ol>
@ -2252,7 +2259,8 @@ Server {
*/
void registerMBean() {
MXBeanImpl mxBeanInfo = MXBeanImpl.init(this);
MBeanUtil.registerMBean("Master", "Master", mxBeanInfo);
mxBean = CompatibilitySingletonFactory.getInstance(
MBeanSource.class).register("hbase", "HMaster,sub=MXBean", mxBeanInfo);
LOG.info("Registered HMaster MXBean");
}
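// Editor's sketch, not part of this patch: a plausible hadoop2-side MBeanSource implementation
// behind the call above, assuming the interface declares register(serviceName, metricsName,
// mbean) returning an ObjectName; the real MBeanSourceImpl (wired up via META-INF/services
// earlier in this diff) is not shown here.
class ExampleMBeanSource implements org.apache.hadoop.hbase.metrics.MBeanSource {
@Override
public javax.management.ObjectName register(String serviceName, String metricsName,
Object theMbean) {
// delegate to the metrics2 helper, e.g. ("hbase", "HMaster,sub=MXBean", mxBeanInfo)
return org.apache.hadoop.metrics2.util.MBeans.register(serviceName, metricsName, theMbean);
}
}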

View File

@ -15,17 +15,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
package org.apache.hadoop.hbase.master.metrics;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* This is the JMX management interface for Hbase master information
*/
@Evolving
@InterfaceStability.Evolving
@InterfaceAudience.Private
public interface MXBean {
/**
@ -101,18 +100,11 @@ public interface MXBean {
* Get the live region servers
* @return Live region servers
*/
public Map<String, ServerLoad> getRegionServers();
public int getRegionServers();
/**
* Get the dead region servers
* @return Dead region Servers
*/
public String[] getDeadRegionServers();
/**
* Get information on regions in transition
* @return Regions in transition
*/
public RegionsInTransitionInfo[] getRegionsInTransition();
}

View File

@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
package org.apache.hadoop.hbase.master.metrics;
import java.util.ArrayList;
import java.util.HashMap;
@ -25,6 +25,8 @@ import java.util.Map.Entry;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.master.HMaster;
/**
* Impl for exposing HMaster Information through JMX
@ -76,13 +78,8 @@ public class MXBeanImpl implements MXBean {
}
@Override
public Map<String, ServerLoad> getRegionServers() {
Map<String, ServerLoad> data = new HashMap<String, ServerLoad>();
for (final Entry<ServerName, ServerLoad> entry:
this.master.getServerManager().getOnlineServers().entrySet()) {
data.put(entry.getKey().getServerName(), entry.getValue());
}
return data;
public int getRegionServers() {
return this.master.getServerManager().getOnlineServers().size();
}
@Override
@ -94,48 +91,6 @@ public class MXBeanImpl implements MXBean {
return deadServers.toArray(new String[0]);
}
@Override
public RegionsInTransitionInfo[] getRegionsInTransition() {
List<RegionsInTransitionInfo> info =
new ArrayList<RegionsInTransitionInfo>();
for (final Entry<String, RegionState> entry : master.getAssignmentManager()
.getRegionStates().getRegionsInTransition().entrySet()) {
RegionsInTransitionInfo innerinfo = new RegionsInTransitionInfo() {
@Override
public String getRegionState() {
return entry.getValue().getState().toString();
}
@Override
public String getRegionName() {
return entry.getKey();
}
@Override
public long getLastUpdateTime() {
return entry.getValue().getStamp();
}
@Override
public String getRegionServerName() {
ServerName serverName = entry.getValue().getServerName();
if (serverName != null) {
return serverName.getServerName();
}
else {
return "";
}
}
};
info.add(innerinfo);
}
RegionsInTransitionInfo[] data =
new RegionsInTransitionInfo[info.size()];
info.toArray(data);
return data;
}
@Override
public String getServerName() {
return master.getServerName().getServerName();

View File

@ -17,24 +17,11 @@
*/
package org.apache.hadoop.hbase.master.metrics;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.HBaseInfo;
import org.apache.hadoop.hbase.metrics.MetricsRate;
import org.apache.hadoop.hbase.metrics.histogram.MetricsHistogram;
import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
import org.apache.hadoop.metrics.jvm.JvmMetrics;
import org.apache.hadoop.metrics.util.MetricsIntValue;
import org.apache.hadoop.metrics.util.MetricsLongValue;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
/**
* This class is for maintaining the various master statistics
@ -43,100 +30,19 @@ import org.apache.hadoop.metrics.util.MetricsRegistry;
* This class has a number of metrics variables that are publicly accessible;
* these variables (objects) have methods to update their values.
*/
@InterfaceStability.Evolving
@InterfaceAudience.Private
public class MasterMetrics implements Updater {
public class MasterMetrics {
private final Log LOG = LogFactory.getLog(this.getClass());
private final MetricsRecord metricsRecord;
private final MetricsRegistry registry = new MetricsRegistry();
private final MasterStatistics masterStatistics;
private long lastUpdate = System.currentTimeMillis();
private long lastExtUpdate = System.currentTimeMillis();
private long extendedPeriod = 0;
/*
* Count of requests to the cluster since last call to metrics update
*/
private final MetricsRate cluster_requests =
new MetricsRate("cluster_requests", registry);
/** Time it takes to finish HLog.splitLog() */
final MetricsHistogram splitTime = new MetricsHistogram("splitTime", registry);
/** Size of HLog files being split */
final MetricsHistogram splitSize = new MetricsHistogram("splitSize", registry);
/**
* Regions in Transition metrics such as number of RIT regions, oldest
* RIT time and number of such regions that are in transition
* for more than a specified threshold.
*/
public final MetricsIntValue ritCount =
new MetricsIntValue("ritCount", registry);
public final MetricsIntValue ritCountOverThreshold =
new MetricsIntValue("ritCountOverThreshold", registry);
public final MetricsLongValue ritOldestAge =
new MetricsLongValue("ritOldestAge", registry);
private MasterMetricsSource masterMetricsSource;
public MasterMetrics(final String name) {
MetricsContext context = MetricsUtil.getContext("hbase");
metricsRecord = MetricsUtil.createRecord(context, "master");
metricsRecord.setTag("Master", name);
context.registerUpdater(this);
JvmMetrics.init("Master", name);
HBaseInfo.init();
// expose the MBean for metrics
masterStatistics = new MasterStatistics(this.registry);
// get custom attributes
try {
Object m =
ContextFactory.getFactory().getAttribute("hbase.extendedperiod");
if (m instanceof String) {
this.extendedPeriod = Long.parseLong((String) m)*1000;
}
} catch (IOException ioe) {
LOG.info("Couldn't load ContextFactory for Metrics config info");
masterMetricsSource = CompatibilitySingletonFactory.getInstance(MasterMetricsSource.class);
}
LOG.info("Initialized");
}
public void shutdown() {
if (masterStatistics != null)
masterStatistics.shutdown();
}
/**
* Since this object is a registered updater, this method will be called
* periodically, e.g. every 5 seconds.
* @param unused
*/
public void doUpdates(MetricsContext unused) {
synchronized (this) {
this.lastUpdate = System.currentTimeMillis();
// has the extended period for long-living stats elapsed?
if (this.extendedPeriod > 0 &&
this.lastUpdate - this.lastExtUpdate >= this.extendedPeriod) {
this.lastExtUpdate = this.lastUpdate;
this.splitTime.clear();
this.splitSize.clear();
this.resetAllMinMax();
}
this.cluster_requests.pushMetric(metricsRecord);
this.splitTime.pushMetric(metricsRecord);
this.splitSize.pushMetric(metricsRecord);
this.ritCount.pushMetric(metricsRecord);
this.ritCountOverThreshold.pushMetric(metricsRecord);
this.ritOldestAge.pushMetric(metricsRecord);
}
this.metricsRecord.update();
}
public void resetAllMinMax() {
// Nothing to do
// for unit-test usage
public MasterMetricsSource getMetricsSource() {
return masterMetricsSource;
}
/**
@ -145,22 +51,17 @@ public class MasterMetrics implements Updater {
* @param size length of original HLogs that were split
*/
public synchronized void addSplit(long time, long size) {
splitTime.update(time);
splitSize.update(size);
}
/**
* @return Count of requests.
*/
public float getRequests() {
return this.cluster_requests.getPreviousIntervalValue();
//TODO use new metrics histogram
}
/**
* @param inc How much to add to requests.
*/
public void incrementRequests(final int inc) {
this.cluster_requests.inc(inc);
masterMetricsSource.incRequests(inc);
}
/**
@ -168,7 +69,7 @@ public class MasterMetrics implements Updater {
* @param ritCount
*/
public void updateRITCount(int ritCount) {
this.ritCount.set(ritCount);
masterMetricsSource.setRIT(ritCount);
}
/**
@ -177,13 +78,13 @@ public class MasterMetrics implements Updater {
* @param ritCountOverThreshold
*/
public void updateRITCountOverThreshold(int ritCountOverThreshold) {
this.ritCountOverThreshold.set(ritCountOverThreshold);
masterMetricsSource.setRITCountOverThreshold(ritCountOverThreshold);
}
/**
* update the timestamp for oldest region in transition metrics.
* @param timestamp
*/
public void updateRITOldestAge(long timestamp) {
this.ritOldestAge.set(timestamp);
masterMetricsSource.setRITOldestAge(timestamp);
}
}
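// Editor's sketch, not part of this patch: call sites only touch the facade above, which
// forwards to the MasterMetricsSource; the host name and values here are invented.
class MasterMetricsUsageSketch {
void sketch() {
MasterMetrics metrics = new MasterMetrics("master-1.example.com");
metrics.incrementRequests(100);                   // forwards to masterMetricsSource.incRequests(100)
metrics.updateRITCount(3);                        // forwards to masterMetricsSource.setRIT(3)
metrics.updateRITOldestAge(System.currentTimeMillis());
}
}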

View File

@ -19,8 +19,7 @@
package org.apache.hadoop.hbase.replication.regionserver.metrics;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.replication.regionserver.metrics.ReplicationMetricsSource;
import org.apache.hadoop.hbase.replication.regionserver.metrics.ReplicationMetricsSourceFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
/**
* This class is for maintaining the various replication statistics for a sink and publishing them
@ -36,7 +35,7 @@ public class ReplicationSinkMetrics {
private ReplicationMetricsSource rms;
public ReplicationSinkMetrics() {
rms = ReplicationMetricsSourceFactory.getInstance();
rms = CompatibilitySingletonFactory.getInstance(ReplicationMetricsSource.class);
}
/**

View File

@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.replication.regionserver.metrics;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
/**
* This class is for maintaining the various replication statistics for a source and publishing them
@ -65,7 +66,7 @@ public class ReplicationSourceMetrics {
logEditsFilteredKey = "source." + id + ".logEditsFiltered";
shippedBatchesKey = "source." + this.id + ".shippedBatches";
shippedOpsKey = "source." + this.id + ".shippedOps";
rms = ReplicationMetricsSourceFactory.getInstance();
rms = CompatibilitySingletonFactory.getInstance(ReplicationMetricsSource.class);
}
/**

View File

@ -25,6 +25,7 @@ import junit.framework.Assert;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.master.metrics.MXBeanImpl;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.MediumTests;
import org.junit.AfterClass;
@ -48,22 +49,6 @@ public class TestMXBean {
TEST_UTIL.shutdownMiniCluster();
}
private void verifyRegionServers(Map<String, ServerLoad> regions) {
Set<String> expected = new HashSet<String>();
for (int i = 0; i < 4; ++i) {
HRegionServer rs = TEST_UTIL.getMiniHBaseCluster().getRegionServer(i);
expected.add(rs.getServerName().getServerName());
}
int found = 0;
for (java.util.Map.Entry<String, ServerLoad> entry : regions.entrySet()) {
if (expected.contains(entry.getKey())) {
++found;
}
}
Assert.assertEquals(4, found);
}
@Test
public void testInfo() {
HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
@ -77,20 +62,16 @@ public class TestMXBean {
Assert.assertEquals(master.getCoprocessors().length,
info.getCoprocessors().length);
Assert.assertEquals(master.getServerManager().getOnlineServersList().size(),
info.getRegionServers().size());
Assert.assertEquals(master.getAssignmentManager().getRegionStates().isRegionsInTransition(),
info.getRegionsInTransition().length > 0);
Assert.assertTrue(info.getRegionServers().size() == 4);
info.getRegionServers());
Assert.assertTrue(info.getRegionServers() == 4);
String zkServers = info.getZookeeperQuorum();
Assert.assertEquals(zkServers.split(",").length,
TEST_UTIL.getZkCluster().getZooKeeperServerNum());
verifyRegionServers(info.getRegionServers());
TEST_UTIL.getMiniHBaseCluster().stopRegionServer(3, false);
TEST_UTIL.getMiniHBaseCluster().waitOnRegionServer(3);
Assert.assertTrue(info.getRegionServers().size() == 3);
Assert.assertTrue(info.getRegionServers() == 3);
Assert.assertTrue(info.getDeadRegionServers().length == 1);
}