HBASE-4050 Combine Master Metrics into a single class

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1377896 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2012-08-27 22:39:23 +00:00
parent 275f0453c5
commit 93e03d7d09
24 changed files with 309 additions and 137 deletions

View File

@ -31,15 +31,41 @@ public interface MasterMetricsSource extends BaseMetricsSource {
public static final String METRICS_NAME = "HMaster";
/**
* The name of the metrics context that metrics will be under.
* The context metrics will be under.
*/
public static final String METRICS_CONTEXT = "HMaster,sub=Dynamic";
public static final String METRICS_CONTEXT = "hmaster";
/**
* The name of the metrics context that metrics will be under in jmx
*/
public static final String METRICS_JMX_CONTEXT = "HMaster";
/**
* Description
*/
public static final String METRICS_DESCRIPTION = "Metrics about HBase master server";
// Strings used for exporting to metrics system.
public static final String MASTER_ACTIVE_TIME_NAME = "masterActiveTime";
public static final String MASTER_START_TIME_NAME = "masterStartTime";
public static final String AVERAGE_LOAD_NAME = "averageLoad";
public static final String NUM_REGION_SERVERS_NAME = "numRegionServers";
public static final String NUM_DEAD_REGION_SERVERS_NAME = "numDeadRegionServers";
public static final String ZOOKEEPER_QUORUM_NAME = "zookeeperQuorum";
public static final String SERVER_NAME_NAME = "serverName";
public static final String CLUSTER_ID_NAME = "clusterId";
public static final String IS_ACTIVE_MASTER_NAME = "isActiveMaster";
public static final String MASTER_ACTIVE_TIME_DESC = "Master Active Time";
public static final String MASTER_START_TIME_DESC = "Master Start Time";
public static final String AVERAGE_LOAD_DESC = "AverageLoad";
public static final String NUMBER_OF_REGION_SERVERS_DESC = "Number of RegionServers";
public static final String NUMBER_OF_DEAD_REGION_SERVERS_DESC = "Number of dead RegionServers";
public static final String ZOOKEEPER_QUORUM_DESC = "Zookeeper Quorum";
public static final String SERVER_NAME_DESC = "Server Name";
public static final String CLUSTER_ID_DESC = "Cluster Id";
public static final String IS_ACTIVE_MASTER_DESC = "Is Active Master";
/**
* Increment the number of requests the cluster has seen.
* @param inc Amount to increment the total by.

View File

@ -0,0 +1,28 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
/**
* Interface of a factory to create MasterMetricsSource when given a MasterMetricsWrapper
*/
public interface MasterMetricsSourceFactory {

  /**
   * Create a MasterMetricsSource that reads master state from the given wrapper.
   *
   * @param beanWrapper supplies master state to the source; tests may pass null
   *                    (the source must tolerate a null wrapper)
   * @return a MasterMetricsSource backed by {@code beanWrapper}
   */
public MasterMetricsSource create(MasterMetricsWrapper beanWrapper);
}

View File

@ -15,39 +15,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* This is the JMX management interface for Hbase master information
* This is the interface that will expose information to hadoop1/hadoop2 implementations of the
* MasterMetricsSource.
*/
@InterfaceStability.Evolving
@InterfaceAudience.Private
public interface MXBean {
/**
* Required for MXBean implementation
*/
public static interface RegionsInTransitionInfo {
/**
* Name of region in transition
*/
public String getRegionName();
/**
* Current transition state
*/
public String getRegionState();
/**
* Get Region Server name
*/
public String getRegionServerName();
/**
* Get last update time
*/
public long getLastUpdateTime();
}
public interface MasterMetricsWrapper {
/**
* Get ServerName
@ -106,5 +81,5 @@ public interface MXBean {
* Get the dead region servers
* @return Dead region Servers
*/
public String[] getDeadRegionServers();
public int getDeadRegionServers();
}

View File

@ -35,6 +35,11 @@ public interface ReplicationMetricsSource extends BaseMetricsSource {
*/
public static final String METRICS_CONTEXT = "replicationmetrics";
/**
* The name of the metrics context that metrics will be under in jmx.
*/
public static final String METRICS_JMX_CONTEXT = "ReplicationMetrics";
/**
* A description.
*/

View File

@ -0,0 +1,30 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
/**
* Factory to create MasterMetricsSource when given a MasterMetricsWrapper
*/
public class MasterMetricsSourceFactoryImpl implements MasterMetricsSourceFactory {

  /**
   * Create a MasterMetricsSourceImpl wired to the given wrapper.
   * Note: a fresh instance is built on every call; the singleton caching done by
   * CompatibilitySingletonFactory applies to this factory, not to the sources it creates.
   */
@Override
public MasterMetricsSource create(MasterMetricsWrapper beanWrapper) {
return new MasterMetricsSourceImpl(beanWrapper);
}
}

View File

@ -18,7 +18,11 @@
package org.apache.hadoop.hbase.master.metrics;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.metrics.BaseMetricsSourceImpl;
import org.apache.hadoop.metrics2.MetricsBuilder;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.MetricMutableCounterLong;
import org.apache.hadoop.metrics2.lib.MetricMutableGaugeLong;
@ -28,21 +32,27 @@ import org.apache.hadoop.metrics2.lib.MetricMutableGaugeLong;
public class MasterMetricsSourceImpl
extends BaseMetricsSourceImpl implements MasterMetricsSource {
MetricMutableCounterLong clusterRequestsCounter;
MetricMutableGaugeLong ritGauge;
MetricMutableGaugeLong ritCountOverThresholdGauge;
MetricMutableGaugeLong ritOldestAgeGauge;
private static final Log LOG = LogFactory.getLog(MasterMetricsSourceImpl.class.getName());
final MetricMutableCounterLong clusterRequestsCounter;
final MetricMutableGaugeLong ritGauge;
final MetricMutableGaugeLong ritCountOverThresholdGauge;
final MetricMutableGaugeLong ritOldestAgeGauge;
public MasterMetricsSourceImpl() {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT);
private final MasterMetricsWrapper masterWrapper;
public MasterMetricsSourceImpl(MasterMetricsWrapper masterWrapper) {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT, METRICS_JMX_CONTEXT, masterWrapper);
}
public MasterMetricsSourceImpl(String metricsName,
String metricsDescription,
String metricsContext) {
super(metricsName, metricsDescription, metricsContext);
String metricsContext,
String metricsJmxContext,
MasterMetricsWrapper masterWrapper) {
super(metricsName, metricsDescription, metricsContext, metricsJmxContext);
this.masterWrapper = masterWrapper;
clusterRequestsCounter = getLongCounter("cluster_requests", 0);
ritGauge = getLongGauge("ritCount", 0);
ritCountOverThresholdGauge = getLongGauge("ritCountOverThreshold", 0);
@ -64,4 +74,40 @@ public class MasterMetricsSourceImpl
/**
 * Set the age of the oldest region in transition.
 *
 * @param ritCount age of the oldest RIT (units per caller; presumably millis — TODO confirm)
 */
public void setRITOldestAge(long ritCount) {
  // BUG FIX: this previously updated ritCountOverThresholdGauge, clobbering the
  // over-threshold count and never publishing the oldest-age gauge.
  ritOldestAgeGauge.set(ritCount);
}
/**
 * Method to export all the metrics.
 *
 * @param metricsBuilder Builder to accept metrics
 * @param all push all or only changed?
 */
@Override
public void getMetrics(MetricsBuilder metricsBuilder, boolean all) {
  MetricsRecordBuilder metricsRecordBuilder = metricsBuilder.addRecord(metricsName)
      .setContext(metricsContext);

  // masterWrapper can be null because this function is called inside of init.
  if (masterWrapper != null) {
    metricsRecordBuilder
        // BUG FIX: the active-time gauge previously reported getMasterStartTime().
        // NOTE(review): assumes MasterMetricsWrapper exposes getMasterActiveTime()
        // (as the old MXBean did) — confirm against the interface.
        .addGauge(MASTER_ACTIVE_TIME_NAME,
            MASTER_ACTIVE_TIME_DESC, masterWrapper.getMasterActiveTime())
        .addGauge(MASTER_START_TIME_NAME,
            MASTER_START_TIME_DESC, masterWrapper.getMasterStartTime())
        .addGauge(AVERAGE_LOAD_NAME, AVERAGE_LOAD_DESC, masterWrapper.getAverageLoad())
        .addGauge(NUM_REGION_SERVERS_NAME,
            NUMBER_OF_REGION_SERVERS_DESC, masterWrapper.getRegionServers())
        .addGauge(NUM_DEAD_REGION_SERVERS_NAME,
            NUMBER_OF_DEAD_REGION_SERVERS_DESC,
            masterWrapper.getDeadRegionServers())
        .tag(ZOOKEEPER_QUORUM_NAME, ZOOKEEPER_QUORUM_DESC, masterWrapper.getZookeeperQuorum())
        .tag(SERVER_NAME_NAME, SERVER_NAME_DESC, masterWrapper.getServerName())
        .tag(CLUSTER_ID_NAME, CLUSTER_ID_DESC, masterWrapper.getClusterId())
        .tag(IS_ACTIVE_MASTER_NAME,
            IS_ACTIVE_MASTER_DESC,
            String.valueOf(masterWrapper.getIsActiveMaster()));
  }
  // Snapshot registry-managed metrics (counters/gauges) into the same record.
  metricsRegistry.snapshot(metricsRecordBuilder, true);
}
}

View File

@ -41,14 +41,26 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
private static boolean defaultMetricsSystemInited = false;
public static final String HBASE_METRICS_SYSTEM_NAME = "hbase";
final DynamicMetricsRegistry metricsRegistry;
protected final DynamicMetricsRegistry metricsRegistry;
private JvmMetricsSource jvmMetricsSource;
protected final String metricsName;
protected final String metricsDescription;
protected final String metricsContext;
protected final String metricsJmxContext;
public BaseMetricsSourceImpl(
String metricsName,
String metricsDescription,
String metricsContext) {
String metricsContext,
String metricsJmxContext) {
this.metricsName = metricsName;
this.metricsDescription = metricsDescription;
this.metricsContext = metricsContext;
this.metricsJmxContext = metricsJmxContext;
metricsRegistry = new DynamicMetricsRegistry(metricsName).setContext(metricsContext);
@ -62,7 +74,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
}
//Register this instance.
DefaultMetricsSystem.INSTANCE.registerSource(metricsContext, metricsDescription, this);
DefaultMetricsSystem.INSTANCE.registerSource(metricsJmxContext, metricsDescription, this);
}
/**

View File

@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.replication.regionserver.metrics;
import org.apache.hadoop.hbase.metrics.BaseMetricsSourceImpl;
import org.apache.hadoop.metrics2.MetricsSource;
/**
* Hadoop1 implementation of ReplicationMetricsSource. This provides access to metrics gauges and
@ -28,14 +27,14 @@ import org.apache.hadoop.metrics2.MetricsSource;
public class ReplicationMetricsSourceImpl extends BaseMetricsSourceImpl implements
ReplicationMetricsSource {
public ReplicationMetricsSourceImpl() {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT);
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT, METRICS_JMX_CONTEXT);
}
ReplicationMetricsSourceImpl(String metricsName,
String metricsDescription, String metricsContext) {
super(metricsName, metricsDescription, metricsContext);
String metricsDescription,
String metricsContext,
String metricsJmxContext) {
super(metricsName, metricsDescription, metricsContext, metricsJmxContext);
}
}

View File

@ -1 +0,0 @@
org.apache.hadoop.hbase.master.metrics.MasterMetricsSourceImpl

View File

@ -0,0 +1 @@
org.apache.hadoop.hbase.master.metrics.MasterMetricsSourceFactoryImpl

View File

@ -31,10 +31,11 @@ public class MasterMetricsSourceImplTest {
@Test
public void testGetInstance() throws Exception {
MasterMetricsSource rms = CompatibilitySingletonFactory
.getInstance(MasterMetricsSource.class);
assertTrue(rms instanceof MasterMetricsSourceImpl);
assertSame(rms, CompatibilitySingletonFactory.getInstance(MasterMetricsSource.class));
MasterMetricsSourceFactory masterMetricsSourceFactory = CompatibilitySingletonFactory
.getInstance(MasterMetricsSourceFactory.class);
MasterMetricsSource masterMetricsSource = masterMetricsSourceFactory.create(null);
assertTrue(masterMetricsSource instanceof MasterMetricsSourceImpl);
assertSame(masterMetricsSourceFactory, CompatibilitySingletonFactory.getInstance(MasterMetricsSourceFactory.class));
}
}

View File

@ -36,7 +36,7 @@ public class BaseMetricsSourceImplTest {
@BeforeClass
public static void setUp() throws Exception {
bmsi = new BaseMetricsSourceImpl("TestName", "test description", "testcontext");
bmsi = new BaseMetricsSourceImpl("TestName", "test description", "testcontext", "TestContext");
}
@Test

View File

@ -0,0 +1,30 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.metrics;
/**
* Factory to create MasterMetricsSource when given a MasterMetricsWrapper
*/
public class MasterMetricsSourceFactoryImpl implements MasterMetricsSourceFactory {

  /**
   * Create a MasterMetricsSourceImpl wired to the given wrapper.
   * Note: a fresh instance is built on every call; the singleton caching done by
   * CompatibilitySingletonFactory applies to this factory, not to the sources it creates.
   */
@Override
public MasterMetricsSource create(MasterMetricsWrapper beanWrapper) {
return new MasterMetricsSourceImpl(beanWrapper);
}
}

View File

@ -19,28 +19,37 @@
package org.apache.hadoop.hbase.master.metrics;
import org.apache.hadoop.hbase.metrics.BaseMetricsSourceImpl;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
/**
* Hadoop2 implementation of MasterMetricsSource.
*/
/** Hadoop2 implementation of MasterMetricsSource. */
public class MasterMetricsSourceImpl
extends BaseMetricsSourceImpl implements MasterMetricsSource {
extends BaseMetricsSourceImpl implements MasterMetricsSource {
MutableCounterLong clusterRequestsCounter;
MutableGaugeLong ritGauge;
MutableGaugeLong ritCountOverThresholdGauge;
MutableGaugeLong ritOldestAgeGauge;
private final MasterMetricsWrapper masterWrapper;
public MasterMetricsSourceImpl() {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT);
public MasterMetricsSourceImpl(MasterMetricsWrapper masterMetricsWrapper) {
this(METRICS_NAME,
METRICS_DESCRIPTION,
METRICS_CONTEXT,
METRICS_JMX_CONTEXT,
masterMetricsWrapper);
}
public MasterMetricsSourceImpl(String metricsName,
String metricsDescription,
String metricsContext) {
super(metricsName, metricsDescription, metricsContext);
String metricsContext,
String metricsJmxContext,
MasterMetricsWrapper masterWrapper) {
super(metricsName, metricsDescription, metricsContext, metricsJmxContext);
this.masterWrapper = masterWrapper;
clusterRequestsCounter = getLongCounter("cluster_requests", 0);
ritGauge = getLongGauge("ritCount", 0);
@ -63,4 +72,37 @@ public class MasterMetricsSourceImpl
/**
 * Set the age of the oldest region in transition.
 *
 * @param ritCount age of the oldest RIT (units per caller; presumably millis — TODO confirm)
 */
public void setRITOldestAge(long ritCount) {
  // BUG FIX: this previously updated ritCountOverThresholdGauge, clobbering the
  // over-threshold count and never publishing the oldest-age gauge.
  ritOldestAgeGauge.set(ritCount);
}
/**
 * Export all metrics for this source: master-state gauges/tags read live from the
 * wrapper, plus whatever is in the dynamic registry.
 *
 * @param metricsCollector collector that receives the record
 * @param all push all or only changed? (registry snapshot below always pushes all)
 */
@Override
public void getMetrics(MetricsCollector metricsCollector, boolean all) {
  MetricsRecordBuilder metricsRecordBuilder = metricsCollector.addRecord(metricsName)
      .setContext(metricsContext);

  // masterWrapper can be null because this function is called inside of init.
  if (masterWrapper != null) {
    metricsRecordBuilder
        // BUG FIX: the active-time gauge previously reported getMasterStartTime().
        // NOTE(review): assumes MasterMetricsWrapper exposes getMasterActiveTime()
        // (as the old MXBean did) — confirm against the interface.
        .addGauge(Interns.info(MASTER_ACTIVE_TIME_NAME,
            MASTER_ACTIVE_TIME_DESC), masterWrapper.getMasterActiveTime())
        .addGauge(Interns.info(MASTER_START_TIME_NAME,
            MASTER_START_TIME_DESC), masterWrapper.getMasterStartTime())
        .addGauge(Interns.info(AVERAGE_LOAD_NAME, AVERAGE_LOAD_DESC),
            masterWrapper.getAverageLoad())
        .addGauge(Interns.info(NUM_REGION_SERVERS_NAME,
            NUMBER_OF_REGION_SERVERS_DESC), masterWrapper.getRegionServers())
        .addGauge(Interns.info(NUM_DEAD_REGION_SERVERS_NAME,
            NUMBER_OF_DEAD_REGION_SERVERS_DESC),
            masterWrapper.getDeadRegionServers())
        .tag(Interns.info(ZOOKEEPER_QUORUM_NAME, ZOOKEEPER_QUORUM_DESC),
            masterWrapper.getZookeeperQuorum())
        .tag(Interns.info(SERVER_NAME_NAME, SERVER_NAME_DESC), masterWrapper.getServerName())
        .tag(Interns.info(CLUSTER_ID_NAME, CLUSTER_ID_DESC), masterWrapper.getClusterId())
        .tag(Interns.info(IS_ACTIVE_MASTER_NAME,
            IS_ACTIVE_MASTER_DESC),
            String.valueOf(masterWrapper.getIsActiveMaster()));
  }
  metricsRegistry.snapshot(metricsRecordBuilder, true);
}
}

View File

@ -34,13 +34,25 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
private static boolean defaultMetricsSystemInited = false;
public static final String HBASE_METRICS_SYSTEM_NAME = "hbase";
final DynamicMetricsRegistry metricsRegistry;
protected final DynamicMetricsRegistry metricsRegistry;
protected final String metricsName;
protected final String metricsDescription;
protected final String metricsContext;
protected final String metricsJmxContext;
private JvmMetrics jvmMetricsSource;
public BaseMetricsSourceImpl(String metricsName,
String metricsDescription,
String metricsContext) {
public BaseMetricsSourceImpl(
String metricsName,
String metricsDescription,
String metricsContext,
String metricsJmxContext) {
this.metricsName = metricsName;
this.metricsDescription = metricsDescription;
this.metricsContext = metricsContext;
this.metricsJmxContext = metricsJmxContext;
metricsRegistry = new DynamicMetricsRegistry(metricsName).setContext(metricsContext);
if (!defaultMetricsSystemInited) {
@ -49,7 +61,8 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
DefaultMetricsSystem.initialize(HBASE_METRICS_SYSTEM_NAME);
jvmMetricsSource = JvmMetrics.create(metricsName, "", DefaultMetricsSystem.instance());
}
DefaultMetricsSystem.instance().register(metricsContext, metricsDescription, this);
DefaultMetricsSystem.instance().register(metricsJmxContext, metricsDescription, this);
}
@ -116,11 +129,6 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
metricsRegistry.removeMetric(key);
}
@Override
public void getMetrics(MetricsCollector metricsCollector, boolean all) {
metricsRegistry.snapshot(metricsCollector.addRecord(metricsRegistry.info()), all);
}
/**
* Get a MetricMutableGaugeLong from the storage. If it is not there atomically put it.
*
@ -142,4 +150,9 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource {
protected MutableCounterLong getLongCounter(String counterName, long potentialStartingValue) {
return metricsRegistry.getLongCounter(counterName, potentialStartingValue);
}
  /**
   * Snapshot every metric in this source's registry into a new record.
   *
   * @param metricsCollector collector that receives the record
   * @param all push all metrics or only changed ones (passed through to the registry)
   */
@Override
public void getMetrics(MetricsCollector metricsCollector, boolean all) {
metricsRegistry.snapshot(metricsCollector.addRecord(metricsRegistry.info()), all);
}
}

View File

@ -28,12 +28,15 @@ import org.apache.hadoop.metrics2.MetricsSource;
public class ReplicationMetricsSourceImpl extends BaseMetricsSourceImpl implements
ReplicationMetricsSource {
public ReplicationMetricsSourceImpl() {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT);
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT, METRICS_JMX_CONTEXT);
}
ReplicationMetricsSourceImpl(String metricsName,
String metricsDescription, String metricsContext) {
super(metricsName, metricsDescription, metricsContext);
String metricsDescription,
String metricsContext,
String metricsJmxContext) {
super(metricsName, metricsDescription, metricsContext, metricsJmxContext);
}
}

View File

@ -1 +0,0 @@
org.apache.hadoop.hbase.master.metrics.MasterMetricsSourceImpl

View File

@ -0,0 +1 @@
org.apache.hadoop.hbase.master.metrics.MasterMetricsSourceFactoryImpl

View File

@ -31,10 +31,11 @@ public class MasterMetricsSourceImplTest {
@Test
public void testGetInstance() throws Exception {
MasterMetricsSource rms = CompatibilitySingletonFactory
.getInstance(MasterMetricsSource.class);
assertTrue(rms instanceof MasterMetricsSourceImpl);
assertSame(rms, CompatibilitySingletonFactory.getInstance(MasterMetricsSource.class));
MasterMetricsSourceFactory masterMetricsSourceFactory = CompatibilitySingletonFactory
.getInstance(MasterMetricsSourceFactory.class);
MasterMetricsSource masterMetricsSource = masterMetricsSourceFactory.create(null);
assertTrue(masterMetricsSource instanceof MasterMetricsSourceImpl);
assertSame(masterMetricsSourceFactory, CompatibilitySingletonFactory.getInstance(MasterMetricsSourceFactory.class));
}
}

View File

@ -35,7 +35,7 @@ public class BaseMetricsSourceImplTest {
@BeforeClass
public static void setUp() throws Exception {
bmsi = new BaseMetricsSourceImpl("TestName", "test description", "testcontext");
bmsi = new BaseMetricsSourceImpl("TestName", "test description", "testcontext", "TestContext");
}
@Test

View File

@ -49,7 +49,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.DeserializationException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -81,8 +80,7 @@ import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorType;
import org.apache.hadoop.hbase.ipc.HBaseRPC;
import org.apache.hadoop.hbase.ipc.HBaseServer;
import org.apache.hadoop.hbase.master.metrics.MXBeanImpl;
import org.apache.hadoop.hbase.metrics.MBeanSource;
import org.apache.hadoop.hbase.master.metrics.MasterMetricsWrapperImpl;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.ipc.ProtocolSignature;
@ -367,12 +365,14 @@ Server {
this.zooKeeper = new ZooKeeperWatcher(conf, MASTER + ":" + isa.getPort(), this, true);
this.rpcServer.startThreads();
this.metrics = new MasterMetrics(getServerName().toString());
// metrics interval: using the same property as region server.
this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);
//should we check the compression codec type at master side, default true, HBASE-6370
this.masterCheckCompression = conf.getBoolean("hbase.master.check.compression", true);
this.metrics = new MasterMetrics( new MasterMetricsWrapperImpl(this));
}
/**
@ -690,8 +690,6 @@ Server {
this.balancerChore = getAndStartBalancerChore(this);
this.catalogJanitorChore = new CatalogJanitor(this, this);
startCatalogJanitorChore();
registerMBean();
}
status.markComplete("Initialization successful");
@ -2259,16 +2257,6 @@ Server {
new HMasterCommandLine(HMaster.class).doMain(args);
}
/**
* Register bean with platform management server
*/
void registerMBean() {
MXBeanImpl mxBeanInfo = MXBeanImpl.init(this);
mxBean = CompatibilitySingletonFactory.getInstance(
MBeanSource.class).register("hbase", "HMaster,sub=MXBean", mxBeanInfo);
LOG.info("Registered HMaster MXBean");
}
public HFileCleaner getHFileCleaner() {
return this.hfileCleaner;
}

View File

@ -36,8 +36,8 @@ public class MasterMetrics {
private final Log LOG = LogFactory.getLog(this.getClass());
private MasterMetricsSource masterMetricsSource;
public MasterMetrics(final String name) {
masterMetricsSource = CompatibilitySingletonFactory.getInstance(MasterMetricsSource.class);
public MasterMetrics(MasterMetricsWrapper masterWrapper) {
masterMetricsSource = CompatibilitySingletonFactory.getInstance(MasterMetricsSourceFactory.class).create(masterWrapper);
}
// for unit-test usage
@ -51,7 +51,6 @@ public class MasterMetrics {
* @param size length of original HLogs that were split
*/
public synchronized void addSplit(long time, long size) {
//TODO use new metrics histogram
}

View File

@ -17,33 +17,16 @@
*/
package org.apache.hadoop.hbase.master.metrics;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.master.HMaster;
/**
* Impl for exposing HMaster Information through JMX
*/
public class MXBeanImpl implements MXBean {
public class MasterMetricsWrapperImpl implements MasterMetricsWrapper {
private final HMaster master;
private static MXBeanImpl instance = null;
public synchronized static MXBeanImpl init(final HMaster master) {
if (instance == null) {
instance = new MXBeanImpl(master);
}
return instance;
}
protected MXBeanImpl(final HMaster master) {
public MasterMetricsWrapperImpl(final HMaster master) {
this.master = master;
}
@ -83,12 +66,8 @@ public class MXBeanImpl implements MXBean {
}
@Override
public String[] getDeadRegionServers() {
List<String> deadServers = new ArrayList<String>();
for (ServerName name : master.getServerManager().getDeadServers()) {
deadServers.add(name.getHostAndPort());
}
return deadServers.toArray(new String[0]);
  /** @return the number of dead region servers currently known to the master's ServerManager */
public int getDeadRegionServers() {
return master.getServerManager().getDeadServers().size();
}
@Override

View File

@ -15,18 +15,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
package org.apache.hadoop.hbase.master.metrics;
import junit.framework.Assert;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.master.metrics.MXBeanImpl;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.metrics.MasterMetricsWrapperImpl;
import org.apache.hadoop.hbase.MediumTests;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -34,7 +29,7 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestMXBean {
public class TestMasterMetricsWrapper {
private static final HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
@ -52,7 +47,7 @@ public class TestMXBean {
@Test
public void testInfo() {
HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
MXBeanImpl info = MXBeanImpl.init(master);
MasterMetricsWrapperImpl info = new MasterMetricsWrapperImpl(master);
Assert.assertEquals(master.getAverageLoad(), info.getAverageLoad());
Assert.assertEquals(master.getClusterId(), info.getClusterId());
Assert.assertEquals(master.getMasterActiveTime(),
@ -72,7 +67,7 @@ public class TestMXBean {
TEST_UTIL.getMiniHBaseCluster().stopRegionServer(3, false);
TEST_UTIL.getMiniHBaseCluster().waitOnRegionServer(3);
Assert.assertTrue(info.getRegionServers() == 3);
Assert.assertTrue(info.getDeadRegionServers().length == 1);
Assert.assertTrue(info.getDeadRegionServers() == 1);
}