From f854df4223ce2cc13c5c91b34f790440d5b4a0ba Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Thu, 26 Apr 2012 17:59:58 +0000
Subject: [PATCH] HBASE-5862 After Region Close remove the Operation Metrics

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1330997 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop/hbase/regionserver/HRegion.java      |  1 +
 .../hbase/regionserver/HRegionServer.java       |  8 +++
 .../metrics/OperationMetrics.java               | 15 +++--
 .../metrics/RegionMetricsStorage.java           |  8 +++
 .../metrics/RegionServerDynamicMetrics.java     | 62 +++++++++++++++++++
 .../regionserver/TestRegionServerMetrics.java   | 27 ++++++++
 6 files changed, 117 insertions(+), 4 deletions(-)

diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 727461f7918..bf3d3692ba0 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -966,6 +966,7 @@ public class HRegion implements HeapSize { // , Writable{
       status.setStatus("Running coprocessor post-close hooks");
       this.coprocessorHost.postClose(abort);
     }
+    this.opMetrics.closeMetrics();
     status.markComplete("Closed");
     LOG.info("Closed " + this);
     return result;
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 6dd2d4bbedb..ebffad6b11e 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -2680,6 +2680,14 @@ public class HRegionServer extends RegionServer
   public boolean removeFromOnlineRegions(final String encodedName) {
     HRegion toReturn = null;
     toReturn = this.onlineRegions.remove(encodedName);
+
+    //Clear all of the dynamic metrics as they are now probably useless.
+    //This is a clear because dynamic metrics could include metrics per cf and
+    //per hfile. Figuring out which cfs, hfiles, and regions are still relevant to
+    //this region server would be an onerous task. Instead just clear everything
+    //and on the next tick of the metrics everything that is still relevant will be
+    //re-added.
+    this.dynamicMetrics.clear();
     return toReturn != null;
   }
 
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/OperationMetrics.java b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/OperationMetrics.java
index ac4e57f56c2..305dfaba40f 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/OperationMetrics.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/OperationMetrics.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -64,12 +63,12 @@ public class OperationMetrics {
    * @param conf The Configuration of the HRegion reporting operations coming in.
    * @param regionInfo The region info
    */
-  public OperationMetrics(Configuration conf, HRegionInfo regionInfo) { 
+  public OperationMetrics(Configuration conf, HRegionInfo regionInfo) {
     // Configure SchemaMetrics before trying to create a RegionOperationMetrics instance as
     // RegionOperationMetrics relies on SchemaMetrics to do naming.
     if (conf != null) {
       SchemaMetrics.configureGlobally(conf);
-      
+
       this.conf = conf;
       if (regionInfo != null) {
         this.tableName = regionInfo.getTableNameAsString();
@@ -172,6 +171,13 @@ public class OperationMetrics {
   public void updateDeleteMetrics(Set<byte[]> columnFamilies, long value) {
     doUpdateTimeVarying(columnFamilies, DELETE_KEY, value);
   }
+
+  /**
+   * This deletes all of the old metrics this instance has ever created or updated.
+   */
+  public void closeMetrics() {
+    RegionMetricsStorage.clear();
+  }
 
   /**
    * Method to send updates for cf and region metrics. This is the normal method
@@ -199,7 +205,8 @@ public class OperationMetrics {
   private void doSafeIncTimeVarying(String prefix, String key, long value) {
     if (conf.getBoolean(CONF_KEY, true)) {
       if (prefix != null && !prefix.isEmpty() && key != null && !key.isEmpty()) {
-        RegionMetricsStorage.incrTimeVaryingMetric(prefix + key, value);
+        String m = prefix + key;
+        RegionMetricsStorage.incrTimeVaryingMetric(m, value);
       }
     }
   }
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionMetricsStorage.java b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionMetricsStorage.java
index 416e495057d..5d4beffc2e2 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionMetricsStorage.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionMetricsStorage.java
@@ -127,4 +127,12 @@ public class RegionMetricsStorage {
     return m.get();
   }
 
+  /**
+   * Clear all of the metrics this class stores.
+   */
+  public static void clear() {
+    timeVaryingMetrics.clear();
+    numericMetrics.clear();
+    numericPersistentMetrics.clear();
+  }
 }
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerDynamicMetrics.java b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerDynamicMetrics.java
index eb733a0db13..baa226af9f7 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerDynamicMetrics.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerDynamicMetrics.java
@@ -20,7 +20,9 @@
 
 package org.apache.hadoop.hbase.regionserver.metrics;
 
+import java.lang.reflect.Field;
 import java.lang.reflect.Method;
+import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -52,12 +54,18 @@ import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
  */
 @InterfaceAudience.Private
 public class RegionServerDynamicMetrics implements Updater {
+  private static final String UNABLE_TO_CLEAR = "Unable to clear RegionServerDynamicMetrics";
+
   private MetricsRecord metricsRecord;
   private MetricsContext context;
   private final RegionServerDynamicStatistics rsDynamicStatistics;
   private Method updateMbeanInfoIfMetricsListChanged = null;
   private static final Log LOG =
     LogFactory.getLog(RegionServerDynamicStatistics.class);
+
+  private boolean reflectionInitialized = false;
+  private Field recordMetricMapField;
+  private Field registryMetricMapField;
 
   /**
    * The metrics variables are public:
@@ -126,6 +134,60 @@ public class RegionServerDynamicMetrics implements Updater {
       m.inc(numOps, amt);
     }
   }
+
+  /**
+   * Clear all metrics this exposes.
+   * Uses reflection to clear them from the hadoop metrics side as well.
+   */
+  @SuppressWarnings("rawtypes")
+  public void clear() {
+
+    // If this is the first clear, use reflection to get the two maps that hold copies of our
+    // metrics on the hadoop metrics side. We have to use reflection because there is no way
+    // to remove metrics on the hadoop side. If we can't get them then clearing old metrics
+    // is not possible and bailing out early is our best option.
+    if (!this.reflectionInitialized) {
+      this.reflectionInitialized = true;
+      try {
+        this.recordMetricMapField = this.metricsRecord.getClass().getDeclaredField("metricTable");
+        this.recordMetricMapField.setAccessible(true);
+      } catch (SecurityException e) {
+        LOG.debug(UNABLE_TO_CLEAR);
+        return;
+      } catch (NoSuchFieldException e) {
+        LOG.debug(UNABLE_TO_CLEAR);
+        return;
+      }
+
+      try {
+        this.registryMetricMapField = this.registry.getClass().getDeclaredField("metricsList");
+        this.registryMetricMapField.setAccessible(true);
+      } catch (SecurityException e) {
+        LOG.debug(UNABLE_TO_CLEAR);
+        return;
+      } catch (NoSuchFieldException e) {
+        LOG.debug(UNABLE_TO_CLEAR);
+        return;
+      }
+    }
+
+
+    // If we found both fields then try to clear the maps.
+    if (this.recordMetricMapField != null && this.registryMetricMapField != null) {
+      try {
+        Map recordMap = (Map) this.recordMetricMapField.get(this.metricsRecord);
+        recordMap.clear();
+        Map registryMap = (Map) this.registryMetricMapField.get(this.registry);
+        registryMap.clear();
+      } catch (IllegalArgumentException e) {
+        LOG.debug(UNABLE_TO_CLEAR);
+      } catch (IllegalAccessException e) {
+        LOG.debug(UNABLE_TO_CLEAR);
+      }
+    } else {
+      LOG.debug(UNABLE_TO_CLEAR);
+    }
+  }
 
   /**
    * Push the metrics to the monitoring subsystem on doUpdate() call.
diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 6defa737520..aa5ca3729b0 100644
--- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -49,6 +49,7 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+
 /**
  * Test metrics incremented on region server operations.
  */
@@ -196,6 +197,32 @@ public class TestRegionServerMetrics {
   }
 
+  @Test
+  public void testRemoveRegionMetrics() throws IOException, InterruptedException {
+    String cf = "REMOVECF";
+    HTable hTable = TEST_UTIL.createTable(TABLE_NAME.getBytes(), cf.getBytes());
+    HRegionInfo[] regionInfos =
+        hTable.getRegionLocations().keySet()
+            .toArray(new HRegionInfo[hTable.getRegionLocations().keySet().size()]);
+
+    String regionName = regionInfos[0].getEncodedName();
+
+    // Do some operations so there are metrics.
+    Put pOne = new Put("TEST".getBytes());
+    pOne.add(cf.getBytes(), "test".getBytes(), "test".getBytes());
+    hTable.put(pOne);
+
+    Get g = new Get("TEST".getBytes());
+    g.addFamily(cf.getBytes());
+    hTable.get(g);
+    assertTimeVaryingMetricCount(1, TABLE_NAME, cf, regionName, "get_");
+    HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
+    admin.disableTable(TABLE_NAME.getBytes());
+    admin.deleteTable(TABLE_NAME.getBytes());
+
+    assertTimeVaryingMetricCount(0, TABLE_NAME, cf, regionName, "get_");
+  }
+
   @Test
   public void testMultipleRegions() throws IOException, InterruptedException {
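
A note for reviewers on the reflection pattern used in RegionServerDynamicMetrics.clear(): the hadoop-1 metrics API has no public call to unregister a metric, so the patch reaches into the private "metricTable" map of the MetricsRecord implementation and the private "metricsList" map of the registry, caches the Field objects after the first lookup, and only logs at debug when reflection is refused, so a locked-down JVM simply falls back to the old leave-stale-metrics behaviour instead of failing region close. The standalone sketch below illustrates that same pattern; it is not part of the patch, and MetricsHolder, internalMap, and clearInternalMap are invented names used purely for illustration.

import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;

public class ReflectionClearSketch {

  /** Hypothetical stand-in for a metrics class that keeps its metrics in a private map. */
  static class MetricsHolder {
    private final Map<String, Long> internalMap = new HashMap<String, Long>();

    void put(String name, long value) {
      internalMap.put(name, value);
    }

    int size() {
      return internalMap.size();
    }
  }

  /**
   * Empty the private map inside the holder. Mirrors what clear() does against the hadoop
   * metrics classes: look the Field up, setAccessible(true), then clear the Map it points at.
   * Failures are swallowed because the clearing is strictly best-effort.
   */
  static boolean clearInternalMap(MetricsHolder holder) {
    try {
      Field f = MetricsHolder.class.getDeclaredField("internalMap");
      f.setAccessible(true);
      Map<?, ?> map = (Map<?, ?>) f.get(holder);
      map.clear();
      return true;
    } catch (NoSuchFieldException e) {
      return false; // the field was renamed or removed in a newer version of the class
    } catch (SecurityException e) {
      return false; // a security manager refused the reflective access
    } catch (IllegalAccessException e) {
      return false; // setAccessible was not honoured
    }
  }

  public static void main(String[] args) {
    MetricsHolder holder = new MetricsHolder();
    holder.put("tbl.t1.cf.f1.get_num_ops", 3L);
    System.out.println("metrics before clear: " + holder.size()); // 1
    boolean cleared = clearInternalMap(holder);
    System.out.println("cleared=" + cleared + ", metrics after clear: " + holder.size()); // true, 0
  }
}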