From f482cff0c98803d42ecac2320e07555bb87b983f Mon Sep 17 00:00:00 2001
From: gchanan
Date: Wed, 19 Sep 2012 20:40:22 +0000
Subject: [PATCH] HBASE-6591 checkAndPut executed/not metrics

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1387746 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop/hbase/regionserver/HRegion.java    | 12 ++++-
 .../hbase/regionserver/HRegionServer.java     |  6 +++
 .../metrics/RegionServerMetrics.java          | 11 ++++
 .../regionserver/TestRegionServerMetrics.java | 52 ++++++++++++++++++-
 4 files changed, 78 insertions(+), 3 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 4edff2af7a7..8182afdc3f0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -239,6 +239,8 @@ public class HRegion implements HeapSize { // , Writable{
   final AtomicLong numPutsWithoutWAL = new AtomicLong(0);
   final AtomicLong dataInMemoryWithoutWAL = new AtomicLong(0);
 
+  final Counter checkAndMutateChecksPassed = new Counter();
+  final Counter checkAndMutateChecksFailed = new Counter();
   final Counter readRequestsCount = new Counter();
   final Counter writeRequestsCount = new Counter();
 
@@ -2416,7 +2418,7 @@ public class HRegion implements HeapSize { // , Writable{
    * @param lockId
    * @param writeToWAL
    * @throws IOException
-   * @return true if the new put was execute, false otherwise
+   * @return true if the new put was executed, false otherwise
    */
   public boolean checkAndMutate(byte [] row, byte [] family, byte [] qualifier,
       CompareOp compareOp, ByteArrayComparable comparator, Writable w,
@@ -2498,8 +2500,10 @@ public class HRegion implements HeapSize { // , Writable{
           prepareDelete(d);
           internalDelete(d, HConstants.DEFAULT_CLUSTER_ID, writeToWAL);
         }
+        this.checkAndMutateChecksPassed.increment();
         return true;
       }
+      this.checkAndMutateChecksFailed.increment();
       return false;
     } finally {
       if(lockId == null) releaseRowLock(lid);
@@ -4229,6 +4233,10 @@ public class HRegion implements HeapSize { // , Writable{
         newRegionInfo, a.getTableDesc(), null);
     dstRegion.readRequestsCount.set(a.readRequestsCount.get() + b.readRequestsCount.get());
     dstRegion.writeRequestsCount.set(a.writeRequestsCount.get() + b.writeRequestsCount.get());
+    dstRegion.checkAndMutateChecksFailed.set(
+      a.checkAndMutateChecksFailed.get() + b.checkAndMutateChecksFailed.get());
+    dstRegion.checkAndMutateChecksPassed.set(
+      a.checkAndMutateChecksPassed.get() + b.checkAndMutateChecksPassed.get());
     dstRegion.initialize();
     dstRegion.compactStores();
     if (LOG.isDebugEnabled()) {
@@ -5034,7 +5042,7 @@ public class HRegion implements HeapSize { // , Writable{
   public static final long FIXED_OVERHEAD = ClassSize.align(
       ClassSize.OBJECT +
       ClassSize.ARRAY +
-      37 * ClassSize.REFERENCE + Bytes.SIZEOF_INT +
+      39 * ClassSize.REFERENCE + Bytes.SIZEOF_INT +
       (7 * Bytes.SIZEOF_LONG) +
       Bytes.SIZEOF_BOOLEAN);
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index c14665597c3..cfe5960ed25 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -1453,6 +1453,8 @@ public class HRegionServer implements ClientProtocol,
     long memstoreSize = 0;
     int readRequestsCount = 0;
     int writeRequestsCount = 0;
+    long checkAndMutateChecksFailed = 0;
+    long checkAndMutateChecksPassed = 0;
     long storefileIndexSize = 0;
     HDFSBlocksDistribution hdfsBlocksDistribution =
       new HDFSBlocksDistribution();
@@ -1476,6 +1478,8 @@ public class HRegionServer implements ClientProtocol,
         dataInMemoryWithoutWAL += r.dataInMemoryWithoutWAL.get();
         readRequestsCount += r.readRequestsCount.get();
         writeRequestsCount += r.writeRequestsCount.get();
+        checkAndMutateChecksFailed += r.checkAndMutateChecksFailed.get();
+        checkAndMutateChecksPassed += r.checkAndMutateChecksPassed.get();
         synchronized (r.stores) {
           stores += r.stores.size();
           for (Map.Entry<byte[], Store> ee : r.stores.entrySet()) {
@@ -1549,6 +1553,8 @@ public class HRegionServer implements ClientProtocol,
         (int) (totalStaticBloomSize / 1024));
     this.metrics.readRequestsCount.set(readRequestsCount);
     this.metrics.writeRequestsCount.set(writeRequestsCount);
+    this.metrics.checkAndMutateChecksFailed.set(checkAndMutateChecksFailed);
+    this.metrics.checkAndMutateChecksPassed.set(checkAndMutateChecksPassed);
     this.metrics.compactionQueueSize.set(compactSplitThread
       .getCompactionQueueSize());
     this.metrics.flushQueueSize.set(cacheFlusher
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
index 3726616e96d..03ed7c4de2b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
@@ -160,6 +160,17 @@ public class RegionServerMetrics implements Updater {
   public final MetricsLongValue writeRequestsCount =
     new MetricsLongValue("writeRequestsCount", registry);
 
+  /**
+   * Count of checkAndMutates that failed the check
+   */
+  public final MetricsLongValue checkAndMutateChecksFailed =
+    new MetricsLongValue("checkAndMutateChecksFailed", registry);
+
+  /**
+   * Count of checkAndMutates that passed the check
+   */
+  public final MetricsLongValue checkAndMutateChecksPassed =
+    new MetricsLongValue("checkAndMutateChecksPassed", registry);
 
   /** */
   public final MetricsIntValue storefileIndexSizeMB =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index a383a4842fb..060f8394bfd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.regionserver.metrics.RegionMetricsStorage;
+import org.apache.hadoop.hbase.regionserver.metrics.RegionServerMetrics;
 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.
     StoreMetricType;
@@ -124,7 +125,7 @@ public class TestRegionServerMetrics {
         RegionMetricsStorage.getNumericMetric(storeMetricName) -
             (startValue != null ? startValue : 0));
   }
-  
+
   @Test
   public void testOperationMetrics() throws IOException {
     String cf = "OPCF";
@@ -198,6 +199,55 @@ public class TestRegionServerMetrics {
 
   }
 
+  private void assertCheckAndMutateMetrics(final HRegionServer rs,
+      long expectedPassed, long expectedFailed) {
+    rs.doMetrics();
+    RegionServerMetrics metrics = rs.getMetrics();
+    assertEquals("checkAndMutatePassed metrics incorrect",
+        expectedPassed, metrics.checkAndMutateChecksPassed.get());
+    assertEquals("checkAndMutateFailed metrics incorrect",
+        expectedFailed, metrics.checkAndMutateChecksFailed.get());
+  }
+
+  @Test
+  public void testCheckAndMutateMetrics() throws Exception {
+    final HRegionServer rs =
+      TEST_UTIL.getMiniHBaseCluster().getRegionServer(0);
+    byte [] tableName = Bytes.toBytes("testCheckAndMutateMetrics");
+    byte [] family = Bytes.toBytes("family");
+    byte [] qualifier = Bytes.toBytes("qualifier");
+    byte [] row = Bytes.toBytes("row1");
+    HTable table = TEST_UTIL.createTable(tableName, family);
+    long expectedPassed = 0;
+    long expectedFailed = 0;
+
+    // checkAndPut success
+    Put put = new Put(row);
+    byte [] val1 = Bytes.toBytes("val1");
+    put.add(family, qualifier, val1);
+    table.checkAndPut(row, family, qualifier, null, put);
+    expectedPassed++;
+    assertCheckAndMutateMetrics(rs, expectedPassed, expectedFailed);
+
+    // checkAndPut failure
+    byte [] val2 = Bytes.toBytes("val2");
+    table.checkAndPut(row, family, qualifier, val2, put);
+    expectedFailed++;
+    assertCheckAndMutateMetrics(rs, expectedPassed, expectedFailed);
+
+    // checkAndDelete success
+    Delete delete = new Delete(row);
+    delete.deleteColumn(family, qualifier);
+    table.checkAndDelete(row, family, qualifier, val1, delete);
+    expectedPassed++;
+    assertCheckAndMutateMetrics(rs, expectedPassed, expectedFailed);
+
+    // checkAndDelete failure
+    table.checkAndDelete(row, family, qualifier, val1, delete);
+    expectedFailed++;
+    assertCheckAndMutateMetrics(rs, expectedPassed, expectedFailed);
+  }
+
   @Test
   public void testRemoveRegionMetrics() throws IOException, InterruptedException {
     String cf = "REMOVECF";
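
For reference, a minimal client-side sketch of the operations whose outcomes the new counters track, assuming the HTable API of this era (the table name "t", family "f", and values below are illustrative only and are not part of the patch):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndMutateMetricsSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "t");   // assumes table "t" with family "f" already exists
    byte[] row = Bytes.toBytes("row1");
    byte[] family = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");

    // Check passes (a null expected value means "cell must be absent"), so the
    // region increments checkAndMutateChecksPassed and applies the Put.
    Put put = new Put(row);
    put.add(family, qualifier, Bytes.toBytes("v1"));
    table.checkAndPut(row, family, qualifier, null, put);

    // Check fails (the stored value is "v1", not "v2"); checkAndMutateChecksFailed
    // is incremented and the Put is not applied.
    table.checkAndPut(row, family, qualifier, Bytes.toBytes("v2"), put);

    // checkAndDelete feeds the same two counters; this check passes.
    Delete delete = new Delete(row);
    delete.deleteColumn(family, qualifier);
    table.checkAndDelete(row, family, qualifier, Bytes.toBytes("v1"), delete);

    table.close();
  }
}

After these three calls, the region server's checkAndMutateChecksPassed and checkAndMutateChecksFailed values exported through RegionServerMetrics would be expected to show deltas of 2 and 1 respectively, mirroring what testCheckAndMutateMetrics asserts above.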