HBASE-11516 Track time spent in executing coprocessors in each region (Srikanth Srungarapu and Andrew Purtell)

This commit is contained in:
Andrew Purtell 2014-08-01 09:37:56 -07:00
parent 33e1418105
commit 19e9b8aa52
11 changed files with 344 additions and 11 deletions

View File

@ -82,6 +82,10 @@
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math</artifactId>
</dependency>
</dependencies>
<profiles>

View File

@ -35,6 +35,8 @@ public interface MetricsRegionSource extends Comparable<MetricsRegionSource> {
"Sum of filesize on all files entering a finished, successful or aborted, compaction";
String NUM_FILES_COMPACTED_DESC =
"Number of files that were input for finished, successful or aborted, compactions";
String COPROCESSOR_EXECUTION_STATISTICS = "coprocessorExecutionStatistics";
String COPROCESSOR_EXECUTION_STATISTICS_DESC = "Statistics for coprocessor execution times";
/**
* Close the region's metrics as this region is closing.

View File

@ -18,6 +18,10 @@
package org.apache.hadoop.hbase.regionserver;
import java.util.Map;
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
/**
* Interface of class that will wrap an HRegion and export numbers so they can be
* used in MetricsRegionSource
@ -79,4 +83,9 @@ public interface MetricsRegionWrapper {
long getNumBytesCompacted();
long getNumCompactionsCompleted();
/**
 * Get the time spent by coprocessors in this region, keyed by coprocessor
 * name. Each value is a DescriptiveStatistics over observed execution times
 * for that coprocessor. NOTE(review): units are not visible here — the
 * region server UI divides by 1000/1000 and labels the result "ms", which
 * suggests nanoseconds; confirm against the coprocessor host that records
 * the samples.
 */
Map<String, DescriptiveStatistics> getCoprocessorExecutionStatistics();
}

View File

@ -18,8 +18,11 @@
package org.apache.hadoop.hbase.regionserver;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.impl.JmxCacheBuster;
@ -194,6 +197,32 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
mrb.addCounter(Interns.info(regionNamePrefix + MetricsRegionSource.NUM_FILES_COMPACTED_COUNT,
MetricsRegionSource.NUM_FILES_COMPACTED_DESC),
this.regionWrapper.getNumFilesCompacted());
for (Map.Entry<String, DescriptiveStatistics> entry : this.regionWrapper
.getCoprocessorExecutionStatistics()
.entrySet()) {
DescriptiveStatistics ds = entry.getValue();
mrb.addGauge(Interns.info(regionNamePrefix + " " + entry.getKey() + " "
+ MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS,
MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS_DESC + "Min: "), ds.getMin() / 1000);
mrb.addGauge(Interns.info(regionNamePrefix + " " + entry.getKey() + " "
+ MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS,
MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS_DESC + "Mean: "), ds.getMean() / 1000);
mrb.addGauge(Interns.info(regionNamePrefix + " " + entry.getKey() + " "
+ MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS,
MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS_DESC + "Max: "), ds.getMax() / 1000);
mrb.addGauge(Interns.info(regionNamePrefix + " " + entry.getKey() + " "
+ MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS,
MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS_DESC + "90th percentile: "), ds
.getPercentile(90d) / 1000);
mrb.addGauge(Interns.info(regionNamePrefix + " " + entry.getKey() + " "
+ MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS,
MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS_DESC + "95th percentile: "), ds
.getPercentile(95d) / 1000);
mrb.addGauge(Interns.info(regionNamePrefix + " " + entry.getKey() + " "
+ MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS,
MetricsRegionSource.COPROCESSOR_EXECUTION_STATISTICS_DESC + "99th percentile: "), ds
.getPercentile(99d) / 1000);
}
}
}

View File

@ -18,13 +18,16 @@
package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import java.util.Map;
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.junit.Test;
public class TestMetricsRegionSourceImpl {
@Test
@ -120,5 +123,10 @@ public class TestMetricsRegionSourceImpl {
public long getNumCompactionsCompleted() {
return 0;
}
@Override
public Map<String, DescriptiveStatistics> getCoprocessorExecutionStatistics() {
  // Return an empty map rather than null: MetricsRegionSourceImpl.snapshot()
  // iterates entrySet() of this return value and would throw a
  // NullPointerException on null. This also matches MetricsRegionWrapperStub,
  // which returns an empty HashMap. Fully qualified with an explicit type
  // witness so no new import is needed in this test file.
  return java.util.Collections.<String, DescriptiveStatistics>emptyMap();
}
}
}

View File

@ -22,15 +22,18 @@
</%args>
<%import>
java.util.*;
org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
org.apache.hadoop.hbase.regionserver.HRegionServer;
org.apache.hadoop.hbase.util.Bytes;
org.apache.hadoop.hbase.HRegionInfo;
org.apache.hadoop.hbase.regionserver.HRegion;
org.apache.hadoop.hbase.ServerName;
org.apache.hadoop.hbase.HBaseConfiguration;
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo;
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
org.apache.hadoop.hbase.client.RegionReplicaUtil;
org.apache.hadoop.hbase.regionserver.MetricsRegionWrapper;
</%import>
<%if (onlineRegions != null && onlineRegions.size() > 0) %>
@ -45,6 +48,7 @@
<li class=""><a href="#tab_regionStoreStats" data-toggle="tab">Storefile Metrics</a></li>
<li class=""><a href="#tab_regionMemstoreStats" data-toggle="tab">Memstore Metrics</a></li>
<li class=""><a href="#tab_regionCompactStats" data-toggle="tab">Compaction Metrics</a></li>
<li class=""><a href="#tab_coprocessorStats" data-toggle="tab">Coprocessor Metrics</a></li>
</ul>
<div class="tab-content" style="padding-bottom: 9px; border-bottom: 1px solid #ddd;">
<div class="tab-pane active" id="tab_regionBaseInfo">
@ -62,6 +66,9 @@
<div class="tab-pane" id="tab_regionCompactStats">
<& compactStats; onlineRegions = onlineRegions; &>
</div>
<div class="tab-pane" id="tab_coprocessorStats">
<& coprocessorStats; onlineRegions = onlineRegions; &>
</div>
</div>
</div>
<p>Region names are made of the containing table's name, a comma,
@ -231,3 +238,65 @@
</%for>
</table>
</%def>
<%def coprocessorStats>
<%doc>
Renders one row per (region, coprocessor) pair with min/avg/max and
90th/95th/99th percentile execution times taken from the region's
MetricsRegionWrapper. Regions not found online (null wrapper) are skipped.
Values are divided by 1000/1000 and labeled "ms", which implies the raw
statistics are in nanoseconds — TODO confirm at the recording site.
NOTE(review): the "%n" in each format string appends a platform newline
inside the table cell before "ms"; HTML collapses it to a space, but it
looks unintended — verify whether "%.3f" alone was meant.
</%doc>
<%args>
List<HRegionInfo> onlineRegions;
</%args>
<table class="table table-striped">
<tr>
<th>Region Name</th>
<th>Coprocessor</th>
<th>Execution Time Statistics</th>
</tr>
<%for HRegionInfo r: onlineRegions %>
<%java>
HRegion region = regionServer.getFromOnlineRegions(r.getEncodedName());
MetricsRegionWrapper mWrap = region == null ? null: region.getMetrics().getRegionWrapper();
</%java>
<%if mWrap != null %>
<%for Map.Entry<String, DescriptiveStatistics> entry: mWrap.getCoprocessorExecutionStatistics().entrySet() %>
<tr>
<%java>
String coprocessorName = entry.getKey();
DescriptiveStatistics ds = entry.getValue();
</%java>
<td><% r.getRegionNameAsString() %></td>
<td><% coprocessorName %></td>
<td>
<table class="table-condensed">
<tr>
<td>Min Time </td>
<td><% String.format("%.3f%n", ds.getMin()/1000/1000) %>ms</td>
</tr>
<tr>
<td>Avg Time </td>
<td><% String.format("%.3f%n", ds.getMean()/1000/1000) %>ms</td>
</tr>
<tr>
<td>Max Time </td>
<td><% String.format("%.3f%n", ds.getMax()/1000/1000) %>ms</td>
</tr>
<tr>
<td>90th percentile </td>
<td><% String.format("%.3f%n", ds.getPercentile(90d)/1000/1000) %>ms</td>
</tr>
<tr>
<td>95th percentile </td>
<td><% String.format("%.3f%n", ds.getPercentile(95d)/1000/1000) %>ms</td>
</tr>
<tr>
<td>99th percentile </td>
<td><% String.format("%.3f%n", ds.getPercentile(99d)/1000/1000) %>ms</td>
</tr>
</table>
</td>
</tr>
</%for>
</%if>
</%for>
</table>
</%def>

View File

@ -960,7 +960,7 @@ public class HRegion implements HeapSize { // , Writable{
return this.writeRequestsCount.get();
}
MetricsRegion getMetrics() {
public MetricsRegion getMetrics() {
return metricsRegion;
}

View File

@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@ -31,10 +30,12 @@ import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
public class MetricsRegion {
private final MetricsRegionSource source;
private MetricsRegionWrapper regionWrapper;
public MetricsRegion(final MetricsRegionWrapper wrapper) {
source = CompatibilitySingletonFactory.getInstance(MetricsRegionServerSourceFactory.class)
.createRegion(wrapper);
this.regionWrapper = wrapper;
}
public void close() {
@ -68,4 +69,9 @@ public class MetricsRegion {
MetricsRegionSource getSource() {
return source;
}
/**
 * @return the {@link MetricsRegionWrapper} this MetricsRegion was
 *         constructed with; used by the region server UI to read
 *         per-region coprocessor execution statistics.
 */
public MetricsRegionWrapper getRegionWrapper() {
return regionWrapper;
}
}

View File

@ -18,18 +18,20 @@
package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.metrics2.MetricsExecutor;
import java.io.Closeable;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.metrics2.MetricsExecutor;
public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable {
public static final int PERIOD = 45;
@ -41,6 +43,7 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
private long numStoreFiles;
private long memstoreSize;
private long storeFileSize;
private Map<String, DescriptiveStatistics> coprocessorTimes;
private ScheduledFuture<?> regionMetricsUpdateTask;
@ -50,6 +53,7 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
this.runnable = new HRegionMetricsWrapperRunnable();
this.regionMetricsUpdateTask = this.executor.scheduleWithFixedDelay(this.runnable, PERIOD,
PERIOD, TimeUnit.SECONDS);
this.coprocessorTimes = new HashMap<String, DescriptiveStatistics>();
}
@Override
@ -148,6 +152,8 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
numStoreFiles = tempNumStoreFiles;
memstoreSize = tempMemstoreSize;
storeFileSize = tempStoreFileSize;
coprocessorTimes = region.getCoprocessorHost().getCoprocessorExecutionStatistics();
}
}
@ -156,4 +162,9 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
regionMetricsUpdateTask.cancel(true);
}
@Override
public Map<String, DescriptiveStatistics> getCoprocessorExecutionStatistics() {
// Initialized to an empty HashMap in the constructor, then replaced every
// PERIOD seconds by the metrics update runnable with the map obtained from
// the region's coprocessor host — so callers see a periodically refreshed
// snapshot, not live-updating statistics.
return coprocessorTimes;
}
}

View File

@ -18,6 +18,11 @@
package org.apache.hadoop.hbase.regionserver;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
public class MetricsRegionWrapperStub implements MetricsRegionWrapper {
@Override
@ -79,4 +84,9 @@ public class MetricsRegionWrapperStub implements MetricsRegionWrapper {
public long getNumCompactionsCompleted() {
return 0;
}
@Override
public Map<String, DescriptiveStatistics> getCoprocessorExecutionStatistics() {
// Test stub: return a fresh empty map (never null) so callers that iterate
// the entry set, such as MetricsRegionSourceImpl.snapshot(), work safely.
return new HashMap<String, DescriptiveStatistics>();
}
}