HBASE-4145 Provide metrics for hbase client (Ming Ma)
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1177501 13f79535-47bb-0310-9956-ffa450edef68
parent: 93a104a018
commit: 2ce873e3d1
@@ -10,6 +10,7 @@ Release 0.93.0 - Unreleased
               interface definition (dhruba via jgray)
    HBASE-4477  Ability for an application to store metadata into the
               transaction log (dhruba via jgray)
+   HBASE-4145  Provide metrics for hbase client (Ming Ma)

   BUG FIXES
    HBASE-4488  Store could miss rows during flush (Lars H via jgray)

@@ -1,5 +1,5 @@
 /**
- * Copyright 2010 The Apache Software Foundation
+ * Copyright 2011 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -56,6 +56,7 @@ import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.HConnectionManager.HConnectable;
 import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
 import org.apache.hadoop.hbase.ipc.ExecRPCInvoker;
@@ -64,6 +65,7 @@ import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.io.DataOutputBuffer;

 /**
  * <p>Used to communicate with a single HBase table.
@@ -1035,6 +1037,7 @@ public class HTable implements HTableInterface, Closeable {
     private long lastNext;
     // Keep lastResult returned successfully in case we have to reset scanner.
     private Result lastResult = null;
+    private ScanMetrics scanMetrics = null;

     protected ClientScanner(final Scan scan) {
       if (CLIENT_LOG.isDebugEnabled()) {
@@ -1045,6 +1048,13 @@ public class HTable implements HTableInterface, Closeable {
       this.scan = scan;
       this.lastNext = System.currentTimeMillis();

+      // check if application wants to collect scan metrics
+      byte[] enableMetrics = scan.getAttribute(
+        Scan.SCAN_ATTRIBUTES_METRICS_ENABLE);
+      if (enableMetrics != null && Bytes.toBoolean(enableMetrics)) {
+        scanMetrics = new ScanMetrics();
+      }
+
       // Use the caching from the Scan. If not set, use the default cache setting for this table.
       if (this.scan.getCaching() > 0) {
         this.caching = this.scan.getCaching();
@@ -1140,6 +1150,9 @@ public class HTable implements HTableInterface, Closeable {
           // beginning of the region
           getConnection().getRegionServerWithRetries(callable);
           this.currentRegion = callable.getHRegionInfo();
+          if (this.scanMetrics != null) {
+            this.scanMetrics.countOfRegions.inc();
+          }
         } catch (IOException e) {
           close();
           throw e;
@@ -1151,15 +1164,39 @@ public class HTable implements HTableInterface, Closeable {
         int nbRows) {
       scan.setStartRow(localStartKey);
       ScannerCallable s = new ScannerCallable(getConnection(),
-        getTableName(), scan);
+        getTableName(), scan, this.scanMetrics);
       s.setCaching(nbRows);
       return s;
     }

+    /**
+     * publish the scan metrics
+     * For now, we use scan.setAttribute to pass the metrics for application
+     * or TableInputFormat to consume
+     * Later, we could push it to other systems
+     * We don't use metrics framework because it doesn't support
+     * multi instances of the same metrics on the same machine; for scan/map
+     * reduce scenarios, we will have multiple scans running at the same time
+     */
+    private void writeScanMetrics() throws IOException
+    {
+      // by default, scanMetrics is null
+      // if application wants to collect scanMetrics, it can turn it on by
+      // calling scan.setAttribute(SCAN_ATTRIBUTES_METRICS_ENABLE,
+      // Bytes.toBytes(Boolean.TRUE))
+      if (this.scanMetrics == null) {
+        return;
+      }
+      final DataOutputBuffer d = new DataOutputBuffer();
+      scanMetrics.write(d);
+      scan.setAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA, d.getData());
+    }
+
     public Result next() throws IOException {
       // If the scanner is closed but there is some rows left in the cache,
       // it will first empty it before returning null
       if (cache.size() == 0 && this.closed) {
+        writeScanMetrics();
         return null;
       }
       if (cache.size() == 0) {
@@ -1219,7 +1256,12 @@ public class HTable implements HTableInterface, Closeable {
             this.currentRegion = null;
             continue;
           }
-          lastNext = System.currentTimeMillis();
+          long currentTime = System.currentTimeMillis();
+          if (this.scanMetrics != null ) {
+            this.scanMetrics.sumOfMillisSecBetweenNexts.inc(
+              currentTime-lastNext);
+          }
+          lastNext = currentTime;
           if (values != null && values.length > 0) {
             for (Result rs : values) {
               cache.add(rs);
@@ -1237,6 +1279,7 @@ public class HTable implements HTableInterface, Closeable {
         if (cache.size() > 0) {
           return cache.poll();
         }
+        writeScanMetrics();
         return null;
       }

@@ -189,7 +189,7 @@ public class MetaScanner {
         " starting at row=" + Bytes.toStringBinary(startRow) + " for max=" +
         rowUpperLimit + " rows");
     }
-    callable = new ScannerCallable(connection, metaTableName, scan);
+    callable = new ScannerCallable(connection, metaTableName, scan, null);
     // Open scanner
     connection.getRegionServerWithRetries(callable);

@@ -88,6 +88,13 @@ public class Scan extends OperationWithAttributes implements Writable {
   private int maxVersions = 1;
   private int batch = -1;

+  // If application wants to collect scan metrics, it needs to
+  // call scan.setAttribute(SCAN_ATTRIBUTES_ENABLE, Bytes.toBytes(Boolean.TRUE))
+  static public String SCAN_ATTRIBUTES_METRICS_ENABLE =
+    "scan.attributes.metrics.enable";
+  static public String SCAN_ATTRIBUTES_METRICS_DATA =
+    "scan.attributes.metrics.data";
+
   /*
    * -1 means no caching
    */

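The two Scan attributes above, together with the ClientScanner changes earlier in this diff, give applications an opt-in, per-scan metrics channel: enable collection before the scan starts, drain the scanner, then deserialize the bytes published back onto the Scan. A minimal usage sketch, mirroring the new testScanMetrics test at the bottom of this patch (the table name and configuration setup here are assumptions, not part of the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.io.DataInputBuffer;

    public class ScanMetricsUsage {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable ht = new HTable(conf, "mytable");   // hypothetical table name
        Scan scan = new Scan();
        // opt in before the scan starts; collection is off by default
        scan.setAttribute(Scan.SCAN_ATTRIBUTES_METRICS_ENABLE,
            Bytes.toBytes(Boolean.TRUE));
        for (Result r : ht.getScanner(scan)) {
          // drain the scanner; ClientScanner accumulates metrics as it goes
        }
        // once the scanner is exhausted, writeScanMetrics() has serialized
        // the counters back onto the Scan under SCAN_ATTRIBUTES_METRICS_DATA
        byte[] bytes = scan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA);
        DataInputBuffer in = new DataInputBuffer();
        in.reset(bytes, 0, bytes.length);
        ScanMetrics metrics = new ScanMetrics();
        metrics.readFields(in);
        System.out.println("regions scanned: "
            + metrics.countOfRegions.getCurrentIntervalValue());
      }
    }

Note that the metrics are only published when the scanner is fully drained, since writeScanMetrics() runs when next() is about to return null.
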
@@ -1,6 +1,4 @@
 /**
- * Copyright 2010 The Apache Software Foundation
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,16 +19,18 @@
 package org.apache.hadoop.hbase.client;

 import java.io.IOException;
+import java.net.UnknownHostException;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.net.DNS;

 /**
  * Retries scanner operations such as create, next, etc.
@@ -43,15 +43,23 @@ public class ScannerCallable extends ServerCallable<Result[]> {
   private boolean closed = false;
   private Scan scan;
   private int caching = 1;
+  private ScanMetrics scanMetrics;
+
+  // indicate if it is a remote server call
+  private boolean isRegionServerRemote = true;

   /**
    * @param connection which connection
    * @param tableName table callable is on
    * @param scan the scan to execute
+   * @param scanMetrics the ScanMetrics to used, if it is null, ScannerCallable
+   * won't collect metrics
    */
-  public ScannerCallable (HConnection connection, byte [] tableName, Scan scan) {
+  public ScannerCallable (HConnection connection, byte [] tableName, Scan scan,
+    ScanMetrics scanMetrics) {
     super(connection, tableName, scan.getStartRow());
     this.scan = scan;
+    this.scanMetrics = scanMetrics;
   }

   /**
@@ -62,8 +70,33 @@ public class ScannerCallable extends ServerCallable<Result[]> {
   public void instantiateServer(boolean reload) throws IOException {
     if (!instantiated || reload) {
       super.instantiateServer(reload);
+      checkIfRegionServerIsRemote();
       instantiated = true;
     }
+
+    // check how often we retry.
+    // HConnectionManager will call instantiateServer with reload==true
+    // if and only if for retries.
+    if (reload && this.scanMetrics != null) {
+      this.scanMetrics.countOfRPCRetries.inc();
+      if (isRegionServerRemote) {
+        this.scanMetrics.countOfRemoteRPCRetries.inc();
+      }
+    }
+  }
+
+  /**
+   * compare the local machine hostname with region server's hostname
+   * to decide if hbase client connects to a remote region server
+   * @throws UnknownHostException.
+   */
+  private void checkIfRegionServerIsRemote() throws UnknownHostException {
+    String myAddress = DNS.getDefaultHost("default", "default");
+    if (this.location.getHostname().equalsIgnoreCase(myAddress)) {
+      isRegionServerRemote = false;
+    } else {
+      isRegionServerRemote = true;
+    }
   }

   /**
@@ -77,7 +110,9 @@ public class ScannerCallable extends ServerCallable<Result[]> {
     } else {
       Result [] rrs = null;
       try {
+        incRPCcallsMetrics();
         rrs = server.next(scannerId, caching);
+        updateResultsMetrics(rrs);
       } catch (IOException e) {
         IOException ioe = null;
         if (e instanceof RemoteException) {
@@ -88,6 +123,9 @@ public class ScannerCallable extends ServerCallable<Result[]> {
           // Throw a DNRE so that we break out of cycle of calling NSRE
           // when what we need is to open scanner against new location.
           // Attach NSRE to signal client that it needs to resetup scanner.
+          if (this.scanMetrics != null) {
+            this.scanMetrics.countOfNSRE.inc();
+          }
           throw new DoNotRetryIOException("Reset scanner", ioe);
         } else if (ioe instanceof RegionServerStoppedException) {
           // Throw a DNRE so that we break out of cycle of calling RSSE
@@ -104,11 +142,35 @@ public class ScannerCallable extends ServerCallable<Result[]> {
     return null;
   }

+  private void incRPCcallsMetrics() {
+    if (this.scanMetrics == null) {
+      return;
+    }
+    this.scanMetrics.countOfRPCcalls.inc();
+    if (isRegionServerRemote) {
+      this.scanMetrics.countOfRemoteRPCcalls.inc();
+    }
+  }
+
+  private void updateResultsMetrics(Result[] rrs) {
+    if (this.scanMetrics == null || rrs == null) {
+      return;
+    }
+    for (Result rr : rrs) {
+      this.scanMetrics.countOfBytesInResults.inc(rr.getBytes().getLength());
+      if (isRegionServerRemote) {
+        this.scanMetrics.countOfBytesInRemoteResults.inc(
+          rr.getBytes().getLength());
+      }
+    }
+  }
+
   private void close() {
     if (this.scannerId == -1L) {
       return;
     }
     try {
+      incRPCcallsMetrics();
       this.server.close(this.scannerId);
     } catch (IOException e) {
       LOG.warn("Ignore, probably already closed", e);
@@ -117,6 +179,7 @@ public class ScannerCallable extends ServerCallable<Result[]> {
   }

   protected long openScanner() throws IOException {
+    incRPCcallsMetrics();
     return this.server.openScanner(this.location.getRegionInfo().getRegionName(),
       this.scan);
   }

@@ -110,7 +110,6 @@ extends InputFormat<ImmutableBytesWritable, Result> {
     sc.setStopRow(tSplit.getEndRow());
     trr.setScan(sc);
     trr.setHTable(table);
-    trr.init();
     return trr;
   }

@@ -1,5 +1,5 @@
 /**
- * Copyright 2010 The Apache Software Foundation
+ * Copyright 2011 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -48,14 +48,6 @@ extends RecordReader<ImmutableBytesWritable, Result> {
     this.recordReaderImpl.restart(firstRow);
   }

-  /**
-   * Build the scanner. Not done in constructor to allow for extension.
-   *
-   * @throws IOException When restarting the scan fails.
-   */
-  public void init() throws IOException {
-    this.recordReaderImpl.init();
-  }
-
   /**
    * Sets the HBase table.
@@ -127,6 +119,7 @@ extends RecordReader<ImmutableBytesWritable, Result> {
   public void initialize(InputSplit inputsplit,
       TaskAttemptContext context) throws IOException,
       InterruptedException {
+    this.recordReaderImpl.initialize(inputsplit, context);
   }

   /**

@@ -1,6 +1,4 @@
 /**
- * Copyright 2010 The Apache Software Foundation
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,6 +18,7 @@
 package org.apache.hadoop.hbase.mapreduce;

 import java.io.IOException;
+import java.lang.reflect.Method;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -28,7 +27,14 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.metrics.util.MetricsTimeVaryingLong;
 import org.apache.hadoop.util.StringUtils;

 /**
@@ -40,12 +46,18 @@ public class TableRecordReaderImpl {
   static final Log LOG = LogFactory.getLog(TableRecordReader.class);

+  // HBASE_COUNTER_GROUP_NAME is the name of mapreduce counter group for HBase
+  private static final String HBASE_COUNTER_GROUP_NAME =
+    "HBase Counters";
   private ResultScanner scanner = null;
   private Scan scan = null;
+  private Scan currentScan = null;
   private HTable htable = null;
   private byte[] lastSuccessfulRow = null;
   private ImmutableBytesWritable key = null;
   private Result value = null;
+  private TaskAttemptContext context = null;
+  private Method getCounter = null;

   /**
    * Restart from survivable exceptions by creating a new scanner.
@@ -54,18 +66,31 @@ public class TableRecordReaderImpl {
    * @throws IOException When restarting fails.
    */
   public void restart(byte[] firstRow) throws IOException {
-    Scan newScan = new Scan(scan);
-    newScan.setStartRow(firstRow);
-    this.scanner = this.htable.getScanner(newScan);
+    currentScan = new Scan(scan);
+    currentScan.setStartRow(firstRow);
+    currentScan.setAttribute(Scan.SCAN_ATTRIBUTES_METRICS_ENABLE,
+      Bytes.toBytes(Boolean.TRUE));
+    this.scanner = this.htable.getScanner(currentScan);
   }

   /**
-   * Build the scanner. Not done in constructor to allow for extension.
-   *
-   * @throws IOException When restarting the scan fails.
+   * In new mapreduce APIs, TaskAttemptContext has two getCounter methods
+   * Check if getCounter(String, String) method is available.
+   * @return The getCounter method or null if not available.
+   * @throws IOException
    */
-  public void init() throws IOException {
-    restart(scan.getStartRow());
+  private Method retrieveGetCounterWithStringsParams(TaskAttemptContext context)
+    throws IOException {
+    Method m = null;
+    try {
+      m = context.getClass().getMethod("getCounter",
+        new Class [] {String.class, String.class});
+    } catch (SecurityException e) {
+      throw new IOException("Failed test for getCounter", e);
+    } catch (NoSuchMethodException e) {
+      // Ignore
+    }
+    return m;
   }

   /**
@@ -86,6 +111,21 @@ public class TableRecordReaderImpl {
     this.scan = scan;
   }

+  /**
+   * Build the scanner. Not done in constructor to allow for extension.
+   *
+   * @throws IOException, InterruptedException
+   */
+  public void initialize(InputSplit inputsplit,
+      TaskAttemptContext context) throws IOException,
+      InterruptedException {
+    if (context != null) {
+      this.context = context;
+      getCounter = retrieveGetCounterWithStringsParams(context);
+    }
+    restart(scan.getStartRow());
+  }
+
   /**
    * Closes the split.
    *
@@ -154,9 +194,48 @@ public class TableRecordReaderImpl {
       lastSuccessfulRow = key.get();
       return true;
     }
+
+    updateCounters();
     return false;
   }

+  /**
+   * If hbase runs on new version of mapreduce, RecordReader has access to
+   * counters thus can update counters based on scanMetrics.
+   * If hbase runs on old version of mapreduce, it won't be able to get
+   * access to counters and TableRecorderReader can't update counter values.
+   * @throws IOException
+   */
+  private void updateCounters() throws IOException {
+    // we can get access to counters only if hbase uses new mapreduce APIs
+    if (this.getCounter == null) {
+      return;
+    }
+
+    byte[] serializedMetrics = currentScan.getAttribute(
+      Scan.SCAN_ATTRIBUTES_METRICS_DATA);
+    if (serializedMetrics == null || serializedMetrics.length == 0 ) {
+      return;
+    }
+
+    DataInputBuffer in = new DataInputBuffer();
+    in.reset(serializedMetrics, 0, serializedMetrics.length);
+    ScanMetrics scanMetrics = new ScanMetrics();
+    scanMetrics.readFields(in);
+    MetricsTimeVaryingLong[] mlvs =
+      scanMetrics.getMetricsTimeVaryingLongArray();
+
+    try {
+      for (MetricsTimeVaryingLong mlv : mlvs) {
+        Counter ct = (Counter)this.getCounter.invoke(context,
+          HBASE_COUNTER_GROUP_NAME, mlv.getName());
+        ct.increment(mlv.getCurrentIntervalValue());
+      }
+    } catch (Exception e) {
+      LOG.debug("can't update counter." + StringUtils.stringifyException(e));
+    }
+  }
+
   /**
    * The current progress of the record reader through its data.
    *

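On the MapReduce side, TableRecordReaderImpl now force-enables metrics on its private copy of the Scan and, when it runs against a new-API TaskAttemptContext that exposes getCounter(String, String), republishes each MetricsTimeVaryingLong as a job counter in the "HBase Counters" group. A sketch of reading one such counter back after a job completes; only the group name is fixed by this patch, while the counter name below is an assumption (it is whatever mlv.getName() returns for the regions-scanned metric):

    import org.apache.hadoop.mapreduce.Counter;
    import org.apache.hadoop.mapreduce.Job;

    public class ScanCounters {
      // Read back a scan metric that TableRecordReaderImpl published as a
      // job counter; the group name matches HBASE_COUNTER_GROUP_NAME above,
      // the counter name "countOfRegions" is an assumption in this sketch.
      static long regionsScanned(Job job) throws Exception {
        Counter c = job.getCounters().getGroup("HBase Counters")
            .findCounter("countOfRegions");
        return c.getValue();
      }
    }
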
@@ -68,6 +68,9 @@ import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.metrics.util.MetricsTimeVaryingLong;
+import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -4193,5 +4196,48 @@ public class TestFromClientSide {

   }

+  /**
+   * Test ScanMetrics
+   * @throws Exception
+   */
+  @Test
+  public void testScanMetrics() throws Exception {
+    byte [] TABLENAME = Bytes.toBytes("testScanMetrics");
+
+    Configuration conf = TEST_UTIL.getConfiguration();
+    TEST_UTIL.createTable(TABLENAME, FAMILY);
+
+    // Set up test table:
+    // Create table:
+    HTable ht = new HTable(conf, TABLENAME);
+
+    // Create multiple regions for this table
+    int numOfRegions = TEST_UTIL.createMultiRegions(ht, FAMILY);
+
+    Scan scan1 = new Scan();
+    for(Result result : ht.getScanner(scan1)) {
+    }
+
+    // by default, scan metrics collection is turned off
+    assertEquals(null, scan1.getAttribute(
+      Scan.SCAN_ATTRIBUTES_METRICS_DATA));
+
+    // turn on scan metrics
+    Scan scan = new Scan();
+    scan.setAttribute(Scan.SCAN_ATTRIBUTES_METRICS_ENABLE,
+      Bytes.toBytes(Boolean.TRUE));
+    for(Result result : ht.getScanner(scan)) {
+    }
+
+    byte[] serializedMetrics = scan.getAttribute(
+      Scan.SCAN_ATTRIBUTES_METRICS_DATA);
+
+    DataInputBuffer in = new DataInputBuffer();
+    in.reset(serializedMetrics, 0, serializedMetrics.length);
+    ScanMetrics scanMetrics = new ScanMetrics();
+    scanMetrics.readFields(in);
+    assertEquals(numOfRegions, scanMetrics.countOfRegions.getCurrentIntervalValue());
+  }
+
 }

@@ -169,7 +169,7 @@ public class TestTableInputFormat {
     trr.setScan(s);
     trr.setHTable(table);

-    trr.init();
+    trr.initialize(null, null);
     Result r = new Result();
     ImmutableBytesWritable key = new ImmutableBytesWritable();