HBASE-17578 Thrift metrics should handle exceptions

This commit is contained in:
Gary Helmling 2016-12-28 13:13:24 -08:00
parent 19e1450b0f
commit 7a6518f05d
13 changed files with 610 additions and 163 deletions

View File

@ -19,9 +19,9 @@
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.metrics.BaseSource;
import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource;
public interface MetricsHBaseServerSource extends BaseSource {
public interface MetricsHBaseServerSource extends ExceptionTrackingSource {
String AUTHORIZATION_SUCCESSES_NAME = "authorizationSuccesses";
String AUTHORIZATION_SUCCESSES_DESC =
"Number of authorization successes.";
@ -88,22 +88,6 @@ public interface MetricsHBaseServerSource extends BaseSource {
String NUM_LIFO_MODE_SWITCHES_DESC = "Total number of calls in general queue which " +
"were served from the tail of the queue";
String EXCEPTIONS_NAME="exceptions";
String EXCEPTIONS_DESC="Exceptions caused by requests";
String EXCEPTIONS_TYPE_DESC="Number of requests that resulted in the specified type of Exception";
String EXCEPTIONS_OOO_NAME="exceptions.OutOfOrderScannerNextException";
String EXCEPTIONS_BUSY_NAME="exceptions.RegionTooBusyException";
String EXCEPTIONS_UNKNOWN_NAME="exceptions.UnknownScannerException";
String EXCEPTIONS_SCANNER_RESET_NAME="exceptions.ScannerResetException";
String EXCEPTIONS_SANITY_NAME="exceptions.FailedSanityCheckException";
String EXCEPTIONS_MOVED_NAME="exceptions.RegionMovedException";
String EXCEPTIONS_NSRE_NAME="exceptions.NotServingRegionException";
String EXCEPTIONS_MULTI_TOO_LARGE_NAME = "exceptions.multiResponseTooLarge";
String EXCEPTIONS_MULTI_TOO_LARGE_DESC = "A response to a multi request was too large and the " +
"rest of the requests will have to be retried.";
String EXCEPTIONS_CALL_QUEUE_TOO_BIG = "exceptions.callQueueTooBig";
String EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC = "Call queue is full";
void authorizationSuccess();
void authorizationFailure();
@ -114,21 +98,6 @@ public interface MetricsHBaseServerSource extends BaseSource {
void authenticationFallback();
void exception();
/**
* Different types of exceptions
*/
void outOfOrderException();
void failedSanityException();
void movedRegionException();
void notServingRegionException();
void unknownScannerException();
void scannerResetException();
void tooBusyException();
void multiActionTooLargeException();
void callQueueTooBigException();
void sentBytes(long count);
void receivedBytes(int count);
@ -142,6 +111,4 @@ public interface MetricsHBaseServerSource extends BaseSource {
void processedCall(int processingTime);
void queuedAndProcessedCall(int totalTime);
}

View File

@ -0,0 +1,56 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
/**
 * Common interface for metrics source implementations which need to track individual exception
 * types thrown or received. Shared by the RPC server and Thrift server metrics sources so
 * that the per-exception-type counters use consistent metric names and descriptions.
 */
public interface ExceptionTrackingSource extends BaseSource {
  /** Name of the catch-all counter incremented once for every exception, regardless of type. */
  String EXCEPTIONS_NAME="exceptions";
  String EXCEPTIONS_DESC="Exceptions caused by requests";
  /** Shared description reused for each of the per-exception-type counters below. */
  String EXCEPTIONS_TYPE_DESC="Number of requests that resulted in the specified type of Exception";
  // Metric names for commonly-seen exception types; each counter counts only its own type.
  String EXCEPTIONS_OOO_NAME="exceptions.OutOfOrderScannerNextException";
  String EXCEPTIONS_BUSY_NAME="exceptions.RegionTooBusyException";
  String EXCEPTIONS_UNKNOWN_NAME="exceptions.UnknownScannerException";
  String EXCEPTIONS_SCANNER_RESET_NAME="exceptions.ScannerResetException";
  String EXCEPTIONS_SANITY_NAME="exceptions.FailedSanityCheckException";
  String EXCEPTIONS_MOVED_NAME="exceptions.RegionMovedException";
  String EXCEPTIONS_NSRE_NAME="exceptions.NotServingRegionException";
  String EXCEPTIONS_MULTI_TOO_LARGE_NAME = "exceptions.multiResponseTooLarge";
  String EXCEPTIONS_MULTI_TOO_LARGE_DESC = "A response to a multi request was too large and the " +
      "rest of the requests will have to be retried.";
  String EXCEPTIONS_CALL_QUEUE_TOO_BIG = "exceptions.callQueueTooBig";
  String EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC = "Call queue is full";

  /** Increments the catch-all exception counter. Called once for any exception seen. */
  void exception();

  /**
   * Different types of exceptions. Each method increments only the counter for its specific
   * exception type; callers also invoke {@link #exception()} for the catch-all count (as the
   * thrift metrics code does).
   */
  void outOfOrderException();
  void failedSanityException();
  void movedRegionException();
  void notServingRegionException();
  void unknownScannerException();
  void scannerResetException();
  void tooBusyException();
  void multiActionTooLargeException();
  void callQueueTooBigException();
}

View File

@ -18,13 +18,13 @@
package org.apache.hadoop.hbase.thrift;
import org.apache.hadoop.hbase.metrics.BaseSource;
import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource;
import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource;
/**
* Interface of a class that will export metrics about Thrift to hadoop's metrics2.
*/
public interface MetricsThriftServerSource extends BaseSource, JvmPauseMonitorSource {
public interface MetricsThriftServerSource extends ExceptionTrackingSource, JvmPauseMonitorSource {
String BATCH_GET_KEY = "batchGet";
String BATCH_MUTATE_KEY = "batchMutate";
@ -75,5 +75,4 @@ public interface MetricsThriftServerSource extends BaseSource, JvmPauseMonitorSo
* @param time Time
*/
void incSlowCall(long time);
}

View File

@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.ExceptionTrackingSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricHistogram;
import org.apache.hadoop.metrics2.MetricsCollector;
@ -28,7 +29,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.MutableFastCounter;
@InterfaceAudience.Private
public class MetricsHBaseServerSourceImpl extends BaseSourceImpl
public class MetricsHBaseServerSourceImpl extends ExceptionTrackingSourceImpl
implements MetricsHBaseServerSource {
@ -41,17 +42,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl
private final MutableFastCounter sentBytes;
private final MutableFastCounter receivedBytes;
private final MutableFastCounter exceptions;
private final MutableFastCounter exceptionsOOO;
private final MutableFastCounter exceptionsBusy;
private final MutableFastCounter exceptionsUnknown;
private final MutableFastCounter exceptionsScannerReset;
private final MutableFastCounter exceptionsSanity;
private final MutableFastCounter exceptionsNSRE;
private final MutableFastCounter exceptionsMoved;
private final MutableFastCounter exceptionsMultiTooLarge;
private final MutableFastCounter exceptionsCallQueueTooBig;
private MetricHistogram queueCallTime;
private MetricHistogram processCallTime;
@ -71,27 +61,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl
AUTHORIZATION_SUCCESSES_DESC, 0L);
this.authorizationFailures = this.getMetricsRegistry().newCounter(AUTHORIZATION_FAILURES_NAME,
AUTHORIZATION_FAILURES_DESC, 0L);
this.exceptions = this.getMetricsRegistry().newCounter(EXCEPTIONS_NAME, EXCEPTIONS_DESC, 0L);
this.exceptionsOOO = this.getMetricsRegistry()
.newCounter(EXCEPTIONS_OOO_NAME, EXCEPTIONS_TYPE_DESC, 0L);
this.exceptionsBusy = this.getMetricsRegistry()
.newCounter(EXCEPTIONS_BUSY_NAME, EXCEPTIONS_TYPE_DESC, 0L);
this.exceptionsUnknown = this.getMetricsRegistry()
.newCounter(EXCEPTIONS_UNKNOWN_NAME, EXCEPTIONS_TYPE_DESC, 0L);
this.exceptionsScannerReset = this.getMetricsRegistry()
.newCounter(EXCEPTIONS_SCANNER_RESET_NAME, EXCEPTIONS_TYPE_DESC, 0L);
this.exceptionsSanity = this.getMetricsRegistry()
.newCounter(EXCEPTIONS_SANITY_NAME, EXCEPTIONS_TYPE_DESC, 0L);
this.exceptionsMoved = this.getMetricsRegistry()
.newCounter(EXCEPTIONS_MOVED_NAME, EXCEPTIONS_TYPE_DESC, 0L);
this.exceptionsNSRE = this.getMetricsRegistry()
.newCounter(EXCEPTIONS_NSRE_NAME, EXCEPTIONS_TYPE_DESC, 0L);
this.exceptionsMultiTooLarge = this.getMetricsRegistry()
.newCounter(EXCEPTIONS_MULTI_TOO_LARGE_NAME, EXCEPTIONS_MULTI_TOO_LARGE_DESC, 0L);
this.exceptionsCallQueueTooBig = this.getMetricsRegistry().newCounter(
EXCEPTIONS_CALL_QUEUE_TOO_BIG, EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC, 0L);
this.authenticationSuccesses = this.getMetricsRegistry().newCounter(
AUTHENTICATION_SUCCESSES_NAME, AUTHENTICATION_SUCCESSES_DESC, 0L);
this.authenticationFailures = this.getMetricsRegistry().newCounter(AUTHENTICATION_FAILURES_NAME,
@ -134,56 +103,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl
authenticationFallbacks.incr();
}
@Override
public void exception() {
exceptions.incr();
}
@Override
public void outOfOrderException() {
exceptionsOOO.incr();
}
@Override
public void failedSanityException() {
exceptionsSanity.incr();
}
@Override
public void movedRegionException() {
exceptionsMoved.incr();
}
@Override
public void notServingRegionException() {
exceptionsNSRE.incr();
}
@Override
public void unknownScannerException() {
exceptionsUnknown.incr();
}
@Override
public void scannerResetException() {
exceptionsScannerReset.incr();
}
@Override
public void tooBusyException() {
exceptionsBusy.incr();
}
@Override
public void multiActionTooLargeException() {
exceptionsMultiTooLarge.incr();
}
@Override
public void callQueueTooBigException() {
exceptionsCallQueueTooBig.incr();
}
@Override
public void authenticationSuccess() {
authenticationSuccesses.incr();

View File

@ -0,0 +1,118 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;
import org.apache.hadoop.metrics2.lib.MutableFastCounter;
/**
 * Common base implementation for metrics sources which need to track exceptions thrown or
 * received. Registers one catch-all counter plus one counter per commonly-seen exception
 * type, using the metric names defined on {@link ExceptionTrackingSource}.
 */
public class ExceptionTrackingSourceImpl extends BaseSourceImpl
    implements ExceptionTrackingSource {
  // Counters are protected (not private) so subclasses such as the RPC and Thrift server
  // source implementations can share them. They are assigned in init(), not the constructor.
  protected MutableFastCounter exceptions;
  protected MutableFastCounter exceptionsOOO;
  protected MutableFastCounter exceptionsBusy;
  protected MutableFastCounter exceptionsUnknown;
  protected MutableFastCounter exceptionsScannerReset;
  protected MutableFastCounter exceptionsSanity;
  protected MutableFastCounter exceptionsNSRE;
  protected MutableFastCounter exceptionsMoved;
  protected MutableFastCounter exceptionsMultiTooLarge;
  protected MutableFastCounter exceptionsCallQueueTooBig;

  public ExceptionTrackingSourceImpl(String metricsName, String metricsDescription,
      String metricsContext, String metricsJmxContext) {
    super(metricsName, metricsDescription, metricsContext, metricsJmxContext);
  }

  /**
   * Registers the catch-all counter and every per-exception-type counter with the metrics
   * registry. NOTE(review): assumes init() is invoked during source construction/startup by
   * BaseSourceImpl — confirm against the superclass before relying on counter non-nullness.
   */
  @Override
  public void init() {
    super.init();
    this.exceptions = this.getMetricsRegistry().newCounter(EXCEPTIONS_NAME, EXCEPTIONS_DESC, 0L);
    this.exceptionsOOO = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_OOO_NAME, EXCEPTIONS_TYPE_DESC, 0L);
    this.exceptionsBusy = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_BUSY_NAME, EXCEPTIONS_TYPE_DESC, 0L);
    this.exceptionsUnknown = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_UNKNOWN_NAME, EXCEPTIONS_TYPE_DESC, 0L);
    this.exceptionsScannerReset = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_SCANNER_RESET_NAME, EXCEPTIONS_TYPE_DESC, 0L);
    this.exceptionsSanity = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_SANITY_NAME, EXCEPTIONS_TYPE_DESC, 0L);
    this.exceptionsMoved = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_MOVED_NAME, EXCEPTIONS_TYPE_DESC, 0L);
    this.exceptionsNSRE = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_NSRE_NAME, EXCEPTIONS_TYPE_DESC, 0L);
    this.exceptionsMultiTooLarge = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_MULTI_TOO_LARGE_NAME, EXCEPTIONS_MULTI_TOO_LARGE_DESC, 0L);
    this.exceptionsCallQueueTooBig = this.getMetricsRegistry()
        .newCounter(EXCEPTIONS_CALL_QUEUE_TOO_BIG, EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC, 0L);
  }

  // Each method below increments exactly one counter: either the catch-all counter
  // (exception()) or the counter for one specific exception type.
  @Override
  public void exception() {
    exceptions.incr();
  }

  @Override
  public void outOfOrderException() {
    exceptionsOOO.incr();
  }

  @Override
  public void failedSanityException() {
    exceptionsSanity.incr();
  }

  @Override
  public void movedRegionException() {
    exceptionsMoved.incr();
  }

  @Override
  public void notServingRegionException() {
    exceptionsNSRE.incr();
  }

  @Override
  public void unknownScannerException() {
    exceptionsUnknown.incr();
  }

  @Override
  public void scannerResetException() {
    exceptionsScannerReset.incr();
  }

  @Override
  public void tooBusyException() {
    exceptionsBusy.incr();
  }

  @Override
  public void multiActionTooLargeException() {
    exceptionsMultiTooLarge.incr();
  }

  @Override
  public void callQueueTooBigException() {
    exceptionsCallQueueTooBig.incr();
  }
}

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.thrift;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.ExceptionTrackingSourceImpl;
import org.apache.hadoop.metrics2.MetricHistogram;
import org.apache.hadoop.metrics2.lib.MutableFastCounter;
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
@ -31,7 +32,7 @@ import org.apache.hadoop.metrics2.lib.MutableHistogram;
* Implements BaseSource through BaseSourceImpl, following the pattern
*/
@InterfaceAudience.Private
public class MetricsThriftServerSourceImpl extends BaseSourceImpl implements
public class MetricsThriftServerSourceImpl extends ExceptionTrackingSourceImpl implements
MetricsThriftServerSource {
private MetricHistogram batchGetStat;
@ -73,7 +74,6 @@ public class MetricsThriftServerSourceImpl extends BaseSourceImpl implements
thriftCallStat = getMetricsRegistry().newTimeHistogram(THRIFT_CALL_KEY);
thriftSlowCallStat = getMetricsRegistry().newTimeHistogram(SLOW_THRIFT_CALL_KEY);
callQueueLenGauge = getMetricsRegistry().getGauge(CALL_QUEUE_LEN_KEY, 0);
}
@Override

View File

@ -62,16 +62,19 @@ public class HbaseHandlerMetricsProxy implements InvocationHandler {
public Object invoke(Object proxy, Method m, Object[] args)
throws Throwable {
Object result;
long start = now();
try {
long start = now();
result = m.invoke(handler, args);
long processTime = now() - start;
metrics.incMethodTime(m.getName(), processTime);
} catch (InvocationTargetException e) {
metrics.exception(e.getCause());
throw e.getTargetException();
} catch (Exception e) {
metrics.exception(e);
throw new RuntimeException(
"unexpected invocation exception: " + e.getMessage());
} finally {
long processTime = now() - start;
metrics.incMethodTime(m.getName(), processTime);
}
return result;
}

View File

@ -19,9 +19,21 @@
package org.apache.hadoop.hbase.thrift;
import org.apache.hadoop.hbase.CallQueueTooBigException;
import org.apache.hadoop.hbase.MultiActionResultTooLarge;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
import org.apache.hadoop.hbase.exceptions.RegionMovedException;
import org.apache.hadoop.hbase.exceptions.ScannerResetException;
import org.apache.hadoop.hbase.thrift.generated.IOError;
import org.apache.hadoop.hbase.thrift2.generated.TIOError;
/**
* This class is for maintaining the various statistics of thrift server
@ -87,4 +99,53 @@ public class ThriftMetrics {
}
}
/**
 * Increment the count for a specific exception type. This is called for each exception type
 * that is returned to the thrift handler.
 * @param rawThrowable type of exception
 */
public void exception(Throwable rawThrowable) {
  // Always bump the catch-all counter, even if the type is not individually tracked below.
  source.exception();

  // Strip thrift error wrappers (TIOError/IOError) before classifying the underlying cause.
  Throwable throwable = unwrap(rawThrowable);
  /**
   * Keep some metrics for commonly seen exceptions
   *
   * Try and put the most common types first.
   * Place child types before the parent type that they extend.
   *
   * If this gets much larger we might have to go to a hashmap
   */
  if (throwable != null) {
    if (throwable instanceof OutOfOrderScannerNextException) {
      source.outOfOrderException();
    } else if (throwable instanceof RegionTooBusyException) {
      source.tooBusyException();
    } else if (throwable instanceof UnknownScannerException) {
      source.unknownScannerException();
    } else if (throwable instanceof ScannerResetException) {
      source.scannerResetException();
    } else if (throwable instanceof RegionMovedException) {
      source.movedRegionException();
    } else if (throwable instanceof NotServingRegionException) {
      source.notServingRegionException();
    } else if (throwable instanceof FailedSanityCheckException) {
      source.failedSanityException();
    } else if (throwable instanceof MultiActionResultTooLarge) {
      source.multiActionTooLargeException();
    } else if (throwable instanceof CallQueueTooBigException) {
      source.callQueueTooBigException();
    }
  }
}
/**
 * Unwraps a throwable to the cause that should be classified for metrics. Thrift error
 * wrappers (TIOError/IOError) are replaced by their cause, then the result is passed
 * through ClientExceptionsUtil.findException — presumably to extract the root remote
 * exception from client-side wrapping; confirm against that utility's javadoc.
 * @param t the raw throwable; may be null
 * @return the unwrapped throwable, possibly null
 */
private static Throwable unwrap(Throwable t) {
  if (t == null) {
    return t;
  }
  if (t instanceof TIOError || t instanceof IOError) {
    t = t.getCause();
  }
  return ClientExceptionsUtil.findException(t);
}
}

View File

@ -807,7 +807,7 @@ public class ThriftServerRunner implements Runnable {
getAdmin().enableTable(getTableName(tableName));
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -817,7 +817,7 @@ public class ThriftServerRunner implements Runnable {
getAdmin().disableTable(getTableName(tableName));
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -827,7 +827,7 @@ public class ThriftServerRunner implements Runnable {
return this.connectionCache.getAdmin().isTableEnabled(getTableName(tableName));
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -844,7 +844,7 @@ public class ThriftServerRunner implements Runnable {
}
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -861,7 +861,7 @@ public class ThriftServerRunner implements Runnable {
}
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -876,7 +876,7 @@ public class ThriftServerRunner implements Runnable {
return list;
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -909,7 +909,7 @@ public class ThriftServerRunner implements Runnable {
return Collections.emptyList();
} catch (IOException e){
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -954,7 +954,7 @@ public class ThriftServerRunner implements Runnable {
return ThriftUtilities.cellFromHBase(result.rawCells());
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally {
closeTable(table);
}
@ -1000,7 +1000,7 @@ public class ThriftServerRunner implements Runnable {
return ThriftUtilities.cellFromHBase(result.rawCells());
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1047,7 +1047,7 @@ public class ThriftServerRunner implements Runnable {
return ThriftUtilities.cellFromHBase(result.rawCells());
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1108,7 +1108,7 @@ public class ThriftServerRunner implements Runnable {
return ThriftUtilities.rowResultFromHBase(result);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1177,7 +1177,7 @@ public class ThriftServerRunner implements Runnable {
return ThriftUtilities.rowResultFromHBase(result);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1212,7 +1212,7 @@ public class ThriftServerRunner implements Runnable {
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally {
closeTable(table);
}
@ -1237,7 +1237,7 @@ public class ThriftServerRunner implements Runnable {
table.delete(delete);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally {
closeTable(table);
}
@ -1260,7 +1260,7 @@ public class ThriftServerRunner implements Runnable {
getAdmin().createTable(desc);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} catch (IllegalArgumentException e) {
LOG.warn(e.getMessage(), e);
throw new IllegalArgument(Throwables.getStackTraceAsString(e));
@ -1284,7 +1284,7 @@ public class ThriftServerRunner implements Runnable {
getAdmin().deleteTable(tableName);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -1342,7 +1342,7 @@ public class ThriftServerRunner implements Runnable {
table.put(put);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} catch (IllegalArgumentException e) {
LOG.warn(e.getMessage(), e);
throw new IllegalArgument(Throwables.getStackTraceAsString(e));
@ -1415,7 +1415,7 @@ public class ThriftServerRunner implements Runnable {
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} catch (IllegalArgumentException e) {
LOG.warn(e.getMessage(), e);
throw new IllegalArgument(Throwables.getStackTraceAsString(e));
@ -1445,7 +1445,7 @@ public class ThriftServerRunner implements Runnable {
getBytes(row), family, qualifier, amount);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally {
closeTable(table);
}
@ -1483,7 +1483,7 @@ public class ThriftServerRunner implements Runnable {
}
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
return ThriftUtilities.rowResultFromHBase(results, resultScannerWrapper.isColumnSorted());
}
@ -1542,7 +1542,7 @@ public class ThriftServerRunner implements Runnable {
return addScanner(table.getScanner(scan), tScan.sortColumns);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1571,7 +1571,7 @@ public class ThriftServerRunner implements Runnable {
return addScanner(table.getScanner(scan), false);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1601,7 +1601,7 @@ public class ThriftServerRunner implements Runnable {
return addScanner(table.getScanner(scan), false);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1635,7 +1635,7 @@ public class ThriftServerRunner implements Runnable {
return addScanner(table.getScanner(scan), false);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1665,7 +1665,7 @@ public class ThriftServerRunner implements Runnable {
return addScanner(table.getScanner(scan), false);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1697,7 +1697,7 @@ public class ThriftServerRunner implements Runnable {
return addScanner(table.getScanner(scan), false);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1722,7 +1722,7 @@ public class ThriftServerRunner implements Runnable {
return columns;
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally {
closeTable(table);
}
@ -1736,7 +1736,7 @@ public class ThriftServerRunner implements Runnable {
}
} catch (IOException e){
LOG.error(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -1775,7 +1775,7 @@ public class ThriftServerRunner implements Runnable {
return region;
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
}
}
@ -1818,7 +1818,7 @@ public class ThriftServerRunner implements Runnable {
table.increment(inc);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1849,7 +1849,7 @@ public class ThriftServerRunner implements Runnable {
return ThriftUtilities.cellFromHBase(result.rawCells());
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} finally{
closeTable(table);
}
@ -1883,7 +1883,7 @@ public class ThriftServerRunner implements Runnable {
value != null ? getBytes(value) : HConstants.EMPTY_BYTE_ARRAY, put);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(Throwables.getStackTraceAsString(e));
throw getIOError(e);
} catch (IllegalArgumentException e) {
LOG.warn(e.getMessage(), e);
throw new IllegalArgument(Throwables.getStackTraceAsString(e));
@ -1894,6 +1894,11 @@ public class ThriftServerRunner implements Runnable {
}
/**
 * Builds the IOError returned to thrift clients, retaining the original throwable as the
 * cause (so metrics can classify it via getCause()) and the full stack trace as the
 * user-visible message.
 * @param throwable the underlying server-side failure
 */
private static IOError getIOError(Throwable throwable) {
  IOError error = new IOErrorWithCause(throwable);
  error.setMessage(Throwables.getStackTraceAsString(throwable));
  return error;
}
/**
* Adds all the attributes into the Operation object
@ -1923,4 +1928,37 @@ public class ThriftServerRunner implements Runnable {
}
}
}
/**
 * {@code IOError} subclass that retains the original Java throwable as the cause, so that
 * metrics code can classify the failure by exception type via {@link #getCause()}. The
 * cause exists only in-process; it is not part of the serialized thrift struct.
 */
public static class IOErrorWithCause extends IOError {
  // IOError is thrift-generated and presumably Serializable via TException — confirm;
  // pinning the version id keeps the added field from changing the computed default.
  // Harmless if the hierarchy is not actually Serializable.
  private static final long serialVersionUID = 1L;

  /** Underlying throwable; may be null. Final: assigned once in the constructor. */
  private final Throwable cause;

  public IOErrorWithCause(Throwable cause) {
    this.cause = cause;
  }

  @Override
  public Throwable getCause() {
    return cause;
  }

  @Override
  public boolean equals(Object other) {
    // Equal when the generated thrift fields match (super.equals) AND the causes are
    // equal or both null.
    if (super.equals(other) &&
        other instanceof IOErrorWithCause) {
      Throwable otherCause = ((IOErrorWithCause) other).getCause();
      if (this.getCause() != null) {
        return otherCause != null && this.getCause().equals(otherCause);
      } else {
        return otherCause == null;
      }
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Fold the cause into the generated hash so equal objects hash equally.
    int result = super.hashCode();
    result = 31 * result + (cause != null ? cause.hashCode() : 0);
    return result;
  }
}
}

View File

@ -112,20 +112,58 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
@Override
public Object invoke(Object proxy, Method m, Object[] args) throws Throwable {
Object result;
long start = now();
try {
long start = now();
result = m.invoke(handler, args);
int processTime = (int) (now() - start);
metrics.incMethodTime(m.getName(), processTime);
} catch (InvocationTargetException e) {
metrics.exception(e.getCause());
throw e.getTargetException();
} catch (Exception e) {
metrics.exception(e);
throw new RuntimeException("unexpected invocation exception: " + e.getMessage());
} finally {
int processTime = (int) (now() - start);
metrics.incMethodTime(m.getName(), processTime);
}
return result;
}
}
/**
 * {@code TIOError} subclass that retains the original Java throwable as the cause, so that
 * metrics code can classify the failure by exception type via {@link #getCause()}. The
 * cause exists only in-process; it is not part of the serialized thrift struct.
 */
private static class TIOErrorWithCause extends TIOError {
  // TIOError is thrift-generated and presumably Serializable via TException — confirm;
  // pinning the version id keeps the added field from changing the computed default.
  // Harmless if the hierarchy is not actually Serializable.
  private static final long serialVersionUID = 1L;

  /** Underlying throwable; may be null. Final: assigned once in the constructor. */
  private final Throwable cause;

  public TIOErrorWithCause(Throwable cause) {
    super();
    this.cause = cause;
  }

  @Override
  public Throwable getCause() {
    return cause;
  }

  @Override
  public boolean equals(Object other) {
    // Equal when the generated thrift fields match (super.equals) AND the causes are
    // equal or both null.
    if (super.equals(other) &&
        other instanceof TIOErrorWithCause) {
      Throwable otherCause = ((TIOErrorWithCause) other).getCause();
      if (this.getCause() != null) {
        return otherCause != null && this.getCause().equals(otherCause);
      } else {
        return otherCause == null;
      }
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Fold the cause into the generated hash so equal objects hash equally.
    int result = super.hashCode();
    result = 31 * result + (cause != null ? cause.hashCode() : 0);
    return result;
  }
}
/**
 * Returns the current value of the JVM's high-resolution time source, in nanoseconds.
 * Suitable only for measuring elapsed durations (call latency), not wall-clock time.
 */
private static long now() {
  return System.nanoTime();
}
@ -163,7 +201,7 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
}
private TIOError getTIOError(IOException e) {
TIOError err = new TIOError();
TIOError err = new TIOErrorWithCause(e);
err.setMessage(e.getMessage());
return err;
}

View File

@ -0,0 +1,102 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.thrift;
import org.apache.hadoop.hbase.CallQueueTooBigException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.MultiActionResultTooLarge;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
import org.apache.hadoop.hbase.exceptions.RegionMovedException;
import org.apache.hadoop.hbase.exceptions.ScannerResetException;
import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.List;
/**
* Simple test coprocessor for injecting exceptions on Get requests.
*/
/**
 * Test-only coprocessor that fails Get requests on demand.  A client triggers a
 * specific server-side exception by setting the {@code "error"} attribute on the
 * Get to the name of an {@link ErrorType} constant; Gets without the attribute
 * proceed normally.
 */
public class ErrorThrowingGetObserver extends BaseRegionObserver {
  public static final String SHOULD_ERROR_ATTRIBUTE = "error";

  @Override
  public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e,
                       Get get, List<Cell> results) throws IOException {
    byte[] requestedError = get.getAttribute(SHOULD_ERROR_ATTRIBUTE);
    if (requestedError == null) {
      // No injection requested; defer to the default observer behavior.
      super.preGetOp(e, get, results);
      return;
    }
    throw buildException(ErrorType.valueOf(Bytes.toString(requestedError)), e);
  }

  /** Builds the concrete exception instance matching the requested error type. */
  private static IOException buildException(ErrorType requested,
      ObserverContext<RegionCoprocessorEnvironment> e) {
    switch (requested) {
      case CALL_QUEUE_TOO_BIG:
        return new CallQueueTooBigException("Failing for test");
      case MULTI_ACTION_RESULT_TOO_LARGE:
        return new MultiActionResultTooLarge("Failing for test");
      case FAILED_SANITY_CHECK:
        return new FailedSanityCheckException("Failing for test");
      case NOT_SERVING_REGION:
        return new NotServingRegionException("Failing for test");
      case REGION_MOVED:
        return new RegionMovedException(
            e.getEnvironment().getRegionServerServices().getServerName(), 1);
      case SCANNER_RESET:
        return new ScannerResetException("Failing for test");
      case UNKNOWN_SCANNER:
        return new UnknownScannerException("Failing for test");
      case REGION_TOO_BUSY:
        return new RegionTooBusyException("Failing for test");
      case OUT_OF_ORDER_SCANNER_NEXT:
        return new OutOfOrderScannerNextException("Failing for test");
      default:
        // Unreachable for the current enum, kept as a safe fallback.
        return new DoNotRetryIOException("Failing for test");
    }
  }

  /**
   * The exception types this observer can inject, each paired with the metric
   * name under which servers count occurrences of that exception.
   */
  public enum ErrorType {
    CALL_QUEUE_TOO_BIG(ExceptionTrackingSource.EXCEPTIONS_CALL_QUEUE_TOO_BIG),
    MULTI_ACTION_RESULT_TOO_LARGE(ExceptionTrackingSource.EXCEPTIONS_MULTI_TOO_LARGE_NAME),
    FAILED_SANITY_CHECK(ExceptionTrackingSource.EXCEPTIONS_SANITY_NAME),
    NOT_SERVING_REGION(ExceptionTrackingSource.EXCEPTIONS_NSRE_NAME),
    REGION_MOVED(ExceptionTrackingSource.EXCEPTIONS_MOVED_NAME),
    SCANNER_RESET(ExceptionTrackingSource.EXCEPTIONS_SCANNER_RESET_NAME),
    UNKNOWN_SCANNER(ExceptionTrackingSource.EXCEPTIONS_UNKNOWN_NAME),
    REGION_TOO_BUSY(ExceptionTrackingSource.EXCEPTIONS_BUSY_NAME),
    OUT_OF_ORDER_SCANNER_NEXT(ExceptionTrackingSource.EXCEPTIONS_OOO_NAME);

    private final String metricName;

    ErrorType(String metricName) {
      this.metricName = metricName;
    }

    /** @return the exception-counter metric name associated with this error type */
    public String getMetricName() {
      return metricName;
    }
  }
}

View File

@ -22,10 +22,13 @@ import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -34,9 +37,13 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
@ -98,6 +105,7 @@ public class TestThriftServer {
public static void beforeClass() throws Exception {
UTIL.getConfiguration().setBoolean(ThriftServerRunner.COALESCE_INC_KEY, true);
UTIL.getConfiguration().setBoolean("hbase.table.sanity.checks", false);
UTIL.getConfiguration().setInt("hbase.client.retries.number", 3);
UTIL.startMiniCluster();
}
@ -708,6 +716,75 @@ public class TestThriftServer {
}
}
@Test
public void testMetricsWithException() throws Exception {
  final String row = "row1";
  final String family = "f";
  final String qualifier = "c";

  // Create a table whose coprocessor throws a configurable exception on Get.
  TableName tableName = TableName.valueOf("testMetricsWithException");
  HTableDescriptor tableDesc = new HTableDescriptor(tableName);
  tableDesc.addCoprocessor(ErrorThrowingGetObserver.class.getName());
  tableDesc.addFamily(new HColumnDescriptor(family));
  Table table = UTIL.createTable(tableDesc, null);

  long now = System.currentTimeMillis();
  Put put = new Put(Bytes.toBytes(row));
  put.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier), now, Bytes.toBytes("val1"));
  table.put(put);

  Configuration conf = UTIL.getConfiguration();
  ThriftMetrics metrics = getMetrics(conf);
  ThriftServerRunner.HBaseHandler hbaseHandler =
      new ThriftServerRunner.HBaseHandler(conf, UserProvider.instantiate(conf));
  Hbase.Iface handler = HbaseHandlerMetricsProxy.newInstance(hbaseHandler, metrics, conf);
  ByteBuffer tTableName = asByteBuffer(tableName.getNameAsString());

  // A successful get should bump the op counter; the counter may not exist yet.
  long getOpsBefore = 0;
  if (metricsHelper.checkCounterExists("getRow_num_ops", metrics.getSource())) {
    getOpsBefore = metricsHelper.getCounter("getRow_num_ops", metrics.getSource());
  }
  List<TRowResult> rows = handler.getRow(tTableName, asByteBuffer(row), null);
  assertEquals(1, rows.size());
  TRowResult result = rows.get(0);
  assertArrayEquals(Bytes.toBytes(row), result.getRow());
  Collection<TCell> cells = result.getColumns().values();
  assertEquals(1, cells.size());
  assertEquals(new TCell(asByteBuffer("val1"), now), cells.iterator().next());
  metricsHelper.assertCounter("getRow_num_ops", getOpsBefore + 1, metrics.getSource());

  // Each injected exception type must increment its own exception counter.
  for (ErrorThrowingGetObserver.ErrorType type : ErrorThrowingGetObserver.ErrorType.values()) {
    testExceptionType(handler, metrics, tTableName, row, type);
  }
}
/**
 * Issues a getRow that the {@link ErrorThrowingGetObserver} fails with the given
 * error type, and verifies that both the operation counter and the matching
 * per-exception counter are incremented by the metrics proxy.
 */
private void testExceptionType(Hbase.Iface handler, ThriftMetrics metrics,
                               ByteBuffer tTableName, String rowkey,
                               ErrorThrowingGetObserver.ErrorType errorType) throws Exception {
  long preGetCounter = metricsHelper.getCounter("getRow_num_ops", metrics.getSource());
  String exceptionKey = errorType.getMetricName();
  // Exception counters are created lazily, so the counter may not exist yet.
  long preExceptionCounter = metricsHelper.checkCounterExists(exceptionKey, metrics.getSource()) ?
      metricsHelper.getCounter(exceptionKey, metrics.getSource()) :
      0;
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
  attributes.put(asByteBuffer(ErrorThrowingGetObserver.SHOULD_ERROR_ATTRIBUTE),
      asByteBuffer(errorType.name()));
  try {
    // The return value is irrelevant: this call must throw. (Previously assigned
    // to an unused local, which was a dead store.)
    handler.getRow(tTableName, asByteBuffer(rowkey), attributes);
    fail("Get with error attribute should have thrown an exception");
  } catch (IOError e) {
    LOG.info("Received exception: ", e);
    metricsHelper.assertCounter("getRow_num_ops", preGetCounter + 1, metrics.getSource());
    metricsHelper.assertCounter(exceptionKey, preExceptionCounter + 1, metrics.getSource());
  }
}
/**
*
* @return a List of ColumnDescriptors for use in creating a table. Has one

View File

@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.thrift2;
import com.google.common.collect.Lists;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@ -33,11 +34,13 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.thrift.ErrorThrowingGetObserver;
import org.apache.hadoop.hbase.thrift.ThriftMetrics;
import org.apache.hadoop.hbase.thrift2.generated.TAppend;
import org.apache.hadoop.hbase.thrift2.generated.TColumn;
@ -139,6 +142,7 @@ public class TestThriftHBaseServiceHandler {
@BeforeClass
public static void beforeClass() throws Exception {
UTIL.getConfiguration().set("hbase.client.retries.number", "3");
UTIL.startMiniCluster();
Admin admin = UTIL.getAdmin();
HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableAname));
@ -975,6 +979,71 @@ public class TestThriftHBaseServiceHandler {
return m;
}
@Test
public void testMetricsWithException() throws Exception {
  byte[] row = Bytes.toBytes("row1");
  byte[] family = Bytes.toBytes("f");
  byte[] qualifier = Bytes.toBytes("c");

  // Create a table whose coprocessor throws a configurable exception on Get.
  TableName tableName = TableName.valueOf("testMetricsWithException");
  HTableDescriptor tableDesc = new HTableDescriptor(tableName);
  tableDesc.addCoprocessor(ErrorThrowingGetObserver.class.getName());
  tableDesc.addFamily(new HColumnDescriptor(family));
  Table table = UTIL.createTable(tableDesc, null);
  table.put(new Put(row).addColumn(family, qualifier, Bytes.toBytes("val1")));

  ThriftHBaseServiceHandler hbaseHandler = createHandler();
  ThriftMetrics metrics = getMetrics(UTIL.getConfiguration());
  THBaseService.Iface handler = ThriftHBaseServiceHandler.newInstance(hbaseHandler, metrics);
  ByteBuffer tTableName = wrap(tableName.getName());

  // A successful get should bump the op counter; the counter may not exist yet.
  long getOpsBefore = 0;
  if (metricsHelper.checkCounterExists("get_num_ops", metrics.getSource())) {
    getOpsBefore = metricsHelper.getCounter("get_num_ops", metrics.getSource());
  }
  TResult tResult = handler.get(tTableName, new TGet(wrap(row)));
  assertArrayEquals(row, tResult.getRow());
  assertTColumnValuesEqual(
      Lists.newArrayList(
          new TColumnValue(wrap(family), wrap(qualifier), wrap(Bytes.toBytes("val1")))),
      tResult.getColumnValues());
  metricsHelper.assertCounter("get_num_ops", getOpsBefore + 1, metrics.getSource());

  // Each injected exception type must increment its own exception counter.
  for (ErrorThrowingGetObserver.ErrorType type : ErrorThrowingGetObserver.ErrorType.values()) {
    testExceptionType(handler, metrics, tTableName, row, type);
  }
}
/**
 * Issues a get that the {@link ErrorThrowingGetObserver} fails with the given
 * error type, and verifies that both the operation counter and the matching
 * per-exception counter are incremented by the metrics proxy.
 */
private void testExceptionType(THBaseService.Iface handler, ThriftMetrics metrics,
    ByteBuffer tTableName, byte[] rowkey, ErrorThrowingGetObserver.ErrorType errorType) {
  long preGetCounter = metricsHelper.getCounter("get_num_ops", metrics.getSource());
  String exceptionKey = errorType.getMetricName();
  // Exception counters are created lazily, so the counter may not exist yet.
  long preExceptionCounter = metricsHelper.checkCounterExists(exceptionKey, metrics.getSource()) ?
      metricsHelper.getCounter(exceptionKey, metrics.getSource()) :
      0;
  TGet tGet = new TGet(wrap(rowkey));
  Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
  attributes.put(wrap(Bytes.toBytes(ErrorThrowingGetObserver.SHOULD_ERROR_ATTRIBUTE)),
      wrap(Bytes.toBytes(errorType.name())));
  tGet.setAttributes(attributes);
  try {
    // The return value is irrelevant: this call must throw. (Previously assigned
    // to an unused local, which was a dead store.)
    handler.get(tTableName, tGet);
    fail("Get with error attribute should have thrown an exception");
  } catch (TException e) {
    LOG.info("Received exception: ", e);
    metricsHelper.assertCounter("get_num_ops", preGetCounter + 1, metrics.getSource());
    metricsHelper.assertCounter(exceptionKey, preExceptionCounter + 1, metrics.getSource());
  }
}
@Test
public void testAttribute() throws Exception {
byte[] rowName = "testAttribute".getBytes();