From 7a6518f05d1e5b5ef2125fd996b13bdb35b79587 Mon Sep 17 00:00:00 2001 From: Gary Helmling Date: Wed, 28 Dec 2016 13:13:24 -0800 Subject: [PATCH] HBASE-17578 Thrift metrics should handle exceptions --- .../hbase/ipc/MetricsHBaseServerSource.java | 37 +----- .../metrics/ExceptionTrackingSource.java | 56 +++++++++ .../thrift/MetricsThriftServerSource.java | 5 +- .../ipc/MetricsHBaseServerSourceImpl.java | 85 +------------ .../metrics/ExceptionTrackingSourceImpl.java | 118 ++++++++++++++++++ .../thrift/MetricsThriftServerSourceImpl.java | 4 +- .../thrift/HbaseHandlerMetricsProxy.java | 9 +- .../hadoop/hbase/thrift/ThriftMetrics.java | 61 +++++++++ .../hbase/thrift/ThriftServerRunner.java | 104 ++++++++++----- .../thrift2/ThriftHBaseServiceHandler.java | 46 ++++++- .../thrift/ErrorThrowingGetObserver.java | 102 +++++++++++++++ .../hadoop/hbase/thrift/TestThriftServer.java | 77 ++++++++++++ .../TestThriftHBaseServiceHandler.java | 69 ++++++++++ 13 files changed, 610 insertions(+), 163 deletions(-) create mode 100644 hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/ExceptionTrackingSource.java create mode 100644 hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/ExceptionTrackingSourceImpl.java create mode 100644 hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/ErrorThrowingGetObserver.java diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java index cc0a60b12ac..534331aeb4d 100644 --- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java @@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.ipc; -import org.apache.hadoop.hbase.metrics.BaseSource; +import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource; -public interface MetricsHBaseServerSource extends BaseSource { +public interface MetricsHBaseServerSource extends ExceptionTrackingSource { String AUTHORIZATION_SUCCESSES_NAME = "authorizationSuccesses"; String AUTHORIZATION_SUCCESSES_DESC = "Number of authorization successes."; @@ -88,22 +88,6 @@ public interface MetricsHBaseServerSource extends BaseSource { String NUM_LIFO_MODE_SWITCHES_DESC = "Total number of calls in general queue which " + "were served from the tail of the queue"; - String EXCEPTIONS_NAME="exceptions"; - String EXCEPTIONS_DESC="Exceptions caused by requests"; - String EXCEPTIONS_TYPE_DESC="Number of requests that resulted in the specified type of Exception"; - String EXCEPTIONS_OOO_NAME="exceptions.OutOfOrderScannerNextException"; - String EXCEPTIONS_BUSY_NAME="exceptions.RegionTooBusyException"; - String EXCEPTIONS_UNKNOWN_NAME="exceptions.UnknownScannerException"; - String EXCEPTIONS_SCANNER_RESET_NAME="exceptions.ScannerResetException"; - String EXCEPTIONS_SANITY_NAME="exceptions.FailedSanityCheckException"; - String EXCEPTIONS_MOVED_NAME="exceptions.RegionMovedException"; - String EXCEPTIONS_NSRE_NAME="exceptions.NotServingRegionException"; - String EXCEPTIONS_MULTI_TOO_LARGE_NAME = "exceptions.multiResponseTooLarge"; - String EXCEPTIONS_MULTI_TOO_LARGE_DESC = "A response to a multi request was too large and the " + - "rest of the requests will have to be retried."; - String EXCEPTIONS_CALL_QUEUE_TOO_BIG = "exceptions.callQueueTooBig"; - String EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC = "Call queue is full"; - void authorizationSuccess(); void 
authorizationFailure(); @@ -114,21 +98,6 @@ public interface MetricsHBaseServerSource extends BaseSource { void authenticationFallback(); - void exception(); - - /** - * Different types of exceptions - */ - void outOfOrderException(); - void failedSanityException(); - void movedRegionException(); - void notServingRegionException(); - void unknownScannerException(); - void scannerResetException(); - void tooBusyException(); - void multiActionTooLargeException(); - void callQueueTooBigException(); - void sentBytes(long count); void receivedBytes(int count); @@ -142,6 +111,4 @@ public interface MetricsHBaseServerSource extends BaseSource { void processedCall(int processingTime); void queuedAndProcessedCall(int totalTime); - - } diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/ExceptionTrackingSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/ExceptionTrackingSource.java new file mode 100644 index 00000000000..fa252fc2962 --- /dev/null +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/ExceptionTrackingSource.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.metrics; + +/** + * Common interface for metrics source implementations which need to track individual exception + * types thrown or received. 
+ */ +public interface ExceptionTrackingSource extends BaseSource { + String EXCEPTIONS_NAME="exceptions"; + String EXCEPTIONS_DESC="Exceptions caused by requests"; + String EXCEPTIONS_TYPE_DESC="Number of requests that resulted in the specified type of Exception"; + String EXCEPTIONS_OOO_NAME="exceptions.OutOfOrderScannerNextException"; + String EXCEPTIONS_BUSY_NAME="exceptions.RegionTooBusyException"; + String EXCEPTIONS_UNKNOWN_NAME="exceptions.UnknownScannerException"; + String EXCEPTIONS_SCANNER_RESET_NAME="exceptions.ScannerResetException"; + String EXCEPTIONS_SANITY_NAME="exceptions.FailedSanityCheckException"; + String EXCEPTIONS_MOVED_NAME="exceptions.RegionMovedException"; + String EXCEPTIONS_NSRE_NAME="exceptions.NotServingRegionException"; + String EXCEPTIONS_MULTI_TOO_LARGE_NAME = "exceptions.multiResponseTooLarge"; + String EXCEPTIONS_MULTI_TOO_LARGE_DESC = "A response to a multi request was too large and the " + + "rest of the requests will have to be retried."; + String EXCEPTIONS_CALL_QUEUE_TOO_BIG = "exceptions.callQueueTooBig"; + String EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC = "Call queue is full"; + + void exception(); + + /** + * Different types of exceptions + */ + void outOfOrderException(); + void failedSanityException(); + void movedRegionException(); + void notServingRegionException(); + void unknownScannerException(); + void scannerResetException(); + void tooBusyException(); + void multiActionTooLargeException(); + void callQueueTooBigException(); +} diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java index 558a8633bb1..77fb11a3942 100644 --- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hbase.thrift; -import org.apache.hadoop.hbase.metrics.BaseSource; +import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource; import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource; /** * Interface of a class that will export metrics about Thrift to hadoop's metrics2. 
*/ -public interface MetricsThriftServerSource extends BaseSource, JvmPauseMonitorSource { +public interface MetricsThriftServerSource extends ExceptionTrackingSource, JvmPauseMonitorSource { String BATCH_GET_KEY = "batchGet"; String BATCH_MUTATE_KEY = "batchMutate"; @@ -75,5 +75,4 @@ public interface MetricsThriftServerSource extends BaseSource, JvmPauseMonitorSo * @param time Time */ void incSlowCall(long time); - } diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java index 69aa5fed5f9..eee641a8089 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.metrics.BaseSourceImpl; +import org.apache.hadoop.hbase.metrics.ExceptionTrackingSourceImpl; import org.apache.hadoop.hbase.metrics.Interns; import org.apache.hadoop.metrics2.MetricHistogram; import org.apache.hadoop.metrics2.MetricsCollector; @@ -28,7 +29,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.lib.MutableFastCounter; @InterfaceAudience.Private -public class MetricsHBaseServerSourceImpl extends BaseSourceImpl +public class MetricsHBaseServerSourceImpl extends ExceptionTrackingSourceImpl implements MetricsHBaseServerSource { @@ -41,17 +42,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl private final MutableFastCounter sentBytes; private final MutableFastCounter receivedBytes; - private final MutableFastCounter exceptions; - private final MutableFastCounter exceptionsOOO; - private final MutableFastCounter exceptionsBusy; - private final MutableFastCounter exceptionsUnknown; - private final MutableFastCounter exceptionsScannerReset; - private final MutableFastCounter exceptionsSanity; - private final MutableFastCounter exceptionsNSRE; - private final MutableFastCounter exceptionsMoved; - private final MutableFastCounter exceptionsMultiTooLarge; - private final MutableFastCounter exceptionsCallQueueTooBig; - private MetricHistogram queueCallTime; private MetricHistogram processCallTime; @@ -71,27 +61,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl AUTHORIZATION_SUCCESSES_DESC, 0L); this.authorizationFailures = this.getMetricsRegistry().newCounter(AUTHORIZATION_FAILURES_NAME, AUTHORIZATION_FAILURES_DESC, 0L); - - this.exceptions = this.getMetricsRegistry().newCounter(EXCEPTIONS_NAME, EXCEPTIONS_DESC, 0L); - this.exceptionsOOO = this.getMetricsRegistry() - .newCounter(EXCEPTIONS_OOO_NAME, EXCEPTIONS_TYPE_DESC, 0L); - this.exceptionsBusy = this.getMetricsRegistry() - .newCounter(EXCEPTIONS_BUSY_NAME, EXCEPTIONS_TYPE_DESC, 0L); - this.exceptionsUnknown = this.getMetricsRegistry() - .newCounter(EXCEPTIONS_UNKNOWN_NAME, EXCEPTIONS_TYPE_DESC, 0L); - this.exceptionsScannerReset = this.getMetricsRegistry() - .newCounter(EXCEPTIONS_SCANNER_RESET_NAME, EXCEPTIONS_TYPE_DESC, 0L); - this.exceptionsSanity = this.getMetricsRegistry() - .newCounter(EXCEPTIONS_SANITY_NAME, EXCEPTIONS_TYPE_DESC, 0L); - this.exceptionsMoved = this.getMetricsRegistry() - .newCounter(EXCEPTIONS_MOVED_NAME, EXCEPTIONS_TYPE_DESC, 0L); - this.exceptionsNSRE = this.getMetricsRegistry() - .newCounter(EXCEPTIONS_NSRE_NAME, 
EXCEPTIONS_TYPE_DESC, 0L); - this.exceptionsMultiTooLarge = this.getMetricsRegistry() - .newCounter(EXCEPTIONS_MULTI_TOO_LARGE_NAME, EXCEPTIONS_MULTI_TOO_LARGE_DESC, 0L); - this.exceptionsCallQueueTooBig = this.getMetricsRegistry().newCounter( - EXCEPTIONS_CALL_QUEUE_TOO_BIG, EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC, 0L); - this.authenticationSuccesses = this.getMetricsRegistry().newCounter( AUTHENTICATION_SUCCESSES_NAME, AUTHENTICATION_SUCCESSES_DESC, 0L); this.authenticationFailures = this.getMetricsRegistry().newCounter(AUTHENTICATION_FAILURES_NAME, @@ -134,56 +103,6 @@ public class MetricsHBaseServerSourceImpl extends BaseSourceImpl authenticationFallbacks.incr(); } - @Override - public void exception() { - exceptions.incr(); - } - - @Override - public void outOfOrderException() { - exceptionsOOO.incr(); - } - - @Override - public void failedSanityException() { - exceptionsSanity.incr(); - } - - @Override - public void movedRegionException() { - exceptionsMoved.incr(); - } - - @Override - public void notServingRegionException() { - exceptionsNSRE.incr(); - } - - @Override - public void unknownScannerException() { - exceptionsUnknown.incr(); - } - - @Override - public void scannerResetException() { - exceptionsScannerReset.incr(); - } - - @Override - public void tooBusyException() { - exceptionsBusy.incr(); - } - - @Override - public void multiActionTooLargeException() { - exceptionsMultiTooLarge.incr(); - } - - @Override - public void callQueueTooBigException() { - exceptionsCallQueueTooBig.incr(); - } - @Override public void authenticationSuccess() { authenticationSuccesses.incr(); diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/ExceptionTrackingSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/ExceptionTrackingSourceImpl.java new file mode 100644 index 00000000000..9ed61a1a1ce --- /dev/null +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/ExceptionTrackingSourceImpl.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.metrics; + +import org.apache.hadoop.metrics2.lib.MutableFastCounter; + +/** + * Common base implementation for metrics sources which need to track exceptions thrown or + * received. 
+ */ +public class ExceptionTrackingSourceImpl extends BaseSourceImpl + implements ExceptionTrackingSource { + protected MutableFastCounter exceptions; + protected MutableFastCounter exceptionsOOO; + protected MutableFastCounter exceptionsBusy; + protected MutableFastCounter exceptionsUnknown; + protected MutableFastCounter exceptionsScannerReset; + protected MutableFastCounter exceptionsSanity; + protected MutableFastCounter exceptionsNSRE; + protected MutableFastCounter exceptionsMoved; + protected MutableFastCounter exceptionsMultiTooLarge; + protected MutableFastCounter exceptionsCallQueueTooBig; + + public ExceptionTrackingSourceImpl(String metricsName, String metricsDescription, + String metricsContext, String metricsJmxContext) { + super(metricsName, metricsDescription, metricsContext, metricsJmxContext); + } + + @Override + public void init() { + super.init(); + this.exceptions = this.getMetricsRegistry().newCounter(EXCEPTIONS_NAME, EXCEPTIONS_DESC, 0L); + this.exceptionsOOO = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_OOO_NAME, EXCEPTIONS_TYPE_DESC, 0L); + this.exceptionsBusy = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_BUSY_NAME, EXCEPTIONS_TYPE_DESC, 0L); + this.exceptionsUnknown = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_UNKNOWN_NAME, EXCEPTIONS_TYPE_DESC, 0L); + this.exceptionsScannerReset = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_SCANNER_RESET_NAME, EXCEPTIONS_TYPE_DESC, 0L); + this.exceptionsSanity = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_SANITY_NAME, EXCEPTIONS_TYPE_DESC, 0L); + this.exceptionsMoved = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_MOVED_NAME, EXCEPTIONS_TYPE_DESC, 0L); + this.exceptionsNSRE = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_NSRE_NAME, EXCEPTIONS_TYPE_DESC, 0L); + this.exceptionsMultiTooLarge = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_MULTI_TOO_LARGE_NAME, EXCEPTIONS_MULTI_TOO_LARGE_DESC, 0L); + this.exceptionsCallQueueTooBig = this.getMetricsRegistry() + .newCounter(EXCEPTIONS_CALL_QUEUE_TOO_BIG, EXCEPTIONS_CALL_QUEUE_TOO_BIG_DESC, 0L); + } + + @Override + public void exception() { + exceptions.incr(); + } + + @Override + public void outOfOrderException() { + exceptionsOOO.incr(); + } + + @Override + public void failedSanityException() { + exceptionsSanity.incr(); + } + + @Override + public void movedRegionException() { + exceptionsMoved.incr(); + } + + @Override + public void notServingRegionException() { + exceptionsNSRE.incr(); + } + + @Override + public void unknownScannerException() { + exceptionsUnknown.incr(); + } + + @Override + public void scannerResetException() { + exceptionsScannerReset.incr(); + } + + @Override + public void tooBusyException() { + exceptionsBusy.incr(); + } + + @Override + public void multiActionTooLargeException() { + exceptionsMultiTooLarge.incr(); + } + + @Override + public void callQueueTooBigException() { + exceptionsCallQueueTooBig.incr(); + } +} diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceImpl.java index 71f67eb9a12..27323ac0490 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceImpl.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceImpl.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.thrift; import org.apache.hadoop.hbase.classification.InterfaceAudience; import 
org.apache.hadoop.hbase.metrics.BaseSourceImpl; +import org.apache.hadoop.hbase.metrics.ExceptionTrackingSourceImpl; import org.apache.hadoop.metrics2.MetricHistogram; import org.apache.hadoop.metrics2.lib.MutableFastCounter; import org.apache.hadoop.metrics2.lib.MutableGaugeLong; @@ -31,7 +32,7 @@ import org.apache.hadoop.metrics2.lib.MutableHistogram; * Implements BaseSource through BaseSourceImpl, following the pattern */ @InterfaceAudience.Private -public class MetricsThriftServerSourceImpl extends BaseSourceImpl implements +public class MetricsThriftServerSourceImpl extends ExceptionTrackingSourceImpl implements MetricsThriftServerSource { private MetricHistogram batchGetStat; @@ -73,7 +74,6 @@ public class MetricsThriftServerSourceImpl extends BaseSourceImpl implements thriftCallStat = getMetricsRegistry().newTimeHistogram(THRIFT_CALL_KEY); thriftSlowCallStat = getMetricsRegistry().newTimeHistogram(SLOW_THRIFT_CALL_KEY); callQueueLenGauge = getMetricsRegistry().getGauge(CALL_QUEUE_LEN_KEY, 0); - } @Override diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java index 51a0444a620..794143d2bc3 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java @@ -62,16 +62,19 @@ public class HbaseHandlerMetricsProxy implements InvocationHandler { public Object invoke(Object proxy, Method m, Object[] args) throws Throwable { Object result; + long start = now(); try { - long start = now(); result = m.invoke(handler, args); - long processTime = now() - start; - metrics.incMethodTime(m.getName(), processTime); } catch (InvocationTargetException e) { + metrics.exception(e.getCause()); throw e.getTargetException(); } catch (Exception e) { + metrics.exception(e); throw new RuntimeException( "unexpected invocation exception: " + e.getMessage()); + } finally { + long processTime = now() - start; + metrics.incMethodTime(m.getName(), processTime); } return result; } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java index 883bbdccffc..dc69b33064f 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java @@ -19,9 +19,21 @@ package org.apache.hadoop.hbase.thrift; +import org.apache.hadoop.hbase.CallQueueTooBigException; +import org.apache.hadoop.hbase.MultiActionResultTooLarge; +import org.apache.hadoop.hbase.NotServingRegionException; +import org.apache.hadoop.hbase.RegionTooBusyException; +import org.apache.hadoop.hbase.UnknownScannerException; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; +import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil; +import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; +import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException; +import org.apache.hadoop.hbase.exceptions.RegionMovedException; +import org.apache.hadoop.hbase.exceptions.ScannerResetException; +import org.apache.hadoop.hbase.thrift.generated.IOError; +import org.apache.hadoop.hbase.thrift2.generated.TIOError; /** * This class is for maintaining the 
various statistics of thrift server @@ -87,4 +99,53 @@ public class ThriftMetrics { } } + /** + * Increment the count for a specific exception type. This is called for each exception type + * that is returned to the thrift handler. + * @param rawThrowable type of exception + */ + public void exception(Throwable rawThrowable) { + source.exception(); + + Throwable throwable = unwrap(rawThrowable); + /** + * Keep some metrics for commonly seen exceptions + * + * Try and put the most common types first. + * Place child types before the parent type that they extend. + * + * If this gets much larger we might have to go to a hashmap + */ + if (throwable != null) { + if (throwable instanceof OutOfOrderScannerNextException) { + source.outOfOrderException(); + } else if (throwable instanceof RegionTooBusyException) { + source.tooBusyException(); + } else if (throwable instanceof UnknownScannerException) { + source.unknownScannerException(); + } else if (throwable instanceof ScannerResetException) { + source.scannerResetException(); + } else if (throwable instanceof RegionMovedException) { + source.movedRegionException(); + } else if (throwable instanceof NotServingRegionException) { + source.notServingRegionException(); + } else if (throwable instanceof FailedSanityCheckException) { + source.failedSanityException(); + } else if (throwable instanceof MultiActionResultTooLarge) { + source.multiActionTooLargeException(); + } else if (throwable instanceof CallQueueTooBigException) { + source.callQueueTooBigException(); + } + } + } + + private static Throwable unwrap(Throwable t) { + if (t == null) { + return t; + } + if (t instanceof TIOError || t instanceof IOError) { + t = t.getCause(); + } + return ClientExceptionsUtil.findException(t); + } } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index 169d42f058d..77195ef0f4c 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -807,7 +807,7 @@ public class ThriftServerRunner implements Runnable { getAdmin().enableTable(getTableName(tableName)); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -817,7 +817,7 @@ public class ThriftServerRunner implements Runnable { getAdmin().disableTable(getTableName(tableName)); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -827,7 +827,7 @@ public class ThriftServerRunner implements Runnable { return this.connectionCache.getAdmin().isTableEnabled(getTableName(tableName)); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -844,7 +844,7 @@ public class ThriftServerRunner implements Runnable { } } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -861,7 +861,7 @@ public class ThriftServerRunner implements Runnable { } } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -876,7 +876,7 @@ public class ThriftServerRunner implements Runnable { return list; } catch (IOException e) { LOG.warn(e.getMessage(), e); - 
throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -909,7 +909,7 @@ public class ThriftServerRunner implements Runnable { return Collections.emptyList(); } catch (IOException e){ LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -954,7 +954,7 @@ public class ThriftServerRunner implements Runnable { return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally { closeTable(table); } @@ -1000,7 +1000,7 @@ public class ThriftServerRunner implements Runnable { return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1047,7 +1047,7 @@ public class ThriftServerRunner implements Runnable { return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1108,7 +1108,7 @@ public class ThriftServerRunner implements Runnable { return ThriftUtilities.rowResultFromHBase(result); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1177,7 +1177,7 @@ public class ThriftServerRunner implements Runnable { return ThriftUtilities.rowResultFromHBase(result); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1212,7 +1212,7 @@ public class ThriftServerRunner implements Runnable { } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally { closeTable(table); } @@ -1237,7 +1237,7 @@ public class ThriftServerRunner implements Runnable { table.delete(delete); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally { closeTable(table); } @@ -1260,7 +1260,7 @@ public class ThriftServerRunner implements Runnable { getAdmin().createTable(desc); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } catch (IllegalArgumentException e) { LOG.warn(e.getMessage(), e); throw new IllegalArgument(Throwables.getStackTraceAsString(e)); @@ -1284,7 +1284,7 @@ public class ThriftServerRunner implements Runnable { getAdmin().deleteTable(tableName); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -1342,7 +1342,7 @@ public class ThriftServerRunner implements Runnable { table.put(put); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } catch (IllegalArgumentException e) { LOG.warn(e.getMessage(), e); throw new IllegalArgument(Throwables.getStackTraceAsString(e)); @@ -1415,7 +1415,7 @@ public class ThriftServerRunner implements Runnable { } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } catch 
(IllegalArgumentException e) { LOG.warn(e.getMessage(), e); throw new IllegalArgument(Throwables.getStackTraceAsString(e)); @@ -1445,7 +1445,7 @@ public class ThriftServerRunner implements Runnable { getBytes(row), family, qualifier, amount); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally { closeTable(table); } @@ -1483,7 +1483,7 @@ public class ThriftServerRunner implements Runnable { } } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } return ThriftUtilities.rowResultFromHBase(results, resultScannerWrapper.isColumnSorted()); } @@ -1542,7 +1542,7 @@ public class ThriftServerRunner implements Runnable { return addScanner(table.getScanner(scan), tScan.sortColumns); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1571,7 +1571,7 @@ public class ThriftServerRunner implements Runnable { return addScanner(table.getScanner(scan), false); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1601,7 +1601,7 @@ public class ThriftServerRunner implements Runnable { return addScanner(table.getScanner(scan), false); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1635,7 +1635,7 @@ public class ThriftServerRunner implements Runnable { return addScanner(table.getScanner(scan), false); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1665,7 +1665,7 @@ public class ThriftServerRunner implements Runnable { return addScanner(table.getScanner(scan), false); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1697,7 +1697,7 @@ public class ThriftServerRunner implements Runnable { return addScanner(table.getScanner(scan), false); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1722,12 +1722,12 @@ public class ThriftServerRunner implements Runnable { return columns; } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally { closeTable(table); } } - + private void closeTable(Table table) throws IOError { try{ @@ -1736,7 +1736,7 @@ public class ThriftServerRunner implements Runnable { } } catch (IOException e){ LOG.error(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -1775,7 +1775,7 @@ public class ThriftServerRunner implements Runnable { return region; } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } } @@ -1818,7 +1818,7 @@ public class ThriftServerRunner implements Runnable { table.increment(inc); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1849,7 
+1849,7 @@ public class ThriftServerRunner implements Runnable { return ThriftUtilities.cellFromHBase(result.rawCells()); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } finally{ closeTable(table); } @@ -1883,7 +1883,7 @@ public class ThriftServerRunner implements Runnable { value != null ? getBytes(value) : HConstants.EMPTY_BYTE_ARRAY, put); } catch (IOException e) { LOG.warn(e.getMessage(), e); - throw new IOError(Throwables.getStackTraceAsString(e)); + throw getIOError(e); } catch (IllegalArgumentException e) { LOG.warn(e.getMessage(), e); throw new IllegalArgument(Throwables.getStackTraceAsString(e)); @@ -1894,6 +1894,11 @@ public class ThriftServerRunner implements Runnable { } + private static IOError getIOError(Throwable throwable) { + IOError error = new IOErrorWithCause(throwable); + error.setMessage(Throwables.getStackTraceAsString(throwable)); + return error; + } /** * Adds all the attributes into the Operation object @@ -1923,4 +1928,37 @@ public class ThriftServerRunner implements Runnable { } } } + + public static class IOErrorWithCause extends IOError { + private Throwable cause; + public IOErrorWithCause(Throwable cause) { + this.cause = cause; + } + + @Override + public Throwable getCause() { + return cause; + } + + @Override + public boolean equals(Object other) { + if (super.equals(other) && + other instanceof IOErrorWithCause) { + Throwable otherCause = ((IOErrorWithCause) other).getCause(); + if (this.getCause() != null) { + return otherCause != null && this.getCause().equals(otherCause); + } else { + return otherCause == null; + } + } + return false; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (cause != null ? 
cause.hashCode() : 0); + return result; + } + } } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java index a69a7df3574..c5da48ef4da 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java @@ -112,20 +112,58 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface { @Override public Object invoke(Object proxy, Method m, Object[] args) throws Throwable { Object result; + long start = now(); try { - long start = now(); result = m.invoke(handler, args); - int processTime = (int) (now() - start); - metrics.incMethodTime(m.getName(), processTime); } catch (InvocationTargetException e) { + metrics.exception(e.getCause()); throw e.getTargetException(); } catch (Exception e) { + metrics.exception(e); throw new RuntimeException("unexpected invocation exception: " + e.getMessage()); + } finally { + int processTime = (int) (now() - start); + metrics.incMethodTime(m.getName(), processTime); } return result; } } + private static class TIOErrorWithCause extends TIOError { + private Throwable cause; + + public TIOErrorWithCause(Throwable cause) { + super(); + this.cause = cause; + } + + @Override + public Throwable getCause() { + return cause; + } + + @Override + public boolean equals(Object other) { + if (super.equals(other) && + other instanceof TIOErrorWithCause) { + Throwable otherCause = ((TIOErrorWithCause) other).getCause(); + if (this.getCause() != null) { + return otherCause != null && this.getCause().equals(otherCause); + } else { + return otherCause == null; + } + } + return false; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (cause != null ? cause.hashCode() : 0); + return result; + } + } + private static long now() { return System.nanoTime(); } @@ -163,7 +201,7 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface { } private TIOError getTIOError(IOException e) { - TIOError err = new TIOError(); + TIOError err = new TIOErrorWithCause(e); err.setMessage(e.getMessage()); return err; } diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/ErrorThrowingGetObserver.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/ErrorThrowingGetObserver.java new file mode 100644 index 00000000000..810dabc77aa --- /dev/null +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/ErrorThrowingGetObserver.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.hadoop.hbase.thrift;
+
+import org.apache.hadoop.hbase.CallQueueTooBigException;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.MultiActionResultTooLarge;
+import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.RegionTooBusyException;
+import org.apache.hadoop.hbase.UnknownScannerException;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
+import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
+import org.apache.hadoop.hbase.exceptions.RegionMovedException;
+import org.apache.hadoop.hbase.exceptions.ScannerResetException;
+import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Simple test coprocessor for injecting exceptions on Get requests.
+ */
+public class ErrorThrowingGetObserver extends BaseRegionObserver {
+  public static final String SHOULD_ERROR_ATTRIBUTE = "error";
+
+  @Override
+  public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e,
+                       Get get, List<Cell> results) throws IOException {
+    byte[] errorType = get.getAttribute(SHOULD_ERROR_ATTRIBUTE);
+    if (errorType != null) {
+      ErrorType type = ErrorType.valueOf(Bytes.toString(errorType));
+      switch (type) {
+        case CALL_QUEUE_TOO_BIG:
+          throw new CallQueueTooBigException("Failing for test");
+        case MULTI_ACTION_RESULT_TOO_LARGE:
+          throw new MultiActionResultTooLarge("Failing for test");
+        case FAILED_SANITY_CHECK:
+          throw new FailedSanityCheckException("Failing for test");
+        case NOT_SERVING_REGION:
+          throw new NotServingRegionException("Failing for test");
+        case REGION_MOVED:
+          throw new RegionMovedException(
+              e.getEnvironment().getRegionServerServices().getServerName(), 1);
+        case SCANNER_RESET:
+          throw new ScannerResetException("Failing for test");
+        case UNKNOWN_SCANNER:
+          throw new UnknownScannerException("Failing for test");
+        case REGION_TOO_BUSY:
+          throw new RegionTooBusyException("Failing for test");
+        case OUT_OF_ORDER_SCANNER_NEXT:
+          throw new OutOfOrderScannerNextException("Failing for test");
+        default:
+          throw new DoNotRetryIOException("Failing for test");
+      }
+    }
+    super.preGetOp(e, get, results);
+  }
+
+  public enum ErrorType {
+    CALL_QUEUE_TOO_BIG(ExceptionTrackingSource.EXCEPTIONS_CALL_QUEUE_TOO_BIG),
+    MULTI_ACTION_RESULT_TOO_LARGE(ExceptionTrackingSource.EXCEPTIONS_MULTI_TOO_LARGE_NAME),
+    FAILED_SANITY_CHECK(ExceptionTrackingSource.EXCEPTIONS_SANITY_NAME),
+    NOT_SERVING_REGION(ExceptionTrackingSource.EXCEPTIONS_NSRE_NAME),
+    REGION_MOVED(ExceptionTrackingSource.EXCEPTIONS_MOVED_NAME),
+    SCANNER_RESET(ExceptionTrackingSource.EXCEPTIONS_SCANNER_RESET_NAME),
+    UNKNOWN_SCANNER(ExceptionTrackingSource.EXCEPTIONS_UNKNOWN_NAME),
+    REGION_TOO_BUSY(ExceptionTrackingSource.EXCEPTIONS_BUSY_NAME),
+    OUT_OF_ORDER_SCANNER_NEXT(ExceptionTrackingSource.EXCEPTIONS_OOO_NAME);
+
+    private final String metricName;
+
+    ErrorType(String metricName) {
+      this.metricName = metricName;
+    }
+
+    public String getMetricName() {
+      return metricName;
+    }
+  }
+}
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java index 06a7d3d5f32..e3003359f92 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java @@ -22,10 +22,13 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -34,9 +37,13 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilityFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.ParseFilter; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.test.MetricsAssertHelper; @@ -98,6 +105,7 @@ public class TestThriftServer { public static void beforeClass() throws Exception { UTIL.getConfiguration().setBoolean(ThriftServerRunner.COALESCE_INC_KEY, true); UTIL.getConfiguration().setBoolean("hbase.table.sanity.checks", false); + UTIL.getConfiguration().setInt("hbase.client.retries.number", 3); UTIL.startMiniCluster(); } @@ -708,6 +716,75 @@ public class TestThriftServer { } } + @Test + public void testMetricsWithException() throws Exception { + String rowkey = "row1"; + String family = "f"; + String col = "c"; + // create a table which will throw exceptions for requests + TableName tableName = TableName.valueOf("testMetricsWithException"); + HTableDescriptor tableDesc = new HTableDescriptor(tableName); + tableDesc.addCoprocessor(ErrorThrowingGetObserver.class.getName()); + tableDesc.addFamily(new HColumnDescriptor(family)); + + Table table = UTIL.createTable(tableDesc, null); + long now = System.currentTimeMillis(); + table.put(new Put(Bytes.toBytes(rowkey)) + .addColumn(Bytes.toBytes(family), Bytes.toBytes(col), now, Bytes.toBytes("val1"))); + + Configuration conf = UTIL.getConfiguration(); + ThriftMetrics metrics = getMetrics(conf); + ThriftServerRunner.HBaseHandler hbaseHandler = + new ThriftServerRunner.HBaseHandler(UTIL.getConfiguration(), + UserProvider.instantiate(UTIL.getConfiguration())); + Hbase.Iface handler = HbaseHandlerMetricsProxy.newInstance(hbaseHandler, metrics, conf); + + ByteBuffer tTableName = asByteBuffer(tableName.getNameAsString()); + + // check metrics increment with a successful get + long preGetCounter = metricsHelper.checkCounterExists("getRow_num_ops", metrics.getSource()) ? 
+ metricsHelper.getCounter("getRow_num_ops", metrics.getSource()) : + 0; + List tRowResult = handler.getRow(tTableName, asByteBuffer(rowkey), null); + assertEquals(1, tRowResult.size()); + TRowResult tResult = tRowResult.get(0); + + TCell expectedColumnValue = new TCell(asByteBuffer("val1"), now); + + assertArrayEquals(Bytes.toBytes(rowkey), tResult.getRow()); + Collection returnedColumnValues = tResult.getColumns().values(); + assertEquals(1, returnedColumnValues.size()); + assertEquals(expectedColumnValue, returnedColumnValues.iterator().next()); + + metricsHelper.assertCounter("getRow_num_ops", preGetCounter + 1, metrics.getSource()); + + // check metrics increment when the get throws each exception type + for (ErrorThrowingGetObserver.ErrorType type : ErrorThrowingGetObserver.ErrorType.values()) { + testExceptionType(handler, metrics, tTableName, rowkey, type); + } + } + + private void testExceptionType(Hbase.Iface handler, ThriftMetrics metrics, + ByteBuffer tTableName, String rowkey, + ErrorThrowingGetObserver.ErrorType errorType) throws Exception { + long preGetCounter = metricsHelper.getCounter("getRow_num_ops", metrics.getSource()); + String exceptionKey = errorType.getMetricName(); + long preExceptionCounter = metricsHelper.checkCounterExists(exceptionKey, metrics.getSource()) ? + metricsHelper.getCounter(exceptionKey, metrics.getSource()) : + 0; + Map attributes = new HashMap<>(); + attributes.put(asByteBuffer(ErrorThrowingGetObserver.SHOULD_ERROR_ATTRIBUTE), + asByteBuffer(errorType.name())); + try { + List tRowResult = handler.getRow(tTableName, asByteBuffer(rowkey), attributes); + fail("Get with error attribute should have thrown an exception"); + } catch (IOError e) { + LOG.info("Received exception: ", e); + metricsHelper.assertCounter("getRow_num_ops", preGetCounter + 1, metrics.getSource()); + metricsHelper.assertCounter(exceptionKey, preExceptionCounter + 1, metrics.getSource()); + } + } + /** * * @return a List of ColumnDescriptors for use in creating a table. 
Has one diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java index 13d499716cd..9a3800d6df9 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java @@ -18,6 +18,7 @@ */ package org.apache.hadoop.hbase.thrift2; +import com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -33,11 +34,13 @@ import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.ParseFilter; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.test.MetricsAssertHelper; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.apache.hadoop.hbase.thrift.ErrorThrowingGetObserver; import org.apache.hadoop.hbase.thrift.ThriftMetrics; import org.apache.hadoop.hbase.thrift2.generated.TAppend; import org.apache.hadoop.hbase.thrift2.generated.TColumn; @@ -139,6 +142,7 @@ public class TestThriftHBaseServiceHandler { @BeforeClass public static void beforeClass() throws Exception { + UTIL.getConfiguration().set("hbase.client.retries.number", "3"); UTIL.startMiniCluster(); Admin admin = UTIL.getAdmin(); HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableAname)); @@ -975,6 +979,71 @@ public class TestThriftHBaseServiceHandler { return m; } + @Test + public void testMetricsWithException() throws Exception { + byte[] rowkey = Bytes.toBytes("row1"); + byte[] family = Bytes.toBytes("f"); + byte[] col = Bytes.toBytes("c"); + // create a table which will throw exceptions for requests + TableName tableName = TableName.valueOf("testMetricsWithException"); + HTableDescriptor tableDesc = new HTableDescriptor(tableName); + tableDesc.addCoprocessor(ErrorThrowingGetObserver.class.getName()); + tableDesc.addFamily(new HColumnDescriptor(family)); + + Table table = UTIL.createTable(tableDesc, null); + table.put(new Put(rowkey).addColumn(family, col, Bytes.toBytes("val1"))); + + ThriftHBaseServiceHandler hbaseHandler = createHandler(); + ThriftMetrics metrics = getMetrics(UTIL.getConfiguration()); + THBaseService.Iface handler = + ThriftHBaseServiceHandler.newInstance(hbaseHandler, metrics); + ByteBuffer tTableName = wrap(tableName.getName()); + + // check metrics increment with a successful get + long preGetCounter = metricsHelper.checkCounterExists("get_num_ops", metrics.getSource()) ? 
+ metricsHelper.getCounter("get_num_ops", metrics.getSource()) : + 0; + TGet tGet = new TGet(wrap(rowkey)); + TResult tResult = handler.get(tTableName, tGet); + + List expectedColumnValues = Lists.newArrayList( + new TColumnValue(wrap(family), wrap(col), wrap(Bytes.toBytes("val1"))) + ); + assertArrayEquals(rowkey, tResult.getRow()); + List returnedColumnValues = tResult.getColumnValues(); + assertTColumnValuesEqual(expectedColumnValues, returnedColumnValues); + + metricsHelper.assertCounter("get_num_ops", preGetCounter + 1, metrics.getSource()); + + // check metrics increment when the get throws each exception type + for (ErrorThrowingGetObserver.ErrorType type : ErrorThrowingGetObserver.ErrorType.values()) { + testExceptionType(handler, metrics, tTableName, rowkey, type); + } + } + + private void testExceptionType(THBaseService.Iface handler, ThriftMetrics metrics, + ByteBuffer tTableName, byte[] rowkey, ErrorThrowingGetObserver.ErrorType errorType) { + long preGetCounter = metricsHelper.getCounter("get_num_ops", metrics.getSource()); + String exceptionKey = errorType.getMetricName(); + long preExceptionCounter = metricsHelper.checkCounterExists(exceptionKey, metrics.getSource()) ? + metricsHelper.getCounter(exceptionKey, metrics.getSource()) : + 0; + TGet tGet = new TGet(wrap(rowkey)); + Map attributes = new HashMap<>(); + attributes.put(wrap(Bytes.toBytes(ErrorThrowingGetObserver.SHOULD_ERROR_ATTRIBUTE)), + wrap(Bytes.toBytes(errorType.name()))); + tGet.setAttributes(attributes); + try { + TResult tResult = handler.get(tTableName, tGet); + fail("Get with error attribute should have thrown an exception"); + } catch (TException e) { + LOG.info("Received exception: ", e); + metricsHelper.assertCounter("get_num_ops", preGetCounter + 1, metrics.getSource()); + metricsHelper.assertCounter(exceptionKey, preExceptionCounter + 1, metrics.getSource()); + } + + } + @Test public void testAttribute() throws Exception { byte[] rowName = "testAttribute".getBytes();