diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RemoteExceptionHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RemoteExceptionHandler.java
deleted file mode 100644
index d9027ed2a3d..00000000000
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RemoteExceptionHandler.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.ipc.RemoteException;
-
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-
-/**
- * An immutable class which contains a static method for handling
- * org.apache.hadoop.ipc.RemoteException exceptions.
- */
-@InterfaceAudience.Private
-public class RemoteExceptionHandler {
-  /* Not instantiable */
-  private RemoteExceptionHandler() {super();}
-
-  /**
-   * Examine passed Throwable. See if its carrying a RemoteException. If so,
-   * run {@link #decodeRemoteException(RemoteException)} on it. Otherwise,
-   * pass back t unaltered.
-   * @param t Throwable to examine.
-   * @return Decoded RemoteException carried by t or
-   * t unaltered.
-   */
-  public static Throwable checkThrowable(final Throwable t) {
-    Throwable result = t;
-    if (t instanceof RemoteException) {
-      try {
-        result =
-          RemoteExceptionHandler.decodeRemoteException((RemoteException)t);
-      } catch (Throwable tt) {
-        result = tt;
-      }
-    }
-    return result;
-  }
-
-  /**
-   * Examine passed IOException. See if its carrying a RemoteException. If so,
-   * run {@link #decodeRemoteException(RemoteException)} on it. Otherwise,
-   * pass back e unaltered.
-   * @param e Exception to examine.
-   * @return Decoded RemoteException carried by e or
-   * e unaltered.
-   */
-  public static IOException checkIOException(final IOException e) {
-    Throwable t = checkThrowable(e);
-    return t instanceof IOException? (IOException)t: new IOException(t);
-  }
-
-  /**
-   * Converts org.apache.hadoop.ipc.RemoteException into original exception,
-   * if possible. If the original exception is an Error or a RuntimeException,
-   * throws the original exception.
-   *
-   * @param re original exception
-   * @return decoded RemoteException if it is an instance of or a subclass of
-   *         IOException, or the original RemoteException if it cannot be decoded.
-   *
-   * @throws IOException indicating a server error ocurred if the decoded
-   *         exception is not an IOException. The decoded exception is set as
-   *         the cause.
-   * @deprecated Use {@link RemoteException#unwrapRemoteException()} instead.
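
Review note (not part of the patch): for reference, the removed decodeRemoteException() unwrapped
by reflection, as its javadoc above describes: load the exception class named inside the
RemoteException, rebuild it locally from the message, and fall back to IOException("server error")
when the decoded cause is not an IOException. A minimal standalone sketch of that technique (class
and method names here are illustrative only, not HBase API):

    import java.io.IOException;
    import java.lang.reflect.Constructor;

    public class UnwrapSketch {
      // className/message stand in for RemoteException#getClassName()/#getMessage().
      static IOException rebuild(String className, String message) throws IOException {
        try {
          Class<?> c = Class.forName(className);
          Constructor<?> ctor = c.getConstructor(String.class);
          Throwable t = (Throwable) ctor.newInstance(message);
          if (t instanceof IOException) {
            return (IOException) t;
          }
          // Non-IOException cause: surface it as the cause of a generic IOException.
          IOException io = new IOException("server error");
          io.initCause(t);
          throw io;
        } catch (ReflectiveOperationException x) {
          // Exception class not reconstructible locally: report what we know.
          return new IOException(className + ": " + message);
        }
      }

      public static void main(String[] args) throws IOException {
        IOException e = rebuild("java.io.FileNotFoundException", "/hbase/WALs/example gone");
        System.out.println(e.getClass().getName() + ": " + e.getMessage());
      }
    }
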
-   * In fact we should look into deprecating this whole class - St.Ack 2010929
-   */
-  public static IOException decodeRemoteException(final RemoteException re)
-  throws IOException {
-    IOException i = re;
-
-    try {
-      Class c = Class.forName(re.getClassName());
-
-      Class[] parameterTypes = { String.class };
-      Constructor ctor = c.getConstructor(parameterTypes);
-
-      Object[] arguments = { re.getMessage() };
-      Throwable t = (Throwable) ctor.newInstance(arguments);
-
-      if (t instanceof IOException) {
-        i = (IOException) t;
-      } else {
-        i = new IOException("server error");
-        i.initCause(t);
-        throw i;
-      }
-
-    } catch (ClassNotFoundException x) {
-      // continue
-    } catch (NoSuchMethodException x) {
-      // continue
-    } catch (IllegalAccessException x) {
-      // continue
-    } catch (InvocationTargetException x) {
-      // continue
-    } catch (InstantiationException x) {
-      // continue
-    }
-    return i;
-  }
-}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
index 4cee076b463..f62d6bcacb1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.RegionLocations;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownScannerException;
@@ -236,7 +235,7 @@ public class ScannerCallable extends RegionServerCallable<Result[]> {
       }
       IOException ioe = e;
       if (e instanceof RemoteException) {
-        ioe = RemoteExceptionHandler.decodeRemoteException((RemoteException)e);
+        ioe = ((RemoteException) e).unwrapRemoteException();
       }
       if (logScannerActivity && (ioe instanceof UnknownScannerException)) {
         try {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index 36099650817..9b3a9484cd5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -36,15 +36,14 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.ClusterId;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.InvalidFamilyOperationException;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.HFileArchiver;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.fs.HFileSystem;
@@ -56,6 +55,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.zookeeper.KeeperException;
 
 /**
@@ -507,7 +507,8 @@ public class MasterFileSystem {
       setInfoFamilyCachingForMeta(true);
       HRegion.closeHRegion(meta);
     } catch (IOException e) {
-      e = RemoteExceptionHandler.checkIOException(e);
+      e = e instanceof RemoteException ?
+          ((RemoteException)e).unwrapRemoteException() : e;
       LOG.error("bootstrap", e);
       throw e;
     }
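
Review note (not part of the patch): the replacement idiom is Hadoop's own
RemoteException#unwrapRemoteException(), which performs the same class-name lookup and returns the
rebuilt IOException, or the RemoteException itself when the named class cannot be instantiated as
an IOException. A self-contained sketch of the call (assumes hadoop-common on the classpath; the
wrapped class name is an arbitrary example):

    import java.io.IOException;
    import org.apache.hadoop.ipc.RemoteException;

    public class UnwrapDemo {
      // The pattern this patch inlines at each former checkIOException() call site.
      static IOException unwrap(IOException e) {
        return e instanceof RemoteException
            ? ((RemoteException) e).unwrapRemoteException()
            : e;
      }

      public static void main(String[] args) {
        IOException wire = new RemoteException("java.io.FileNotFoundException", "no such WAL");
        // Prints java.io.FileNotFoundException: the original type is restored client-side.
        System.out.println(unwrap(wire).getClass().getName());
      }
    }
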
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 6e2f4fd2076..6cd5b05adce 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -28,9 +28,9 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Chore;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.ipc.RemoteException;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableSet;
@@ -123,7 +123,8 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
       FileStatus[] files = FSUtils.listStatus(this.fs, this.oldFileDir);
       checkAndDeleteEntries(files);
     } catch (IOException e) {
-      e = RemoteExceptionHandler.checkIOException(e);
+      e = e instanceof RemoteException ?
+          ((RemoteException)e).unwrapRemoteException() : e;
       LOG.warn("Error while cleaning the logs", e);
     }
   }
@@ -182,8 +183,9 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
       // if the directory still has children, we can't delete it, so we are done
       if (!allChildrenDeleted) return false;
     } catch (IOException e) {
-      e = RemoteExceptionHandler.checkIOException(e);
-      LOG.warn("Error while listing directory: " + dir, e);
+      e = e instanceof RemoteException ?
+          ((RemoteException)e).unwrapRemoteException() : e;
+      LOG.warn("Error while listing directory: " + dir, e);
       // couldn't list directory, so don't try to delete, and don't return success
       return false;
     }
@@ -261,7 +263,8 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Chore
           + ", but couldn't. Run cleaner chain and attempt to delete on next pass.");
       }
     } catch (IOException e) {
-      e = RemoteExceptionHandler.checkIOException(e);
+      e = e instanceof RemoteException ?
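
Review note (not part of the patch): the same ternary now recurs at every former
checkIOException() call site throughout the rest of the patch. If the repetition grows tiresome, a
one-line utility could centralize it; a hypothetical sketch (no such helper exists in this change):

    import java.io.IOException;
    import org.apache.hadoop.ipc.RemoteException;

    final class RemoteExceptions {
      private RemoteExceptions() {}

      // Equivalent to the inlined "e instanceof RemoteException ? ... : e" pattern.
      static IOException unwrapIfRemote(IOException e) {
        return e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;
      }
    }
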
+          ((RemoteException)e).unwrapRemoteException() : e;
       LOG.warn("Error while deleting: " + filePath, e);
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
index 68c4d974f9d..c3ac0962110 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
@@ -37,11 +37,11 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
@@ -486,7 +486,8 @@ public class CompactSplitThread implements CompactionRequestor {
         }
       }
     } catch (IOException ex) {
-      IOException remoteEx = RemoteExceptionHandler.checkIOException(ex);
+      IOException remoteEx =
+          ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex;
       LOG.error("Compaction failed " + this, remoteEx);
       if (remoteEx != ex) {
         LOG.info("Compaction failed at original callstack: " + formatStackTrace(ex));
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 06e75b47d81..882f1252538 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -64,15 +64,14 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HealthCheckChore;
+import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.NotServingRegionException;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.TableDescriptors;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.YouAreDeadException;
 import org.apache.hadoop.hbase.ZNodeClearer;
-import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.client.ConnectionUtils;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
@@ -1112,7 +1111,8 @@ public class HRegionServer extends HasThread implements
       try {
         this.hlogForMeta.close();
       } catch (Throwable e) {
-        LOG.error("Metalog close and delete failed", RemoteExceptionHandler.checkThrowable(e));
+        e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;
+        LOG.error("Metalog close and delete failed", e);
       }
     }
     if (this.hlog != null) {
@@ -1123,7 +1123,8 @@ public class HRegionServer extends HasThread implements
           hlog.close();
         }
       } catch (Throwable e) {
-        LOG.error("Close and delete failed", RemoteExceptionHandler.checkThrowable(e));
+        e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;
+        LOG.error("Close and delete failed", e);
       }
     }
   }
@@ -2623,10 +2624,11 @@ public class HRegionServer extends HasThread implements
       LOG.debug("NotServingRegionException; " + t.getMessage());
       return t;
     }
+    Throwable e = t instanceof RemoteException ? ((RemoteException) t).unwrapRemoteException() : t;
     if (msg == null) {
-      LOG.error("", RemoteExceptionHandler.checkThrowable(t));
+      LOG.error("", e);
     } else {
-      LOG.error(msg, RemoteExceptionHandler.checkThrowable(t));
+      LOG.error(msg, e);
     }
     if (!rpcServices.checkOOME(t)) {
       checkFileSystem();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 3f5729adda5..c783d146483 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -56,7 +56,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -87,6 +86,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -1615,7 +1615,8 @@ public class HStore implements Store {
         this.fs.removeStoreFiles(this.getColumnFamilyName(), compactedFiles);
       }
     } catch (IOException e) {
-      e = RemoteExceptionHandler.checkIOException(e);
+      e = e instanceof RemoteException ?
+          ((RemoteException)e).unwrapRemoteException() : e;
       LOG.error("Failed removing compacted files in " + this
          + ". Files we were trying to remove are " + compactedFiles.toString()
          + "; some of them may have been already removed", e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
index 62441754ce9..16d82c91af9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
@@ -18,22 +18,26 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.ReentrantLock;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.HasThread;
-
-import java.io.IOException;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.locks.ReentrantLock;
+import org.apache.hadoop.ipc.RemoteException;
 
 /**
  * Runs periodically to determine if the HLog should be rolled.
@@ -105,7 +109,7 @@ class LogRoller extends HasThread implements WALActionsListener {
       } catch (IOException ex) {
         // Abort if we get here. We probably won't recover an IOE. HBASE-1132
         server.abort("IOE in log roller",
-            RemoteExceptionHandler.checkIOException(ex));
+            ex instanceof RemoteException ?
                ((RemoteException) ex).unwrapRemoteException() : ex);
       } catch (Exception ex) {
         LOG.error("Log rolling failed", ex);
         server.abort("Log rolling failed", ex);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
index 165176fb0d8..c9d54cc6a37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
@@ -43,15 +43,15 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Counter;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.StringUtils;
 import org.htrace.Trace;
 import org.htrace.TraceScope;
-import org.apache.hadoop.hbase.util.Counter;
 
 import com.google.common.base.Preconditions;
 
@@ -434,9 +434,11 @@ class MemStoreFlusher implements FlushRequester {
           this.server.compactSplitThread.requestSystemCompaction(
               region, Thread.currentThread().getName());
         } catch (IOException e) {
-          LOG.error(
+          e = e instanceof RemoteException ?
+              ((RemoteException)e).unwrapRemoteException() : e;
+          LOG.error(
             "Cache flush failed for region " + Bytes.toStringBinary(region.getRegionName()),
-            RemoteExceptionHandler.checkIOException(e));
+            e);
         }
       }
     }
@@ -494,9 +496,11 @@ class MemStoreFlusher implements FlushRequester {
       server.abort("Replay of HLog required. Forcing server shutdown", ex);
       return false;
     } catch (IOException ex) {
-      LOG.error("Cache flush failed" +
-        (region != null ? (" for region " + Bytes.toStringBinary(region.getRegionName())) : ""),
-        RemoteExceptionHandler.checkIOException(ex));
+      ex = ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex;
+      LOG.error(
+        "Cache flush failed"
+            + (region != null ? (" for region " + Bytes.toStringBinary(region.getRegionName()))
+              : ""), ex);
       if (!server.checkFileSystem()) {
         return false;
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
index b51f8e15d04..a8266751b68 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
@@ -23,9 +23,9 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
@@ -118,8 +118,8 @@ class RegionMergeRequest implements Runnable {
          + ". Region merge took "
          + StringUtils.formatTimeDiff(EnvironmentEdgeManager.currentTimeMillis(), startTime));
     } catch (IOException ex) {
-      LOG.error("Merge failed " + this,
-          RemoteExceptionHandler.checkIOException(ex));
+      ex = ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex;
+      LOG.error("Merge failed " + this, ex);
       server.checkFileSystem();
     } finally {
       releaseTableLock();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
index 38e877120d7..4b57a2070f9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
@@ -23,9 +23,9 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
@@ -112,7 +112,8 @@ class SplitRequest implements Runnable {
           + st.getSecondDaughter().getRegionNameAsString() + ". Split took "
           + StringUtils.formatTimeDiff(System.currentTimeMillis(), startTime));
     } catch (IOException ex) {
-      LOG.error("Split failed " + this, RemoteExceptionHandler.checkIOException(ex));
+      ex = ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex;
+      LOG.error("Split failed " + this, ex);
       server.checkFileSystem();
     } finally {
       if (this.parent.getCoprocessorHost() != null) {
@@ -120,7 +121,7 @@ class SplitRequest implements Runnable {
         try {
           this.parent.getCoprocessorHost().postCompleteSplit();
         } catch (IOException io) {
           LOG.error("Split failed " + this,
-              RemoteExceptionHandler.checkIOException(io));
+              io instanceof RemoteException ?
                  ((RemoteException) io).unwrapRemoteException() : io);
         }
       }
       releaseTableLock();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
index de31f24b40f..815730c5a26 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
@@ -56,15 +56,14 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.CoordinatedStateException;
+import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
@@ -112,6 +111,7 @@ import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.io.MultipleIOException;
+import org.apache.hadoop.ipc.RemoteException;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -361,7 +361,7 @@ public class HLogSplitter {
       ZKSplitLog.markCorrupted(rootDir, logfile.getPath().getName(), fs);
       isCorrupted = true;
     } catch (IOException e) {
-      e = RemoteExceptionHandler.checkIOException(e);
+      e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;
       throw e;
     } finally {
       LOG.debug("Finishing writing output logs and closing down.");
@@ -1276,7 +1276,8 @@ public class HLogSplitter {
         wap.incrementEdits(editsCount);
         wap.incrementNanoTime(System.nanoTime() - startTime);
       } catch (IOException e) {
-        e = RemoteExceptionHandler.checkIOException(e);
+        e = e instanceof RemoteException ?
+            ((RemoteException)e).unwrapRemoteException() : e;
         LOG.fatal(" Got while writing log entry to log", e);
         throw e;
       }
@@ -1625,7 +1626,7 @@ public class HLogSplitter {
         rsw.incrementEdits(actions.size());
         rsw.incrementNanoTime(System.nanoTime() - startTime);
       } catch (IOException e) {
-        e = RemoteExceptionHandler.checkIOException(e);
+        e = e instanceof RemoteException ?
            ((RemoteException) e).unwrapRemoteException() : e;
         LOG.fatal(" Got while writing log entry to log", e);
         throw e;
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 6c53f2a780e..36d95885a6c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -60,7 +60,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.fs.HFileSystem;
@@ -73,6 +72,7 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
@@ -399,7 +399,8 @@ public abstract class FSUtils {
         return;
       }
     } catch (IOException e) {
-      exception = RemoteExceptionHandler.checkIOException(e);
+      exception = e instanceof RemoteException ?
+          ((RemoteException)e).unwrapRemoteException() : e;
     }
     try {
       fs.close();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java
index 0f2c5db9888..f3ac5ccd83d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java
@@ -30,13 +30,12 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.RemoteExceptionHandler;
-import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnectable;
@@ -48,6 +47,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
+import org.apache.hadoop.ipc.RemoteException;
 
 /**
  * A non-instantiable class that has a static method capable of compacting
@@ -263,7 +263,7 @@ class HMerge {
         }
         return region;
       } catch (IOException e) {
-        e = RemoteExceptionHandler.checkIOException(e);
+        e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;
         LOG.error("meta scanner error", e);
         metaScanner.close();
         throw e;
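
Review note (not part of the patch): one behavioural difference to keep in mind. The old
checkIOException() mapped a decoded non-IOException cause to a new IOException("server error"),
while unwrapRemoteException() returns the RemoteException itself whenever the wrapped class cannot
be rebuilt as an IOException, and the ternaries above then log or rethrow it unchanged. A quick
check of that edge case (the wrapped class name is deliberately bogus):

    import org.apache.hadoop.ipc.RemoteException;

    public class UnwrapEdgeCase {
      public static void main(String[] args) {
        RemoteException re = new RemoteException("com.example.NoSuchException", "boom");
        // Expected output: true -- the RemoteException comes back as-is.
        System.out.println(re.unwrapRemoteException() == re);
      }
    }
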