From 6cea5c6462f279e38f6ba3af92f73088b9db5b54 Mon Sep 17 00:00:00 2001
From: niuyulin
Date: Thu, 23 Apr 2020 23:20:38 +0800
Subject: [PATCH] HBASE-24222 remove FSUtils.checkAccess and replace with
 FileSystem.access in HBCK (#1557)

Signed-off-by: Duo Zhang
---
 .../org/apache/hadoop/hbase/util/FSUtils.java | 30 -------------------
 .../apache/hadoop/hbase/util/HBaseFsck.java   | 10 +++----
 2 files changed, 4 insertions(+), 36 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 6dfb79aef2e..35073379271 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -58,7 +58,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.ClusterId;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -74,14 +73,12 @@ import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -1573,33 +1570,6 @@ public abstract class FSUtils extends CommonFSUtils {
     }
   }
 
-  /**
-   * Throw an exception if an action is not permitted by a user on a file.
-   *
-   * @param ugi
-   *          the user
-   * @param file
-   *          the file
-   * @param action
-   *          the action
-   */
-  public static void checkAccess(UserGroupInformation ugi, FileStatus file,
-      FsAction action) throws AccessDeniedException {
-    if (ugi.getShortUserName().equals(file.getOwner())) {
-      if (file.getPermission().getUserAction().implies(action)) {
-        return;
-      }
-    } else if (ArrayUtils.contains(ugi.getGroupNames(), file.getGroup())) {
-      if (file.getPermission().getGroupAction().implies(action)) {
-        return;
-      }
-    } else if (file.getPermission().getOtherAction().implies(action)) {
-      return;
-    }
-    throw new AccessDeniedException("Permission denied:" + " action=" + action
-        + " path=" + file.getPath() + " user=" + ugi.getShortUserName());
-  }
-
   /**
    * This function is to scan the root path of the file system to get the
    * degree of locality for each region on each of the servers having at least
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index bdad31b2470..c3d92e06c51 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -116,7 +116,6 @@ import org.apache.hadoop.hbase.replication.ReplicationException;
 import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
 import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
 import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
-import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator;
 import org.apache.hadoop.hbase.util.HbckErrorReporter.ERROR_CODE;
@@ -129,6 +128,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
@@ -1929,7 +1929,7 @@ public class HBaseFsck extends Configured implements Closeable {
     }
   }
 
-  private void preCheckPermission() throws IOException, AccessDeniedException {
+  private void preCheckPermission() throws IOException {
     if (shouldIgnorePreCheckPermission()) {
       return;
     }
@@ -1941,8 +1941,8 @@ public class HBaseFsck extends Configured implements Closeable {
     FileStatus[] files = fs.listStatus(hbaseDir);
     for (FileStatus file : files) {
       try {
-        FSUtils.checkAccess(ugi, file, FsAction.WRITE);
-      } catch (AccessDeniedException ace) {
+        fs.access(file.getPath(), FsAction.WRITE);
+      } catch (AccessControlException ace) {
         LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
         errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
           + " does not have write perms to " + file.getPath()
@@ -3780,8 +3780,6 @@ public class HBaseFsck extends Configured implements Closeable {
     // pre-check current user has FS write permission or not
    try {
       preCheckPermission();
-    } catch (AccessDeniedException ace) {
-      Runtime.getRuntime().exit(-1);
     } catch (IOException ioe) {
       Runtime.getRuntime().exit(-1);
     }
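
---

Note: the pattern this patch adopts can be sketched standalone. The snippet below is a minimal sketch, assuming only a Hadoop client on the classpath; the WriteAccessCheck class name and the /hbase default path are illustrative, not part of the patch (HBCK derives the real root from hbase.rootdir). FileSystem.access(Path, FsAction) delegates the permission check to the filesystem itself (on HDFS, DistributedFileSystem forwards it to the NameNode) and throws org.apache.hadoop.security.AccessControlException when the caller lacks the requested action, which is what makes the removed client-side owner/group/other bit comparison in FSUtils.checkAccess redundant.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.security.AccessControlException;

public class WriteAccessCheck {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Illustrative root directory; pass a real one as the first argument.
    Path rootDir = new Path(args.length > 0 ? args[0] : "/hbase");
    FileSystem fs = rootDir.getFileSystem(conf);

    for (FileStatus file : fs.listStatus(rootDir)) {
      try {
        // Ask the filesystem (the NameNode, on HDFS) whether the current
        // user may write this path, instead of re-deriving the answer from
        // the owner/group/other permission bits on the client side.
        fs.access(file.getPath(), FsAction.WRITE);
      } catch (AccessControlException ace) {
        System.err.println("No write permission on " + file.getPath());
      }
    }
  }
}

Because the server-side check on HDFS also takes ACLs and superuser privileges into account, it can disagree with (and is more accurate than) the bit-level comparison the removed helper performed; and since AccessControlException is an IOException, preCheckPermission now only needs to declare throws IOException, which is why the caller's separate AccessDeniedException catch block could be dropped.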