diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
index 7d494145839..97dcf816c16 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
@@ -69,4 +69,21 @@ public enum FsAction {
   public FsAction not() {
     return vals[7 - ordinal()];
   }
+
+  /**
+   * Get the FsAction enum for the String representation of permissions.
+   *
+   * @param permission
+   *          3-character string representation of permission, e.g. "rwx"
+   * @return the FsAction corresponding to permission, or null if no matching
+   *         FsAction exists
+   */
+  public static FsAction getFsAction(String permission) {
+    for (FsAction fsAction : vals) {
+      if (fsAction.SYMBOL.equals(permission)) {
+        return fsAction;
+      }
+    }
+    return null;
+  }
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
new file mode 100644
index 00000000000..5fad5f0ad39
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
@@ -0,0 +1,232 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * ACL related operations.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+class AclCommands extends FsCommand {
+  private static String GET_FACL = "getfacl";
+  private static String SET_FACL = "setfacl";
+
+  public static void registerCommands(CommandFactory factory) {
+    factory.addClass(GetfaclCommand.class, "-" + GET_FACL);
+    factory.addClass(SetfaclCommand.class, "-" + SET_FACL);
+  }
+
+  /**
+   * Implementing the '-getfacl' command for the FsShell.
+   */
+  public static class GetfaclCommand extends FsCommand {
+    public static String NAME = GET_FACL;
+    public static String USAGE = "[-R] <path>";
+    public static String DESCRIPTION = "Displays the Access Control Lists"
+        + " (ACLs) of files and directories. If a directory has a default ACL,"
+        + " then getfacl also displays the default ACL.\n"
+        + "-R: List the ACLs of all files and directories recursively.\n"
+        + "<path>: File or directory to list.\n";
+
+    @Override
+    protected void processOptions(LinkedList<String> args) throws IOException {
+      CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "R");
+      cf.parse(args);
+      setRecursive(cf.getOpt("R"));
+      if (args.isEmpty()) {
+        throw new HadoopIllegalArgumentException("<path> is missing");
+      }
+      if (args.size() > 1) {
+        throw new HadoopIllegalArgumentException("Too many arguments");
+      }
+    }
+
+    @Override
+    protected void processPath(PathData item) throws IOException {
+      AclStatus aclStatus = item.fs.getAclStatus(item.path);
+      out.println("# file: " + item.path);
+      out.println("# owner: " + aclStatus.getOwner());
+      out.println("# group: " + aclStatus.getGroup());
+      List<AclEntry> entries = aclStatus.getEntries();
+      if (aclStatus.isStickyBit()) {
+        String stickyFlag = "T";
+        for (AclEntry aclEntry : entries) {
+          if (aclEntry.getType() == AclEntryType.OTHER
+              && aclEntry.getScope() == AclEntryScope.ACCESS
+              && aclEntry.getPermission().implies(FsAction.EXECUTE)) {
+            stickyFlag = "t";
+            break;
+          }
+        }
+        out.println("# flags: --" + stickyFlag);
+      }
+      for (AclEntry entry : entries) {
+        out.println(entry);
+      }
+    }
+  }
+
+  /**
+   * Implementing the '-setfacl' command for the FsShell.
+   */
+  public static class SetfaclCommand extends FsCommand {
+    public static String NAME = SET_FACL;
+    public static String USAGE = "[-R] [{-b|-k} {-m|-x <acl_spec>} <path>]"
+        + "|[--set <acl_spec> <path>]";
+    public static String DESCRIPTION = "Sets Access Control Lists (ACLs)"
+        + " of files and directories.\n"
+        + "Options:\n"
+        + "-b: Remove all but the base ACL entries. The entries for user,"
+        + " group and others are retained for compatibility with permission"
+        + " bits.\n"
+        + "-k: Remove the default ACL.\n"
+        + "-R: Apply operations to all files and directories recursively.\n"
+        + "-m: Modify ACL. New entries are added to the ACL, and existing"
+        + " entries are retained.\n"
+        + "-x: Remove specified ACL entries. Other ACL entries are retained.\n"
+        + "--set: Fully replace the ACL, discarding all existing entries."
+ + " The must include entries for user, group, and others" + + " for compatibility with permission bits.\n" + + ": Comma separated list of ACL entries.\n" + + ": File or directory to modify.\n"; + + private static final String DEFAULT = "default"; + + Path path = null; + CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "b", "k", "R", + "m", "x", "-set"); + List aclEntries = null; + + @Override + protected void processOptions(LinkedList args) throws IOException { + cf.parse(args); + setRecursive(cf.getOpt("R")); + // Mix of remove and modify acl flags are not allowed + boolean bothRemoveOptions = cf.getOpt("b") && cf.getOpt("k"); + boolean bothModifyOptions = cf.getOpt("m") && cf.getOpt("x"); + boolean oneRemoveOption = cf.getOpt("b") || cf.getOpt("k"); + boolean oneModifyOption = cf.getOpt("m") || cf.getOpt("x"); + boolean setOption = cf.getOpt("-set"); + if ((bothRemoveOptions || bothModifyOptions) + || (oneRemoveOption && oneModifyOption) + || (setOption && (oneRemoveOption || oneModifyOption))) { + throw new HadoopIllegalArgumentException( + "Specified flags contains both remove and modify flags"); + } + + // Only -m, -x and --set expects + if (oneModifyOption || setOption) { + if (args.size() < 2) { + throw new HadoopIllegalArgumentException(" is missing"); + } + aclEntries = parseAclSpec(args.removeFirst()); + } + + if (args.isEmpty()) { + throw new HadoopIllegalArgumentException(" is missing"); + } + if (args.size() > 1) { + throw new HadoopIllegalArgumentException("Too many arguments"); + } + path = new Path(args.removeFirst()); + } + + @Override + protected void processPath(PathData item) throws IOException { + if (cf.getOpt("b")) { + item.fs.removeAcl(item.path); + } else if (cf.getOpt("k")) { + item.fs.removeDefaultAcl(item.path); + } else if (cf.getOpt("m")) { + item.fs.modifyAclEntries(item.path, aclEntries); + } else if (cf.getOpt("x")) { + item.fs.removeAclEntries(item.path, aclEntries); + } else if (cf.getOpt("-set")) { + item.fs.setAcl(path, aclEntries); + } + } + + /** + * Parse the aclSpec and returns the list of AclEntry objects. + * + * @param aclSpec + * @return + */ + private List parseAclSpec(String aclSpec) { + List aclEntries = new ArrayList(); + Collection aclStrings = StringUtils.getStringCollection(aclSpec, + ","); + for (String aclStr : aclStrings) { + AclEntry.Builder builder = new AclEntry.Builder(); + // Here "::" represent one empty string. + // StringUtils.getStringCollection() will ignore this. 
+        String[] split = aclStr.split(":");
+        if (split.length != 3
+            && !(split.length == 4 && DEFAULT.equals(split[0]))) {
+          throw new HadoopIllegalArgumentException("Invalid <aclSpec>: "
+              + aclStr);
+        }
+        int index = 0;
+        if (split.length == 4) {
+          assert DEFAULT.equals(split[0]);
+          // default entry
+          index++;
+          builder.setScope(AclEntryScope.DEFAULT);
+        }
+        String type = split[index++];
+        AclEntryType aclType = null;
+        try {
+          aclType = Enum.valueOf(AclEntryType.class, type.toUpperCase());
+          builder.setType(aclType);
+        } catch (IllegalArgumentException iae) {
+          throw new HadoopIllegalArgumentException(
+              "Invalid type of acl in <aclSpec>: " + aclStr);
+        }
+
+        builder.setName(split[index++]);
+
+        String permission = split[index++];
+        FsAction fsAction = FsAction.getFsAction(permission);
+        if (null == fsAction) {
+          throw new HadoopIllegalArgumentException(
+              "Invalid permission in <aclSpec>: " + aclStr);
+        }
+        builder.setPermission(fsAction);
+        aclEntries.add(builder.build());
+      }
+      return aclEntries;
+    }
+  }
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java
index 78b47dc8860..c4a6d80754d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java
@@ -43,6 +43,7 @@ abstract public class FsCommand extends Command {
    * @param factory where to register the class
    */
   public static void registerCommands(CommandFactory factory) {
+    factory.registerCommands(AclCommands.class);
     factory.registerCommands(CopyCommands.class);
     factory.registerCommands(Count.class);
     factory.registerCommands(Delete.class);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
new file mode 100644
index 00000000000..d0137e24729
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import static org.junit.Assert.assertFalse;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestAclCommands {
+
+  private Configuration conf = null;
+
+  @Before
+  public void setup() throws IOException {
+    conf = new Configuration();
+  }
+
+  @Test
+  public void testGetfaclValidations() throws Exception {
+    assertFalse("getfacl should fail without path",
+        0 == runCommand(new String[] { "-getfacl" }));
+    assertFalse("getfacl should fail with extra argument",
+        0 == runCommand(new String[] { "-getfacl", "/test", "extraArg" }));
+  }
+
+  @Test
+  public void testSetfaclValidations() throws Exception {
+    assertFalse("setfacl should fail without path",
+        0 == runCommand(new String[] { "-setfacl" }));
+    assertFalse("setfacl should fail without aclSpec",
+        0 == runCommand(new String[] { "-setfacl", "-m", "/path" }));
+    assertFalse("setfacl should fail with conflicting options",
+        0 == runCommand(new String[] { "-setfacl", "-b", "-m", "user::rwx", "/path" }));
+    assertFalse("setfacl should fail with extra arguments",
+        0 == runCommand(new String[] { "-setfacl", "/path", "extra" }));
+    assertFalse("setfacl should fail with extra arguments",
+        0 == runCommand(new String[] { "-setfacl", "--set",
+            "default:user::rwx", "/path", "extra" }));
+  }
+
+  private int runCommand(String[] commands) throws Exception {
+    return ToolRunner.run(conf, new FsShell(), commands);
+  }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt
index 6dfbbb1574f..0cbb1bae04c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt
@@ -21,6 +21,9 @@ HDFS-4685 (Unreleased)
     HDFS-5619. NameNode: record ACL modifications to edit log. (Haohui Mai via
     cnauroth)
 
+    HADOOP-10187. FsShell CLI: add getfacl and setfacl with minimal support for
+    getting and setting ACLs. (Vinay via cnauroth)
+
   OPTIMIZATIONS
 
   BUG FIXES
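
Usage sketch (not part of the patch): the snippet below drives the new -setfacl and -getfacl commands programmatically through FsShell and ToolRunner, the same way TestAclCommands above does. The class name AclShellDemo, the path /tmp/aclDemo, and the ACL entry user:hadoop:rw- are illustrative assumptions; the target path must already exist on the configured default filesystem, and that filesystem must support ACL operations.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

public class AclShellDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // -m modifies the ACL: it adds a named-user entry and keeps existing
    // entries. The spec follows the [default:]type:name:permission format
    // accepted by SetfaclCommand.parseAclSpec.
    int rc = ToolRunner.run(conf, new FsShell(),
        new String[] { "-setfacl", "-m", "user:hadoop:rw-", "/tmp/aclDemo" });
    if (rc == 0) {
      // Print the owner, group, and ACL entries of the path.
      rc = ToolRunner.run(conf, new FsShell(),
          new String[] { "-getfacl", "/tmp/aclDemo" });
    }
    System.exit(rc);
  }
}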