HADOOP-10213. Fix bugs parsing ACL spec in FsShell setfacl. Contributed by Vinay.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4685@1559793 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Chris Nauroth 2014-01-20 18:00:06 +00:00
parent 8a9a6dbd7e
commit cae96dfe6e
4 changed files with 118 additions and 63 deletions

View File

@ -17,12 +17,16 @@
*/ */
package org.apache.hadoop.fs.permission; package org.apache.hadoop.fs.permission;
import static org.apache.hadoop.fs.permission.AclEntryScope.*; import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.google.common.base.Objects; import com.google.common.base.Objects;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.StringUtils;
/** /**
* Defines a single entry in an ACL. An ACL entry has a type (user, group, * Defines a single entry in an ACL. An ACL entry has a type (user, group,
@ -193,4 +197,74 @@ private AclEntry(AclEntryType type, String name, FsAction permission, AclEntrySc
this.permission = permission; this.permission = permission;
this.scope = scope; this.scope = scope;
} }
/**
 * Parses a string representation of an ACL spec into a list of AclEntry
 * objects. Example: "user::rwx,user:foo:rw-,group::r--,other::---"
 *
 * @param aclSpec
 *          String representation of an ACL spec.
 * @param includePermission
 *          for setAcl operations this will be true. i.e. AclSpec should
 *          include permissions.<br>
 *          But for removeAcl operation it will be false. i.e. AclSpec should
 *          not contain permissions.<br>
 *          Example: "user:foo,group:bar"
 * @return Returns list of AclEntries parsed
 */
public static List<AclEntry> parseAclSpec(String aclSpec,
    boolean includePermission) {
  List<AclEntry> entries = new ArrayList<AclEntry>();
  // getStringCollection() drops empty tokens, so stray separators such as
  // a trailing "," or an accidental ",," in the spec are skipped silently.
  Collection<String> tokens = StringUtils.getStringCollection(aclSpec, ",");
  // A removeAcl entry is "type:name"; a setAcl entry adds ":permission".
  final int baseLength = includePermission ? 3 : 2;
  for (String token : tokens) {
    String[] parts = token.split(":");
    // A leading "default" scope prefix adds one field to the expected count.
    boolean isDefault = parts.length == baseLength + 1
        && "default".equals(parts[0]);
    if (parts.length != baseLength && !isDefault) {
      throw new HadoopIllegalArgumentException("Invalid <aclSpec> : "
          + token);
    }
    AclEntry.Builder builder = new AclEntry.Builder();
    int pos = 0;
    if (isDefault) {
      builder.setScope(AclEntryScope.DEFAULT);
      pos++;
    }
    try {
      // Enum.valueOf throws IllegalArgumentException for unknown types.
      builder.setType(Enum.valueOf(AclEntryType.class,
          parts[pos].toUpperCase()));
    } catch (IllegalArgumentException iae) {
      throw new HadoopIllegalArgumentException(
          "Invalid type of acl in <aclSpec> :" + token);
    }
    pos++;
    // An empty name (e.g. "user::rwx") means the entry applies to the file
    // owner/group, so the builder's name is left unset in that case.
    String name = parts[pos++];
    if (!name.isEmpty()) {
      builder.setName(name);
    }
    if (baseLength == 3) {
      FsAction action = FsAction.getFsAction(parts[pos++]);
      if (action == null) {
        throw new HadoopIllegalArgumentException(
            "Invalid permission in <aclSpec> : " + token);
      }
      builder.setPermission(action);
    }
    entries.add(builder.build());
  }
  return entries;
}
} }

View File

@ -18,8 +18,6 @@
package org.apache.hadoop.fs.shell; package org.apache.hadoop.fs.shell;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
@ -27,14 +25,12 @@
import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope; import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType; import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.StringUtils;
/** /**
* Acl related operations * Acl related operations
@ -201,9 +197,6 @@ public static class SetfaclCommand extends FsCommand {
+ "<acl_spec>: Comma separated list of ACL entries.\n" + "<acl_spec>: Comma separated list of ACL entries.\n"
+ "<path>: File or directory to modify.\n"; + "<path>: File or directory to modify.\n";
private static final String DEFAULT = "default";
Path path = null;
CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "b", "k", "R", CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "b", "k", "R",
"m", "x", "-set"); "m", "x", "-set");
List<AclEntry> aclEntries = null; List<AclEntry> aclEntries = null;
@ -230,7 +223,7 @@ protected void processOptions(LinkedList<String> args) throws IOException {
if (args.size() < 2) { if (args.size() < 2) {
throw new HadoopIllegalArgumentException("<acl_spec> is missing"); throw new HadoopIllegalArgumentException("<acl_spec> is missing");
} }
aclEntries = parseAclSpec(args.removeFirst()); aclEntries = AclEntry.parseAclSpec(args.removeFirst(), !cf.getOpt("x"));
} }
if (args.isEmpty()) { if (args.isEmpty()) {
@ -239,7 +232,6 @@ protected void processOptions(LinkedList<String> args) throws IOException {
if (args.size() > 1) { if (args.size() > 1) {
throw new HadoopIllegalArgumentException("Too many arguments"); throw new HadoopIllegalArgumentException("Too many arguments");
} }
path = new Path(args.removeFirst());
} }
@Override @Override
@ -253,59 +245,8 @@ protected void processPath(PathData item) throws IOException {
} else if (cf.getOpt("x")) { } else if (cf.getOpt("x")) {
item.fs.removeAclEntries(item.path, aclEntries); item.fs.removeAclEntries(item.path, aclEntries);
} else if (cf.getOpt("-set")) { } else if (cf.getOpt("-set")) {
item.fs.setAcl(path, aclEntries); item.fs.setAcl(item.path, aclEntries);
} }
} }
/**
* Parse the aclSpec and returns the list of AclEntry objects.
*
* @param aclSpec
* @return
*/
/**
 * Parse the aclSpec and returns the list of AclEntry objects.
 *
 * @param aclSpec comma-separated ACL spec string, e.g.
 *          "user::rwx,user:foo:rw-,default:group:bar:r--"
 * @return list of AclEntry objects parsed from the spec
 * @throws HadoopIllegalArgumentException if an entry is malformed, names an
 *           unknown ACL type, or carries an invalid permission string
 */
private List<AclEntry> parseAclSpec(String aclSpec) {
  List<AclEntry> aclEntries = new ArrayList<AclEntry>();
  Collection<String> aclStrings = StringUtils.getStringCollection(aclSpec,
      ",");
  for (String aclStr : aclStrings) {
    AclEntry.Builder builder = new AclEntry.Builder();
    // Here "::" represent one empty string.
    // StringUtils.getStringCollection() will ignore this.
    // BUG FIX: split the single entry (aclStr), not the whole spec
    // (aclSpec); splitting the whole spec made every multi-entry spec
    // fail the field-count check below.
    String[] split = aclStr.split(":");
    if (split.length != 3
        && !(split.length == 4 && DEFAULT.equals(split[0]))) {
      throw new HadoopIllegalArgumentException("Invalid <aclSpec> : "
          + aclStr);
    }
    int index = 0;
    if (split.length == 4) {
      assert DEFAULT.equals(split[0]);
      // default entry
      index++;
      builder.setScope(AclEntryScope.DEFAULT);
    }
    String type = split[index++];
    AclEntryType aclType = null;
    try {
      aclType = Enum.valueOf(AclEntryType.class, type.toUpperCase());
      builder.setType(aclType);
    } catch (IllegalArgumentException iae) {
      throw new HadoopIllegalArgumentException(
          "Invalid type of acl in <aclSpec> :" + aclStr);
    }
    // BUG FIX: an empty name field (e.g. "user::rwx") denotes the file
    // owner/group entry; setting an empty-string name corrupted the entry,
    // so leave the name unset in that case.
    String name = split[index++];
    if (!name.isEmpty()) {
      builder.setName(name);
    }
    String permission = split[index++];
    FsAction fsAction = FsAction.getFsAction(permission);
    if (null == fsAction) {
      throw new HadoopIllegalArgumentException(
          "Invalid permission in <aclSpec> : " + aclStr);
    }
    builder.setPermission(fsAction);
    aclEntries.add(builder.build());
  }
  return aclEntries;
}
} }
} }

View File

@ -17,12 +17,18 @@
*/ */
package org.apache.hadoop.fs.shell; package org.apache.hadoop.fs.shell;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.*;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -57,6 +63,37 @@ public void testSetfaclValidations() throws Exception {
assertFalse("setfacl should fail with extra arguments", assertFalse("setfacl should fail with extra arguments",
0 == runCommand(new String[] { "-setfacl", "--set", 0 == runCommand(new String[] { "-setfacl", "--set",
"default:user::rwx", "/path", "extra" })); "default:user::rwx", "/path", "extra" }));
assertFalse("setfacl should fail with permissions for -x",
0 == runCommand(new String[] { "-setfacl", "-x", "user:user1:rwx",
"/path" }));
assertFalse("setfacl should fail with permissions for -x",
0 == runCommand(new String[] { "-setfacl", "-m",
"", "/path" }));
}
@Test
public void testMultipleAclSpecParsing() throws Exception {
// Regression test for HADOOP-10213: a spec with several comma-separated
// entries must be split per entry; the old code split the whole spec
// string for each entry, so any multi-entry spec failed to parse.
List<AclEntry> parsedList = AclEntry.parseAclSpec(
"group::rwx,user:user1:rwx,user:user2:rw-,group:group1:rw-,default:group:group1:rw-", true);
// "group::rwx" — empty name field, so no name is set on the entry.
AclEntry basicAcl = new AclEntry.Builder().setType(AclEntryType.GROUP)
.setPermission(FsAction.ALL).build();
// Named user entries with distinct permission strings.
AclEntry user1Acl = new AclEntry.Builder().setType(AclEntryType.USER)
.setPermission(FsAction.ALL).setName("user1").build();
AclEntry user2Acl = new AclEntry.Builder().setType(AclEntryType.USER)
.setPermission(FsAction.READ_WRITE).setName("user2").build();
// Named group entry.
AclEntry group1Acl = new AclEntry.Builder().setType(AclEntryType.GROUP)
.setPermission(FsAction.READ_WRITE).setName("group1").build();
// "default:" prefix must map to AclEntryScope.DEFAULT, consuming one
// extra colon-separated field.
AclEntry defaultAcl = new AclEntry.Builder().setType(AclEntryType.GROUP)
.setPermission(FsAction.READ_WRITE).setName("group1")
.setScope(AclEntryScope.DEFAULT).build();
// Order matters: parseAclSpec must preserve the spec's entry order.
List<AclEntry> expectedList = new ArrayList<AclEntry>();
expectedList.add(basicAcl);
expectedList.add(user1Acl);
expectedList.add(user2Acl);
expectedList.add(group1Acl);
expectedList.add(defaultAcl);
assertEquals("Parsed Acl not correct", expectedList, parsedList);
} }
private int runCommand(String[] commands) throws Exception { private int runCommand(String[] commands) throws Exception {

View File

@ -45,3 +45,6 @@ HDFS-4685 (Unreleased)
HDFS-5739. ACL RPC must allow null name or unspecified permissions in ACL HDFS-5739. ACL RPC must allow null name or unspecified permissions in ACL
entries. (cnauroth) entries. (cnauroth)
HADOOP-10213. Fix bugs parsing ACL spec in FsShell setfacl.
(Vinay via cnauroth)