diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 8132db47f92..8e2d4efcddb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -2278,7 +2278,8 @@ public abstract class FileSystem extends Configured implements Closeable {
    * @param aclSpec List<AclEntry> describing modifications
    * @throws IOException if an ACL could not be modified
    */
-  public void modifyAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+      throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
         + " doesn't support modifyAclEntries");
   }
@@ -2291,7 +2292,8 @@ public abstract class FileSystem extends Configured implements Closeable {
    * @param aclSpec List<AclEntry> describing entries to remove
    * @throws IOException if an ACL could not be modified
    */
-  public void removeAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+      throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
         + " doesn't support removeAclEntries");
   }
@@ -2331,7 +2333,7 @@ public abstract class FileSystem extends Configured implements Closeable {
    * for user, group, and others for compatibility with permission bits.
    * @throws IOException if an ACL could not be modified
    */
-  public void setAcl(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
         + " doesn't support setAcl");
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index 5072a4db081..d45ecbbf11a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -22,6 +22,7 @@ import java.io.*;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.EnumSet;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -512,29 +513,29 @@ public class FilterFileSystem extends FileSystem {
   }
 
   @Override
-  public void modifyAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+      throws IOException {
     fs.modifyAclEntries(path, aclSpec);
   }
 
   @Override
-  public void removeAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+      throws IOException {
     fs.removeAclEntries(path, aclSpec);
   }
 
   @Override
-  public void removeDefaultAcl(Path path)
-      throws IOException {
+  public void removeDefaultAcl(Path path) throws IOException {
     fs.removeDefaultAcl(path);
   }
 
   @Override
-  public void removeAcl(Path path)
-      throws IOException {
+  public void removeAcl(Path path) throws IOException {
     fs.removeAcl(path);
   }
 
   @Override
-  public void setAcl(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
     fs.setAcl(path, aclSpec);
   }
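Commentary: the hunks above tighten the ACL methods from `Iterable<AclEntry>` to `List<AclEntry>`. A minimal caller-side sketch of the resulting API; the path and entry values are illustrative, not part of this patch:

```java
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;

public class AclApiExample {
  public static void main(String[] args) throws Exception {
    // Resolves to whatever fs.defaultFS points at; a FileSystem that does
    // not override the ACL methods throws UnsupportedOperationException.
    FileSystem fs = FileSystem.get(new Configuration());

    // Grant user "bruce" read/write access (illustrative name and path).
    List<AclEntry> aclSpec = Arrays.asList(
        new AclEntry.Builder()
            .setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.USER)
            .setName("bruce")
            .setPermission(FsAction.READ_WRITE)
            .build());

    // Merges the entry into the existing ACL; unrelated entries survive.
    fs.modifyAclEntries(new Path("/data"), aclSpec);
  }
}
```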
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntryScope.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntryScope.java
index 087a9566e33..6d941e7117d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntryScope.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntryScope.java
@@ -38,5 +38,5 @@ public enum AclEntryScope {
    * entry is not inspected as part of permission enforcement on the directory
    * that owns it.
    */
-  DEFAULT
+  DEFAULT;
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntryType.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntryType.java
index a9bc3e0786a..ffd62d7080b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntryType.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntryType.java
@@ -54,5 +54,5 @@ public enum AclEntryType {
    * An ACL entry that applies to all other users that were not covered by one
    * of the more specific ACL entry types.
    */
-  OTHER
+  OTHER;
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
index 138154f40a0..c881b0faab8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
@@ -36,7 +36,7 @@ public class AclStatus {
   private final String owner;
   private final String group;
   private final boolean stickyBit;
-  private final Iterable<AclEntry> entries;
+  private final List<AclEntry> entries;
 
   /**
    * Returns the file owner.
@@ -68,9 +68,9 @@ public class AclStatus {
   /**
    * Returns the list of all ACL entries, ordered by their natural ordering.
    *
-   * @return Iterable<AclEntry> unmodifiable ordered list of all ACL entries
+   * @return List<AclEntry> unmodifiable ordered list of all ACL entries
    */
-  public Iterable<AclEntry> getEntries() {
+  public List<AclEntry> getEntries() {
     return entries;
   }
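Commentary: with `AclStatus#getEntries()` now returning a `List`, callers get size and positional access in addition to iteration. A hedged sketch of reading the accessors; the `AclStatus` instance would come from `getAclStatus`, which the HDFS changes below wire up:

```java
import java.util.List;

import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;

public class AclStatusExample {
  // Prints the fields exposed by AclStatus; per the Javadoc above, entries
  // arrive in their natural ordering.
  static void printAcl(AclStatus status) {
    System.out.println("owner:  " + status.getOwner());
    System.out.println("group:  " + status.getGroup());
    System.out.println("sticky: " + status.isStickyBit());
    List<AclEntry> entries = status.getEntries(); // a List now, not Iterable
    for (int i = 0; i < entries.size(); i++) {    // positional access works
      System.out.println("entry " + i + ": " + entries.get(i));
    }
  }
}
```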
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
index d628bb1a331..0d3be9bac30 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
@@ -20,6 +20,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.util.EnumSet;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -279,31 +280,31 @@ class ChRootedFileSystem extends FilterFileSystem {
       throws IOException {
     super.setTimes(fullPath(f), mtime, atime);
   }
-  
+
   @Override
-  public void modifyAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+      throws IOException {
     super.modifyAclEntries(fullPath(path), aclSpec);
   }
 
   @Override
-  public void removeAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+      throws IOException {
     super.removeAclEntries(fullPath(path), aclSpec);
   }
 
   @Override
-  public void removeDefaultAcl(Path path)
-      throws IOException {
+  public void removeDefaultAcl(Path path) throws IOException {
     super.removeDefaultAcl(fullPath(path));
   }
 
   @Override
-  public void removeAcl(Path path)
-      throws IOException {
+  public void removeAcl(Path path) throws IOException {
     super.removeAcl(fullPath(path));
   }
 
   @Override
-  public void setAcl(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
     super.setAcl(fullPath(path), aclSpec);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index 861a7a3f915..ac2664effdb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -474,16 +474,18 @@ public class ViewFileSystem extends FileSystem {
   }
 
   @Override
-  public void modifyAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException {
-    InodeTree.ResolveResult<FileSystem> res =
-      fsState.resolve(getUriPath(path), true);
+  public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+      throws IOException {
+    InodeTree.ResolveResult<FileSystem> res = fsState.resolve(getUriPath(path),
+        true);
     res.targetFileSystem.modifyAclEntries(res.remainingPath, aclSpec);
   }
 
   @Override
-  public void removeAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException {
-    InodeTree.ResolveResult<FileSystem> res =
-      fsState.resolve(getUriPath(path), true);
+  public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+      throws IOException {
+    InodeTree.ResolveResult<FileSystem> res = fsState.resolve(getUriPath(path),
+        true);
     res.targetFileSystem.removeAclEntries(res.remainingPath, aclSpec);
   }
 
@@ -504,7 +506,7 @@ public class ViewFileSystem extends FileSystem {
   }
 
   @Override
-  public void setAcl(Path path, Iterable<AclEntry> aclSpec) throws IOException {
+  public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
     InodeTree.ResolveResult<FileSystem> res =
         fsState.resolve(getUriPath(path), true);
     res.targetFileSystem.setAcl(res.remainingPath, aclSpec);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
index 63b4298f839..200e9f27261 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
@@ -35,6 +35,7 @@ import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.util.EnumSet;
 import java.util.Iterator;
+import java.util.List;
 
 import static org.apache.hadoop.fs.Options.ChecksumOpt;
 import static org.apache.hadoop.fs.Options.CreateOpts;
@@ -167,11 +168,19 @@ public class TestHarFileSystem {
         String snapshotNewName) throws IOException;
     public void deleteSnapshot(Path path, String snapshotName)
         throws IOException;
-    public void modifyAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException;
-    public void removeAclEntries(Path path, Iterable<AclEntry> aclSpec) throws IOException;
+
+    public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+        throws IOException;
+
+    public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+        throws IOException;
+
     public void removeDefaultAcl(Path path) throws IOException;
+
     public void removeAcl(Path path) throws IOException;
-    public void setAcl(Path path, Iterable<AclEntry> aclSpec) throws IOException;
+
+    public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException;
+
     public AclStatus getAclStatus(Path path) throws IOException;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt
index fce9e3eb470..314af95662c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt
@@ -12,6 +12,8 @@ HDFS-4685 (Unreleased)
     HDFS-5650. Remove AclReadFlag and AclWriteFlag in FileSystem API.
     (Haohui Mai via cnauroth)
 
+    HDFS-5596. Implement RPC stubs. (Haohui Mai via cnauroth)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 0b1e55d46c5..ebf19c3024e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -481,6 +481,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                   <include>ClientNamenodeProtocol.proto</include>
                   <include>NamenodeProtocol.proto</include>
+                  <include>acl.proto</include>
                 </includes>
               </source>
               <output>${project.build.directory}/generated-sources/java</output>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index f008878e48e..7f3279150b1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -104,6 +104,8 @@ import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.VolumeId;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.client.ClientMmapManager;
 import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
@@ -2629,4 +2631,82 @@ public class DFSClient implements java.io.Closeable {
   public ClientMmapManager getMmapManager() {
     return mmapManager;
   }
+
+  void modifyAclEntries(String src, List<AclEntry> aclSpec)
+      throws IOException {
+    checkOpen();
+    try {
+      namenode.modifyAclEntries(src, aclSpec);
+    } catch(RemoteException re) {
+      throw re.unwrapRemoteException(AccessControlException.class,
+                                     FileNotFoundException.class,
+                                     SafeModeException.class,
+                                     UnresolvedPathException.class,
+                                     SnapshotAccessControlException.class);
+    }
+  }
+
+  void removeAclEntries(String src, List<AclEntry> aclSpec)
+      throws IOException {
+    checkOpen();
+    try {
+      namenode.removeAclEntries(src, aclSpec);
+    } catch(RemoteException re) {
+      throw re.unwrapRemoteException(AccessControlException.class,
+                                     FileNotFoundException.class,
+                                     SafeModeException.class,
+                                     UnresolvedPathException.class,
+                                     SnapshotAccessControlException.class);
+    }
+  }
+
+  void removeDefaultAcl(String src) throws IOException {
+    checkOpen();
+    try {
+      namenode.removeDefaultAcl(src);
+    } catch(RemoteException re) {
+      throw re.unwrapRemoteException(AccessControlException.class,
+                                     FileNotFoundException.class,
+                                     SafeModeException.class,
+                                     UnresolvedPathException.class,
+                                     SnapshotAccessControlException.class);
+    }
+  }
+
+  void removeAcl(String src) throws IOException {
+    checkOpen();
+    try {
+      namenode.removeAcl(src);
+    } catch(RemoteException re) {
+      throw re.unwrapRemoteException(AccessControlException.class,
+                                     FileNotFoundException.class,
+                                     SafeModeException.class,
+                                     UnresolvedPathException.class,
+                                     SnapshotAccessControlException.class);
+    }
+  }
+
+  void setAcl(String src, List<AclEntry> aclSpec) throws IOException {
+    checkOpen();
+    try {
+      namenode.setAcl(src, aclSpec);
+    } catch(RemoteException re) {
+      throw re.unwrapRemoteException(AccessControlException.class,
+                                     FileNotFoundException.class,
+                                     SafeModeException.class,
+                                     UnresolvedPathException.class,
+                                     SnapshotAccessControlException.class);
+    }
+  }
+
+  AclStatus getAclStatus(String src) throws IOException {
+    checkOpen();
+    try {
+      return namenode.getAclStatus(src);
+    } catch(RemoteException re) {
+      throw re.unwrapRemoteException(AccessControlException.class,
+                                     FileNotFoundException.class,
+                                     UnresolvedPathException.class);
+    }
+  }
 }
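Commentary: every new DFSClient method follows the same established pattern: call checkOpen(), delegate to the NameNode proxy, and unwrap RemoteException into typed IOException subclasses. A sketch of what that buys a caller; the path and handling are illustrative, not part of this patch:

```java
import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.AccessControlException;

public class AclErrorHandlingExample {
  // Because DFSClient unwraps RemoteException, callers can catch the
  // specific IOException subclasses instead of parsing error strings.
  static void removeAclSafely(FileSystem fs, Path path) throws IOException {
    try {
      fs.removeAcl(path);
    } catch (AccessControlException e) {
      System.err.println("permission denied: " + e.getMessage());
    } catch (FileNotFoundException e) {
      System.err.println("no such path: " + path);
    }
  }
}
```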
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index be44c13aa52..489a8779d7b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -53,6 +53,8 @@ import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.UnsupportedFileSystemException;
 import org.apache.hadoop.fs.VolumeId;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.client.HdfsAdmin;
 import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
@@ -1722,4 +1724,130 @@ public class DistributedFileSystem extends FileSystem {
   public RemoteIterator<CachePoolInfo> listCachePools() throws IOException {
     return dfs.listCachePools();
   }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void modifyAclEntries(Path path, final List<AclEntry> aclSpec)
+      throws IOException {
+    Path absF = fixRelativePart(path);
+    new FileSystemLinkResolver<Void>() {
+      @Override
+      public Void doCall(final Path p) throws IOException {
+        dfs.modifyAclEntries(getPathName(p), aclSpec);
+        return null;
+      }
+
+      @Override
+      public Void next(final FileSystem fs, final Path p) throws IOException {
+        fs.modifyAclEntries(p, aclSpec);
+        return null;
+      }
+    }.resolve(this, absF);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void removeAclEntries(Path path, final List<AclEntry> aclSpec)
+      throws IOException {
+    Path absF = fixRelativePart(path);
+    new FileSystemLinkResolver<Void>() {
+      @Override
+      public Void doCall(final Path p) throws IOException {
+        dfs.removeAclEntries(getPathName(p), aclSpec);
+        return null;
+      }
+
+      @Override
+      public Void next(final FileSystem fs, final Path p) throws IOException {
+        fs.removeAclEntries(p, aclSpec);
+        return null;
+      }
+    }.resolve(this, absF);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void removeDefaultAcl(Path path) throws IOException {
+    final Path absF = fixRelativePart(path);
+    new FileSystemLinkResolver<Void>() {
+      @Override
+      public Void doCall(final Path p) throws IOException {
+        dfs.removeDefaultAcl(getPathName(p));
+        return null;
+      }
+      @Override
+      public Void next(final FileSystem fs, final Path p)
+          throws IOException, UnresolvedLinkException {
+        fs.removeDefaultAcl(p);
+        return null;
+      }
+    }.resolve(this, absF);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void removeAcl(Path path) throws IOException {
+    final Path absF = fixRelativePart(path);
+    new FileSystemLinkResolver<Void>() {
+      @Override
+      public Void doCall(final Path p) throws IOException {
+        dfs.removeAcl(getPathName(p));
+        return null;
+      }
+      @Override
+      public Void next(final FileSystem fs, final Path p)
+          throws IOException, UnresolvedLinkException {
+        fs.removeAcl(p);
+        return null;
+      }
+    }.resolve(this, absF);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void setAcl(Path path, final List<AclEntry> aclSpec) throws IOException {
+    Path absF = fixRelativePart(path);
+    new FileSystemLinkResolver<Void>() {
+      @Override
+      public Void doCall(final Path p) throws IOException {
+        dfs.setAcl(getPathName(p), aclSpec);
+        return null;
+      }
+
+      @Override
+      public Void next(final FileSystem fs, final Path p) throws IOException {
+        fs.setAcl(p, aclSpec);
+        return null;
+      }
+    }.resolve(this, absF);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public AclStatus getAclStatus(Path path) throws IOException {
+    final Path absF = fixRelativePart(path);
+    return new FileSystemLinkResolver<AclStatus>() {
+      @Override
+      public AclStatus doCall(final Path p) throws IOException {
+        return dfs.getAclStatus(getPathName(p));
+      }
+      @Override
+      public AclStatus next(final FileSystem fs, final Path p)
+          throws IOException, UnresolvedLinkException {
+        return fs.getAclStatus(p);
+      }
+    }.resolve(this, absF);
+  }
 }
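Commentary: each DistributedFileSystem override wraps its DFSClient call in a FileSystemLinkResolver, parameterized by the operation's return type (Void for the mutators, AclStatus for the getter). Roughly, resolve() tries doCall() against this filesystem first and falls back to next() when the path turns out to cross a symlink into another filesystem. A simplified sketch of that control flow, not the actual implementation:

```java
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnresolvedLinkException;

// Hypothetical reduction of the resolver idiom to its essential shape.
abstract class ResolverShape<T> {
  abstract T doCall(Path p) throws IOException;              // direct attempt
  abstract T next(FileSystem fs, Path p) throws IOException; // link fallback

  T resolve(FileSystem start, Path p) throws IOException {
    try {
      return doCall(p);
    } catch (UnresolvedLinkException e) {
      // Re-dispatch against the filesystem the symlink points into.
      // (The real FileSystemLinkResolver also qualifies the link target
      // and bounds the number of resolution attempts.)
      return next(FileSystem.get(p.toUri(), start.getConf()), p);
    }
  }
}
```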
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index 709047ac124..7490b156406 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.protocol;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -32,6 +33,8 @@ import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
@@ -1180,4 +1183,49 @@ public interface ClientProtocol {
   @Idempotent
   public BatchedEntries<CachePoolInfo> listCachePools(String prevPool)
       throws IOException;
+
+  /**
+   * Modifies ACL entries of files and directories. This method can add new ACL
+   * entries or modify the permissions on existing ACL entries. All existing
+   * ACL entries that are not specified in this call are retained without
+   * changes. (Modifications are merged into the current ACL.)
+   */
+  @Idempotent
+  public void modifyAclEntries(String src, List<AclEntry> aclSpec)
+      throws IOException;
+
+  /**
+   * Removes ACL entries from files and directories. Other ACL entries are
+   * retained.
+   */
+  @Idempotent
+  public void removeAclEntries(String src, List<AclEntry> aclSpec)
+      throws IOException;
+
+  /**
+   * Removes all default ACL entries from files and directories.
+   */
+  @Idempotent
+  public void removeDefaultAcl(String src) throws IOException;
+
+  /**
+   * Removes all but the base ACL entries of files and directories. The entries
+   * for user, group, and others are retained for compatibility with permission
+   * bits.
+   */
+  @Idempotent
+  public void removeAcl(String src) throws IOException;
+
+  /**
+   * Fully replaces ACL of files and directories, discarding all existing
+   * entries.
+   */
+  @Idempotent
+  public void setAcl(String src, List<AclEntry> aclSpec) throws IOException;
+
+  /**
+   * Gets the ACLs of files and directories.
+   */
+  @Idempotent
+  public AclStatus getAclStatus(String src) throws IOException;
 }
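Commentary: the Javadoc above draws the crucial contrast: modifyAclEntries merges its spec into the existing ACL, while setAcl replaces the ACL wholesale. A hedged illustration at the FileSystem level; the path and entry values are invented:

```java
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;

public class MergeVsReplace {
  static void demo(FileSystem fs, Path path) throws Exception {
    List<AclEntry> oneEntry = Arrays.asList(
        new AclEntry.Builder().setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.USER).setName("diana")
            .setPermission(FsAction.READ).build());

    // Merge: only the named entry changes; everything else is retained.
    fs.modifyAclEntries(path, oneEntry);

    // Replace: the resulting ACL is exactly the given spec, so per the
    // Javadoc it must also carry the base user/group/other entries.
    // fs.setAcl(path, fullSpecIncludingBaseEntries);
  }
}
```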
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
index 478b661677c..2c4eeef3016 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
@@ -37,6 +37,18 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.GetAclStatusRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.GetAclStatusResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.ModifyAclEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.ModifyAclEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclEntriesResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveDefaultAclRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveDefaultAclResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.SetAclRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.SetAclResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockRequestProto;
@@ -270,6 +282,24 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
       VOID_SETBALANCERBANDWIDTH_RESPONSE =
       SetBalancerBandwidthResponseProto.newBuilder().build();
 
+  private static final SetAclResponseProto
+    VOID_SETACL_RESPONSE = SetAclResponseProto.getDefaultInstance();
+
+  private static final ModifyAclEntriesResponseProto
+    VOID_MODIFYACLENTRIES_RESPONSE = ModifyAclEntriesResponseProto
+      .getDefaultInstance();
+
+  private static final RemoveAclEntriesResponseProto
+    VOID_REMOVEACLENTRIES_RESPONSE = RemoveAclEntriesResponseProto
+      .getDefaultInstance();
+
+  private static final RemoveDefaultAclResponseProto
+    VOID_REMOVEDEFAULTACL_RESPONSE = RemoveDefaultAclResponseProto
+      .getDefaultInstance();
+
+  private static final RemoveAclResponseProto
+    VOID_REMOVEACL_RESPONSE = RemoveAclResponseProto.getDefaultInstance();
+
   /**
    * Constructor
    *
@@ -1135,4 +1165,73 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
       throw new ServiceException(e);
     }
   }
+
+  @Override
+  public ModifyAclEntriesResponseProto modifyAclEntries(
+      RpcController controller, ModifyAclEntriesRequestProto req)
+      throws ServiceException {
+    try {
+      server.modifyAclEntries(req.getSrc(), PBHelper.convertAclEntry(req.getAclSpecList()));
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    return VOID_MODIFYACLENTRIES_RESPONSE;
+  }
+
+  @Override
+  public RemoveAclEntriesResponseProto removeAclEntries(
+      RpcController controller, RemoveAclEntriesRequestProto req)
+      throws ServiceException {
+    try {
+      server.removeAclEntries(req.getSrc(),
+          PBHelper.convertAclEntry(req.getAclSpecList()));
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    return VOID_REMOVEACLENTRIES_RESPONSE;
+  }
+
+  @Override
+  public RemoveDefaultAclResponseProto removeDefaultAcl(
+      RpcController controller, RemoveDefaultAclRequestProto req)
+      throws ServiceException {
+    try {
+      server.removeDefaultAcl(req.getSrc());
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    return VOID_REMOVEDEFAULTACL_RESPONSE;
+  }
+
+  @Override
+  public RemoveAclResponseProto removeAcl(RpcController controller,
+      RemoveAclRequestProto req) throws ServiceException {
+    try {
+      server.removeAcl(req.getSrc());
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    return VOID_REMOVEACL_RESPONSE;
+  }
+
+  @Override
+  public SetAclResponseProto setAcl(RpcController controller,
+      SetAclRequestProto req) throws ServiceException {
+    try {
+      server.setAcl(req.getSrc(), PBHelper.convertAclEntry(req.getAclSpecList()));
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    return VOID_SETACL_RESPONSE;
+  }
+
+  @Override
+  public GetAclStatusResponseProto getAclStatus(RpcController controller,
+      GetAclStatusRequestProto req) throws ServiceException {
+    try {
+      return PBHelper.convert(server.getAclStatus(req.getSrc()));
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
index 0ceaba11b1c..803923fdb39 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
@@ -21,6 +21,7 @@ import java.io.Closeable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -32,6 +33,8 @@ import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
@@ -53,6 +56,12 @@ import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.GetAclStatusRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.ModifyAclEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclEntriesRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveAclRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.RemoveDefaultAclRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.SetAclRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddBlockRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddCachePoolRequestProto;
@@ -1151,4 +1160,76 @@ public class ClientNamenodeProtocolTranslatorPB implements
       throw ProtobufHelper.getRemoteException(e);
     }
   }
+
+  @Override
+  public void modifyAclEntries(String src, List<AclEntry> aclSpec)
+      throws IOException {
+    ModifyAclEntriesRequestProto req = ModifyAclEntriesRequestProto
+        .newBuilder().setSrc(src)
+        .addAllAclSpec(PBHelper.convertAclEntryProto(aclSpec)).build();
+    try {
+      rpcProxy.modifyAclEntries(null, req);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  @Override
+  public void removeAclEntries(String src, List<AclEntry> aclSpec)
+      throws IOException {
+    RemoveAclEntriesRequestProto req = RemoveAclEntriesRequestProto
+        .newBuilder().setSrc(src)
+        .addAllAclSpec(PBHelper.convertAclEntryProto(aclSpec)).build();
+    try {
+      rpcProxy.removeAclEntries(null, req);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  @Override
+  public void removeDefaultAcl(String src) throws IOException {
+    RemoveDefaultAclRequestProto req = RemoveDefaultAclRequestProto
+        .newBuilder().setSrc(src).build();
+    try {
+      rpcProxy.removeDefaultAcl(null, req);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  @Override
+  public void removeAcl(String src) throws IOException {
+    RemoveAclRequestProto req = RemoveAclRequestProto.newBuilder()
+        .setSrc(src).build();
+    try {
+      rpcProxy.removeAcl(null, req);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  @Override
+  public void setAcl(String src, List<AclEntry> aclSpec) throws IOException {
+    SetAclRequestProto req = SetAclRequestProto.newBuilder()
+        .setSrc(src)
+        .addAllAclSpec(PBHelper.convertAclEntryProto(aclSpec))
+        .build();
+    try {
+      rpcProxy.setAcl(null, req);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  @Override
+  public AclStatus getAclStatus(String src) throws IOException {
+    GetAclStatusRequestProto req = GetAclStatusRequestProto.newBuilder()
+        .setSrc(src).build();
+    try {
+      return PBHelper.convert(rpcProxy.getAclStatus(null, req));
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index 3280feaa671..e740615916b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
@@ -31,6 +31,11 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos;
@@ -59,6 +64,12 @@ import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclStatusProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.AclEntryScopeProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.AclEntryTypeProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.FsActionProto;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.GetAclStatusResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveEntryProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto;
@@ -185,6 +196,13 @@ public class PBHelper {
       RegisterCommandProto.newBuilder().build();
   private static final RegisterCommand REG_CMD = new RegisterCommand();
 
+  private static final AclEntryScope[] ACL_ENTRY_SCOPE_VALUES =
+      AclEntryScope.values();
+  private static final AclEntryType[] ACL_ENTRY_TYPE_VALUES =
+      AclEntryType.values();
+  private static final FsAction[] FSACTION_VALUES =
+      FsAction.values();
+
   private PBHelper() {
     /** Hidden constructor */
   }
@@ -193,6 +211,10 @@ public class PBHelper {
     return ByteString.copyFrom(bytes);
   }
 
+  private static <T extends Enum<T>, U extends Enum<U>> U castEnum(T from, U[] to) {
+    return to[from.ordinal()];
+  }
+
   public static NamenodeRole convert(NamenodeRoleProto role) {
     switch (role) {
     case NAMENODE:
@@ -717,8 +739,9 @@ public class PBHelper {
       return REG_CMD;
     case BlockIdCommand:
       return PBHelper.convert(proto.getBlkIdCmd());
+    default:
+      return null;
     }
-    return null;
   }
 
   public static BalancerBandwidthCommandProto convert(
@@ -1755,4 +1778,63 @@ public class PBHelper {
     assert size >= 0;
     return new ExactSizeInputStream(input, size);
   }
+
+  private static AclEntryScopeProto convert(AclEntryScope v) {
+    return AclEntryScopeProto.valueOf(v.ordinal());
+  }
+
+  private static AclEntryScope convert(AclEntryScopeProto v) {
+    return castEnum(v, ACL_ENTRY_SCOPE_VALUES);
+  }
+
+  private static AclEntryTypeProto convert(AclEntryType e) {
+    return AclEntryTypeProto.valueOf(e.ordinal());
+  }
+
+  private static AclEntryType convert(AclEntryTypeProto v) {
+    return castEnum(v, ACL_ENTRY_TYPE_VALUES);
+  }
+
+  private static FsActionProto convert(FsAction v) {
+    return FsActionProto.valueOf(v.ordinal());
+  }
+
+  private static FsAction convert(FsActionProto v) {
+    return castEnum(v, FSACTION_VALUES);
+  }
+
+  public static List<AclEntryProto> convertAclEntryProto(
+      List<AclEntry> aclSpec) {
+    ArrayList<AclEntryProto> r = Lists.newArrayListWithCapacity(aclSpec.size());
+    for (AclEntry e : aclSpec) {
+      r.add(AclEntryProto.newBuilder().setType(convert(e.getType()))
+          .setName(e.getName()).setPermissions(convert(e.getPermission()))
+          .setScope(convert(e.getScope())).build());
+    }
+    return r;
+  }
+
+  public static List<AclEntry> convertAclEntry(List<AclEntryProto> aclSpec) {
+    ArrayList<AclEntry> r = Lists.newArrayListWithCapacity(aclSpec.size());
+    for (AclEntryProto e : aclSpec) {
+      r.add(new AclEntry.Builder().setType(convert(e.getType()))
+          .setName(e.getName()).setPermission(convert(e.getPermissions()))
+          .setScope(convert(e.getScope())).build());
+    }
+    return r;
+  }
+
+  public static AclStatus convert(GetAclStatusResponseProto e) {
+    AclStatusProto r = e.getResult();
+    return new AclStatus.Builder().owner(r.getOwner()).group(r.getGroup())
+        .stickyBit(r.getSticky())
+        .addEntries(convertAclEntry(r.getEntriesList())).build();
+  }
+
+  public static GetAclStatusResponseProto convert(AclStatus e) {
+    AclStatusProto r = AclStatusProto.newBuilder().setOwner(e.getOwner())
+        .setGroup(e.getGroup()).setSticky(e.isStickyBit())
+        .addAllEntries(convertAclEntryProto(e.getEntries())).build();
+    return GetAclStatusResponseProto.newBuilder().setResult(r).build();
+  }
 }
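Commentary: the enum converters above depend on a positional invariant: each proto enum declares its constants with numbers 0x0, 0x1, ... in exactly the order of the corresponding Java enum. Java-to-proto is then `Proto.valueOf(javaEnum.ordinal())`, and proto-to-Java indexes a cached values() array via castEnum. A standalone illustration of the technique with hypothetical enums:

```java
import java.util.Arrays;

// Illustrates the ordinal-based mapping PBHelper uses. It is only safe
// because both enums declare corresponding constants in the same order;
// the hypothetical pair below mirrors that setup.
public class CastEnumExample {
  enum Color { RED, GREEN, BLUE }        // stand-in for the Java-side enum
  enum ColorProto { RED, GREEN, BLUE }   // stand-in for the proto-side enum

  private static final ColorProto[] PROTO_VALUES = ColorProto.values();
  private static final Color[] JAVA_VALUES = Color.values();

  // Same shape as PBHelper.castEnum: index the target's values() array
  // with the source constant's ordinal.
  private static <T extends Enum<T>, U extends Enum<U>> U castEnum(
      T from, U[] to) {
    return to[from.ordinal()];
  }

  public static void main(String[] args) {
    ColorProto p = castEnum(Color.GREEN, PROTO_VALUES);  // -> GREEN
    Color c = castEnum(p, JAVA_VALUES);                  // round-trips
    System.out.println(p + " " + c + " " + Arrays.toString(PROTO_VALUES));
  }
}
```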
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index bb5bcfec092..665d641be0d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -138,6 +138,8 @@ import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
@@ -7312,6 +7314,30 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
     return results;
   }
 
+  void modifyAclEntries(String src, Iterable<AclEntry> aclSpec) {
+    throw new UnsupportedOperationException("Unimplemented");
+  }
+
+  void removeAclEntries(String src, Iterable<AclEntry> aclSpec) {
+    throw new UnsupportedOperationException("Unimplemented");
+  }
+
+  void removeDefaultAcl(String src) {
+    throw new UnsupportedOperationException("Unimplemented");
+  }
+
+  void removeAcl(String src) {
+    throw new UnsupportedOperationException("Unimplemented");
+  }
+
+  void setAcl(String src, Iterable<AclEntry> aclSpec) {
+    throw new UnsupportedOperationException("Unimplemented");
+  }
+
+  AclStatus getAclStatus(String src) {
+    throw new UnsupportedOperationException("Unimplemented");
+  }
+
   /**
    * Default AuditLogger implementation; used when no access logger is
    * defined in the config file. It can also be explicitly listed in the
It can also be explicitly listed in the diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java index cb235159a50..8755fe6bf29 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java @@ -47,6 +47,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries; +import org.apache.hadoop.fs.permission.AclEntry; +import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.ha.HAServiceStatus; @@ -1279,4 +1281,36 @@ class NameNodeRpcServer implements NamenodeProtocols { throws IOException { return namesystem.listCachePools(prevKey != null ? prevKey : ""); } + + @Override + public void modifyAclEntries(String src, List aclSpec) + throws IOException { + namesystem.modifyAclEntries(src, aclSpec); + } + + @Override + public void removeAclEntries(String src, List aclSpec) + throws IOException { + namesystem.removeAclEntries(src, aclSpec); + } + + @Override + public void removeDefaultAcl(String src) throws IOException { + namesystem.removeDefaultAcl(src); + } + + @Override + public void removeAcl(String src) throws IOException { + namesystem.removeAcl(src); + } + + @Override + public void setAcl(String src, List aclSpec) throws IOException { + namesystem.setAcl(src, aclSpec); + } + + @Override + public AclStatus getAclStatus(String src) throws IOException { + return namesystem.getAclStatus(src); + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto index bdd024973e1..86ec16ea881 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto @@ -30,6 +30,7 @@ package hadoop.hdfs; import "Security.proto"; import "hdfs.proto"; +import "acl.proto"; /** * The ClientNamenodeProtocol Service defines the interface between a client @@ -709,4 +710,16 @@ service ClientNamenodeProtocol { returns(GetSnapshotDiffReportResponseProto); rpc isFileClosed(IsFileClosedRequestProto) returns(IsFileClosedResponseProto); + rpc modifyAclEntries(ModifyAclEntriesRequestProto) + returns(ModifyAclEntriesResponseProto); + rpc removeAclEntries(RemoveAclEntriesRequestProto) + returns(RemoveAclEntriesResponseProto); + rpc removeDefaultAcl(RemoveDefaultAclRequestProto) + returns(RemoveDefaultAclResponseProto); + rpc removeAcl(RemoveAclRequestProto) + returns(RemoveAclResponseProto); + rpc setAcl(SetAclRequestProto) + returns(SetAclResponseProto); + rpc getAclStatus(GetAclStatusRequestProto) + returns(GetAclStatusResponseProto); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/acl.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/acl.proto new file mode 100644 index 00000000000..0d01dd9e634 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/acl.proto @@ -0,0 +1,107 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor 
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hdfs.protocol.proto";
+option java_outer_classname = "AclProtos";
+option java_generate_equals_and_hash = true;
+package hadoop.hdfs;
+
+import "hdfs.proto";
+
+message AclEntryProto {
+  enum AclEntryScopeProto {
+    ACCESS = 0x0;
+    DEFAULT = 0x1;
+  }
+
+  enum AclEntryTypeProto {
+    USER = 0x0;
+    GROUP = 0x1;
+    MASK = 0x2;
+    OTHER = 0x3;
+  }
+
+  enum FsActionProto {
+    NONE          = 0x0;
+    EXECUTE       = 0x1;
+    WRITE         = 0x2;
+    WRITE_EXECUTE = 0x3;
+    READ          = 0x4;
+    READ_EXECUTE  = 0x5;
+    READ_WRITE    = 0x6;
+    PERM_ALL      = 0x7;
+  }
+
+  required AclEntryTypeProto type = 1;
+  required AclEntryScopeProto scope = 2;
+  required FsActionProto permissions = 3;
+  required string name = 4;
+}
+
+message AclStatusProto {
+  required string owner = 1;
+  required string group = 2;
+  required bool sticky = 3;
+  repeated AclEntryProto entries = 4;
+}
+
+message ModifyAclEntriesRequestProto {
+  required string src = 1;
+  repeated AclEntryProto aclSpec = 2;
+}
+
+message ModifyAclEntriesResponseProto {
+}
+
+message RemoveAclRequestProto {
+  required string src = 1;
+}
+
+message RemoveAclResponseProto {
+}
+
+message RemoveAclEntriesRequestProto {
+  required string src = 1;
+  repeated AclEntryProto aclSpec = 2;
+}
+
+message RemoveAclEntriesResponseProto {
+}
+
+message RemoveDefaultAclRequestProto {
+  required string src = 1;
+}
+
+message RemoveDefaultAclResponseProto {
+}
+
+message SetAclRequestProto {
+  required string src = 1;
+  repeated AclEntryProto aclSpec = 2;
+}
+
+message SetAclResponseProto {
+}
+
+message GetAclStatusRequestProto {
+  required string src = 1;
+}
+
+message GetAclStatusResponseProto {
+  required AclStatusProto result = 1;
+}
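Commentary: once the pom.xml change above feeds acl.proto to protoc, these messages surface as inner classes of AclProtos. A hedged sketch of assembling a request with the generated builders; the values are illustrative, and the translator classes above do this same assembly through PBHelper:

```java
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.AclEntryScopeProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.AclEntryTypeProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.FsActionProto;
import org.apache.hadoop.hdfs.protocol.proto.AclProtos.SetAclRequestProto;

public class AclProtoExample {
  public static void main(String[] args) {
    AclEntryProto entry = AclEntryProto.newBuilder()
        .setType(AclEntryTypeProto.USER)
        .setScope(AclEntryScopeProto.ACCESS)
        .setPermissions(FsActionProto.READ_WRITE)
        .setName("bruce")        // required by this revision of the .proto
        .build();
    SetAclRequestProto req = SetAclRequestProto.newBuilder()
        .setSrc("/data")         // illustrative path
        .addAclSpec(entry)       // repeated field: addAclSpec/addAllAclSpec
        .build();
    System.out.println(req);
  }
}
```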
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
index b6c2f6ec29e..8ad1086c4b2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
@@ -24,6 +24,11 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -72,6 +77,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
+import org.junit.Assert;
 import org.junit.Test;
 
 import com.google.common.base.Joiner;
@@ -533,4 +539,28 @@ public class TestPBHelper {
     assertEquals(PBHelper.convert(DataChecksum.Type.CRC32C),
         HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C);
   }
+
+  @Test
+  public void testAclEntryProto() {
+    AclEntry e = new AclEntry.Builder().setName("test")
+        .setPermission(FsAction.READ_EXECUTE).setScope(AclEntryScope.DEFAULT)
+        .setType(AclEntryType.OTHER).build();
+    AclEntry[] lists = new AclEntry[] { e };
+
+    Assert.assertArrayEquals(
+        lists,
+        Lists.newArrayList(
+            PBHelper.convertAclEntry(PBHelper.convertAclEntryProto(Lists
+                .newArrayList(e)))).toArray());
+  }
+
+  @Test
+  public void testAclStatusProto() {
+    AclEntry e = new AclEntry.Builder().setName("test")
+        .setPermission(FsAction.READ_EXECUTE).setScope(AclEntryScope.DEFAULT)
+        .setType(AclEntryType.OTHER).build();
+    AclStatus s = new AclStatus.Builder().owner("foo").group("bar").addEntry(e)
+        .build();
+    Assert.assertEquals(s, PBHelper.convert(PBHelper.convert(s)));
+  }
 }