HDFS-6232. OfflineEditsViewer throws a NPE on edits containing ACL modifications (ajisakaa via cmccabe)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1586791 13f79535-47bb-0310-9956-ffa450edef68
commit 35a5a32f17
parent 0f6040a27b
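The patch touches three places: the release-note entry in CHANGES.txt, the ACL XML writer/reader in FSEditLogOp, and a DFSTestUtil helper that generates edit-log operations (OP_SET_ACL among them). Background on the failure: the OfflineEditsViewer converts a binary edit log to XML, and an ACL op can contain unnamed entries (the base user, group, and other entries of an ACL), for which AclEntry.getName() returns null; serializing the name unconditionally is what raised the NullPointerException. A minimal sketch of such an entry, built with the same AclEntry.Builder calls that appear in the test change below (the class name, main method, and printed check are illustrative, not part of the patch):

    import org.apache.hadoop.fs.permission.AclEntry;
    import org.apache.hadoop.fs.permission.AclEntryScope;
    import org.apache.hadoop.fs.permission.AclEntryType;
    import org.apache.hadoop.fs.permission.FsAction;

    // Illustrative only: the base "user" class entry of an ACL carries no name.
    public class UnnamedAclEntryExample {
      public static void main(String[] args) {
        AclEntry unnamed = new AclEntry.Builder()
            .setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.USER)
            .setPermission(FsAction.READ_WRITE)
            .build();
        // Before this patch the XML writer passed this null straight to
        // XMLUtils.addSaxString, which is where the viewer blew up.
        System.out.println("name = " + unnamed.getName());  // prints: name = null
      }
    }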
@@ -134,6 +134,9 @@ Release 2.4.1 - UNRELEASED
     HDFS-6229. Race condition in failover can cause RetryCache fail to work.
     (jing9)
 
+    HDFS-6232. OfflineEditsViewer throws a NPE on edits containing ACL
+    modifications (ajisakaa via cmccabe)
+
 Release 2.4.0 - 2014-04-07
 
   INCOMPATIBLE CHANGES
@@ -4084,7 +4084,9 @@ public abstract class FSEditLogOp {
       contentHandler.startElement("", "", "ENTRY", new AttributesImpl());
       XMLUtils.addSaxString(contentHandler, "SCOPE", e.getScope().name());
       XMLUtils.addSaxString(contentHandler, "TYPE", e.getType().name());
-      XMLUtils.addSaxString(contentHandler, "NAME", e.getName());
+      if (e.getName() != null) {
+        XMLUtils.addSaxString(contentHandler, "NAME", e.getName());
+      }
       fsActionToXml(contentHandler, e.getPermission());
       contentHandler.endElement("", "", "ENTRY");
     }
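A note on the write-side change above: instead of writing an empty NAME element for unnamed entries, the serializer now omits the element entirely, presumably so that base user/group/other entries stay distinguishable from entries that really carry a name; the matching read-side change in the next hunk makes the parser tolerate the missing element.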
@@ -4100,7 +4102,7 @@ public abstract class FSEditLogOp {
       AclEntry e = new AclEntry.Builder()
         .setScope(AclEntryScope.valueOf(s.getValue("SCOPE")))
         .setType(AclEntryType.valueOf(s.getValue("TYPE")))
-        .setName(s.getValue("NAME"))
+        .setName(s.getValueOrNull("NAME"))
         .setPermission(fsActionFromXml(s)).build();
       aclEntries.add(e);
     }
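On the read side, the only change is getValue to getValueOrNull for the NAME element. Assuming the usual contract of these Stanza helpers in XMLUtils (getValue rejects a missing element, while getValueOrNull returns null for it), an ENTRY with no NAME now parses back into an unnamed AclEntry. A small sketch, not part of the patch, of why passing that null into the builder is harmless: an entry built with setName(null) matches one never given a name.

    import org.apache.hadoop.fs.permission.AclEntry;
    import org.apache.hadoop.fs.permission.AclEntryScope;
    import org.apache.hadoop.fs.permission.AclEntryType;
    import org.apache.hadoop.fs.permission.FsAction;

    public class NullNameSketch {
      public static void main(String[] args) {
        // What the parser now produces for an ENTRY with no NAME element.
        AclEntry parsed = new AclEntry.Builder()
            .setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.GROUP)
            .setName(null)
            .setPermission(FsAction.WRITE)
            .build();
        // The same entry built without ever calling setName.
        AclEntry unnamed = new AclEntry.Builder()
            .setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.GROUP)
            .setPermission(FsAction.WRITE)
            .build();
        System.out.println(parsed.equals(unnamed));  // expected: true
      }
    }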
@@ -32,6 +32,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster.NameNodeInfo;
 import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
@@ -57,7 +60,6 @@ import org.apache.hadoop.hdfs.server.namenode.ha
     .ConfiguredFailoverProxyProvider;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
-import org.apache.hadoop.hdfs.web.TestWebHDFSForHA;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.net.NetUtils;
@@ -1124,7 +1126,33 @@ public class DFSTestUtil {
     // OP_REMOVE_CACHE_POOL
     filesystem.removeCachePool("pool1");
     // OP_SET_ACL
-    filesystem.setAcl(pathConcatTarget, Lists.<AclEntry> newArrayList());
+    List<AclEntry> aclEntryList = Lists.newArrayList();
+    aclEntryList.add(
+        new AclEntry.Builder()
+        .setPermission(FsAction.READ_WRITE)
+        .setScope(AclEntryScope.ACCESS)
+        .setType(AclEntryType.USER)
+        .build());
+    aclEntryList.add(
+        new AclEntry.Builder()
+        .setName("user")
+        .setPermission(FsAction.READ_WRITE)
+        .setScope(AclEntryScope.ACCESS)
+        .setType(AclEntryType.USER)
+        .build());
+    aclEntryList.add(
+        new AclEntry.Builder()
+        .setPermission(FsAction.WRITE)
+        .setScope(AclEntryScope.ACCESS)
+        .setType(AclEntryType.GROUP)
+        .build());
+    aclEntryList.add(
+        new AclEntry.Builder()
+        .setPermission(FsAction.NONE)
+        .setScope(AclEntryScope.ACCESS)
+        .setType(AclEntryType.OTHER)
+        .build());
+    filesystem.setAcl(pathConcatTarget, aclEntryList);
   }
 
   public static void abortStream(DFSOutputStream out) throws IOException {
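The test change above replaces the empty ACL that DFSTestUtil previously set with one containing both named and unnamed entries, so an edit log generated by this helper now exercises both the guarded NAME serialization and the getValueOrNull parse path. The same condition can be reproduced by hand against a cluster: set an ACL on a file (hdfs dfs -setfacl), roll the edit log (hdfs dfsadmin -rollEdits), and convert the finalized segment with the viewer, e.g. hdfs oev -i <edits file> -o edits.xml, where <edits file> is a placeholder for a real edits segment; before this patch that conversion failed with the NullPointerException described in the commit summary.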