HDFS-9795. OIV Delimited should show which files are ACL-enabled (lei)
parent 8fdef0bd9d
commit c7fcec24b8
@@ -223,6 +223,8 @@ Trunk (Unreleased)
     HDFS-9260. Improve the performance and GC friendliness of NameNode startup
     and full block reports (Staffan Friberg via cmccabe)
 
+    HDFS-9795. OIV Delimited should show which files are ACL-enabled (lei)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -85,11 +85,13 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     buffer.append(path.toString());
     PermissionStatus p = null;
     boolean isDir = false;
+    boolean hasAcl = false;
 
     switch (inode.getType()) {
     case FILE:
       INodeFile file = inode.getFile();
       p = getPermission(file.getPermission());
+      hasAcl = file.hasAcl() && file.getAcl().getEntriesCount() > 0;
       append(buffer, file.getReplication());
       append(buffer, formatDate(file.getModificationTime()));
       append(buffer, formatDate(file.getAccessTime()));
@@ -102,6 +104,7 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     case DIRECTORY:
       INodeDirectory dir = inode.getDirectory();
       p = getPermission(dir.getPermission());
+      hasAcl = dir.hasAcl() && dir.getAcl().getEntriesCount() > 0;
       append(buffer, 0);  // Replication
       append(buffer, formatDate(dir.getModificationTime()));
       append(buffer, formatDate(0));  // Access time.
@@ -129,7 +132,8 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     }
     assert p != null;
     String dirString = isDir ? "d" : "-";
-    append(buffer, dirString + p.getPermission().toString());
+    String aclString = hasAcl ? "+" : "";
+    append(buffer, dirString + p.getPermission().toString() + aclString);
     append(buffer, p.getUserName());
     append(buffer, p.getGroupName());
     return buffer.toString();
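In plain terms, this hunk makes the Delimited permission column carry a trailing "+" whenever the inode has ACL entries, matching the ls -l convention. A minimal, self-contained sketch of that formatting rule, using a hypothetical helper and sample values (none of these names are from the patch):

    // Sketch only: '+' marks ACL-enabled inodes, 'd' marks directories.
    public class PermissionColumnSketch {
      static String permissionColumn(boolean isDir, String rwx, boolean hasAcl) {
        String dirString = isDir ? "d" : "-";
        String aclString = hasAcl ? "+" : "";
        return dirString + rwx + aclString;
      }

      public static void main(String[] args) {
        System.out.println(permissionColumn(true, "rwxr-xr-x", true));   // drwxr-xr-x+
        System.out.println(permissionColumn(false, "rw-r--r--", false)); // -rw-r--r--
      }
    }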
@@ -17,10 +17,13 @@
  */
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.PrintStream;
 import java.io.RandomAccessFile;
 import java.io.StringReader;
@@ -226,4 +229,37 @@ public class TestOfflineImageViewerForAcl {
     final String xml = output.toString();
     parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());
   }
+
+  @Test
+  public void testPBDelimitedWriterForAcl() throws Exception {
+    final String DELIMITER = "\t";
+    ByteArrayOutputStream output = new ByteArrayOutputStream();
+
+    try (PrintStream o = new PrintStream(output)) {
+      PBImageDelimitedTextWriter v =
+          new PBImageDelimitedTextWriter(o, DELIMITER, "");  // run in memory.
+      v.visit(new RandomAccessFile(originalFsimage, "r"));
+    }
+
+    try (
+        ByteArrayInputStream input =
+            new ByteArrayInputStream(output.toByteArray());
+        BufferedReader reader =
+            new BufferedReader(new InputStreamReader(input))) {
+      String line;
+      boolean header = true;
+      while ((line = reader.readLine()) != null) {
+        String[] fields = line.split(DELIMITER);
+        if (!header) {
+          String filePath = fields[0];
+          String permission = fields[9];
+          if (!filePath.equals("/")) {
+            boolean hasAcl = !filePath.toLowerCase().contains("noacl");
+            assertEquals(hasAcl, permission.endsWith("+"));
+          }
+        }
+        header = false;
+      }
+    }
+  }
 }
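The new test renders the fsimage prepared by this test class through PBImageDelimitedTextWriter in memory, then asserts that exactly the paths whose names do not contain "noAcl" end with a "+" in the permission field (fields[9]). Outside the test, comparable delimited output can be generated with the offline image viewer, e.g. hdfs oiv -p Delimited -i <fsimage> -o <output>; the processor name and flags here follow standard OIV usage and may vary by release.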