HDFS-13744. OIV tool should better handle control characters present in file or directory names. Contributed by Zsolt Venczel.

Sean Mackrory 2018-09-07 12:34:31 -06:00
parent 3dc2988a37
commit 410dd3faa5
2 changed files with 32 additions and 3 deletions

PBImageDelimitedTextWriter.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode;
@@ -71,9 +73,19 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     buffer.append(field);
   }
 
+  static final String CRLF = StringUtils.CR + StringUtils.LF;
+
   private void append(StringBuffer buffer, String field) {
     buffer.append(delimiter);
-    buffer.append(field);
+
+    String escapedField = StringEscapeUtils.escapeCsv(field);
+    if (escapedField.contains(CRLF)) {
+      escapedField = escapedField.replace(CRLF, "%x0D%x0A");
+    } else if (escapedField.contains(StringUtils.LF)) {
+      escapedField = escapedField.replace(StringUtils.LF, "%x0A");
+    }
+
+    buffer.append(escapedField);
   }
 
   @Override
@@ -82,7 +94,7 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     String inodeName = inode.getName().toStringUtf8();
     Path path = new Path(parent.isEmpty() ? "/" : parent,
         inodeName.isEmpty() ? "/" : inodeName);
-    buffer.append(path.toString());
+    append(buffer, path.toString());
     PermissionStatus p = null;
     boolean isDir = false;
     boolean hasAcl = false;
@@ -136,7 +148,7 @@ public class PBImageDelimitedTextWriter extends PBImageTextWriter {
     append(buffer, dirString + p.getPermission().toString() + aclString);
     append(buffer, p.getUserName());
     append(buffer, p.getGroupName());
-    return buffer.toString();
+    return buffer.substring(1);
   }
 
   @Override
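
For context, a minimal standalone sketch of the escaping behavior added above. The class and method names here are illustrative only (not part of the patch); it assumes commons-lang3 and commons-text on the classpath.

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.StringEscapeUtils;

public class DelimitedFieldEscapeSketch {
  static final String CRLF = StringUtils.CR + StringUtils.LF;

  // Mirrors the logic added to PBImageDelimitedTextWriter#append:
  // CSV-escape the field, then replace raw line breaks with printable tokens
  // so one inode always maps to one line of delimited output.
  static String escapeField(String field) {
    String escaped = StringEscapeUtils.escapeCsv(field);
    if (escaped.contains(CRLF)) {
      escaped = escaped.replace(CRLF, "%x0D%x0A");
    } else if (escaped.contains(StringUtils.LF)) {
      escaped = escaped.replace(StringUtils.LF, "%x0A");
    }
    return escaped;
  }

  public static void main(String[] args) {
    // escapeCsv quotes the value because it contains a line break; the LF inside
    // is then rewritten, matching the expectation in the test change below:
    // /dirContainingNewLineChar\nhere -> "/dirContainingNewLineChar%x0Ahere"
    System.out.println(escapeField("/dirContainingNewLineChar" + StringUtils.LF + "here"));
  }
}

Note that escapeCsv only quotes fields containing commas, double quotes, or line breaks; the extra replace step then makes the embedded line breaks visible so each record stays on a single output line.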

TestOfflineImageViewer.java

@@ -27,6 +27,8 @@ import static org.apache.hadoop.fs.permission.AclEntryType.USER;
 import static org.apache.hadoop.fs.permission.FsAction.ALL;
 import static org.apache.hadoop.fs.permission.FsAction.EXECUTE;
 import static org.apache.hadoop.fs.permission.FsAction.READ_EXECUTE;
+
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
 import static org.apache.hadoop.hdfs.server.namenode.AclTestHelpers.aclEntry;
@@ -209,6 +211,21 @@ public class TestOfflineImageViewer {
     writtenFiles.put(entityRefXMLDir.toString(),
         hdfs.getFileStatus(entityRefXMLDir));
 
+    //Create directories with new line characters
+    Path newLFDir = new Path("/dirContainingNewLineChar"
+        + StringUtils.LF + "here");
+    hdfs.mkdirs(newLFDir);
+    dirCount++;
+    writtenFiles.put("\"/dirContainingNewLineChar%x0Ahere\"",
+        hdfs.getFileStatus(newLFDir));
+
+    Path newCRLFDir = new Path("/dirContainingNewLineChar"
+        + PBImageDelimitedTextWriter.CRLF + "here");
+    hdfs.mkdirs(newCRLFDir);
+    dirCount++;
+    writtenFiles.put("\"/dirContainingNewLineChar%x0D%x0Ahere\"",
+        hdfs.getFileStatus(newCRLFDir));
+
     //Create a directory with sticky bits
     Path stickyBitDir = new Path("/stickyBit");
     hdfs.mkdirs(stickyBitDir);