From e45407128d4e9a9804c777c8f845ad41e1280177 Mon Sep 17 00:00:00 2001
From: Kihwal Lee <kihwal@apache.org>
Date: Wed, 14 Oct 2020 17:26:04 -0500
Subject: [PATCH] HDFS-15628. HttpFS server throws NPE if a file is a symlink.
 Contributed by Ahmed Hussein.

---
 .../fs/http/client/HttpFSFileSystem.java      |  9 ++++-
 .../hadoop/fs/http/server/FSOperations.java   | 14 ++++++-
 .../fs/http/client/BaseTestHttpFSWith.java    | 37 +++++++++++++++++++
 3 files changed, 56 insertions(+), 4 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
index ccd71d94249..c9074639656 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -199,6 +199,7 @@ public class HttpFSFileSystem extends FileSystem
   public static final String XATTR_VALUE_JSON = "value";
   public static final String XATTRNAMES_JSON = "XAttrNames";
   public static final String ECPOLICY_JSON = "ecPolicyObj";
+  public static final String SYMLINK_JSON = "symlink";
 
   public static final String FILE_CHECKSUM_JSON = "FileChecksum";
   public static final String CHECKSUM_ALGORITHM_JSON = "algorithm";
@@ -1101,6 +1102,9 @@ public class HttpFSFileSystem extends FileSystem
     String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
     Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
     FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON));
+    String symLinkValue =
+        type == FILE_TYPE.SYMLINK ? (String) json.get(SYMLINK_JSON) : null;
+    Path symLink = symLinkValue == null ? null : new Path(symLinkValue);
     long len = (Long) json.get(LENGTH_JSON);
     String owner = (String) json.get(OWNER_JSON);
     String group = (String) json.get(GROUP_JSON);
@@ -1125,11 +1129,12 @@ public class HttpFSFileSystem extends FileSystem
           new FsPermissionExtension(permission, aBit, eBit, ecBit);
       FileStatus fileStatus = new FileStatus(len, FILE_TYPE.DIRECTORY == type,
           replication, blockSize, mTime, aTime, deprecatedPerm, owner, group,
-          null, path, FileStatus.attributes(aBit, eBit, ecBit, seBit));
+          symLink, path, FileStatus.attributes(aBit, eBit, ecBit, seBit));
       return fileStatus;
     } else {
       return new FileStatus(len, FILE_TYPE.DIRECTORY == type,
-          replication, blockSize, mTime, aTime, permission, owner, group, path);
+          replication, blockSize, mTime, aTime, permission, owner, group,
+          symLink, path);
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
index 6effb833485..67d4761543b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.fs.XAttrCodec;
 import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.fs.http.client.HttpFSFileSystem.FILE_TYPE;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsAction;
@@ -111,8 +112,17 @@ public class FSOperations {
     Map<String, Object> json = new LinkedHashMap<String, Object>();
     json.put(HttpFSFileSystem.PATH_SUFFIX_JSON,
         (emptyPathSuffix) ? "" : fileStatus.getPath().getName());
-    json.put(HttpFSFileSystem.TYPE_JSON,
-        HttpFSFileSystem.FILE_TYPE.getType(fileStatus).toString());
+    FILE_TYPE fileType = HttpFSFileSystem.FILE_TYPE.getType(fileStatus);
+    json.put(HttpFSFileSystem.TYPE_JSON, fileType.toString());
+    if (fileType.equals(FILE_TYPE.SYMLINK)) {
+      // put the symlink into Json
+      try {
+        json.put(HttpFSFileSystem.SYMLINK_JSON,
+            fileStatus.getSymlink().getName());
+      } catch (IOException e) {
+        // Can't happen.
+      }
+    }
     json.put(HttpFSFileSystem.LENGTH_JSON, fileStatus.getLen());
     json.put(HttpFSFileSystem.OWNER_JSON, fileStatus.getOwner());
     json.put(HttpFSFileSystem.GROUP_JSON, fileStatus.getGroup());
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
index 90e1721746d..aeb0e7249c2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
@@ -366,6 +366,42 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
     fs.close();
   }
 
+  private void testListSymLinkStatus() throws Exception {
+    if (isLocalFS()) {
+      // do not test the symlink for local FS.
+      return;
+    }
+    FileSystem fs = FileSystem.get(getProxiedFSConf());
+    boolean isWebhdfs = fs instanceof WebHdfsFileSystem;
+    Path path =
+        new Path(getProxiedFSTestDir() + "-symlink", "targetFoo.txt");
+    OutputStream os = fs.create(path);
+    os.write(1);
+    os.close();
+    Path linkPath =
+        new Path(getProxiedFSTestDir()+ "-symlink", "symlinkFoo.txt");
+    fs.createSymlink(path, linkPath, false);
+    fs = getHttpFSFileSystem();
+    FileStatus linkStatus = fs.getFileStatus(linkPath);
+    FileStatus status1 = fs.getFileStatus(path);
+
+    FileStatus[] stati = fs.listStatus(path.getParent());
+    assertEquals(2, stati.length);
+
+    int countSymlink = 0;
+    for (int i = 0; i < stati.length; i++) {
+      FileStatus fStatus = stati[i];
+      countSymlink += fStatus.isSymlink() ? 1 : 0;
+    }
+    assertEquals(1, countSymlink);
+
+    assertFalse(status1.isSymlink());
+    if (isWebhdfs) {
+      assertTrue(linkStatus.isSymlink());
+    }
+    fs.close();
+  }
+
   private void testListStatus() throws Exception {
     FileSystem fs = FileSystem.get(getProxiedFSConf());
     boolean isDFS = fs instanceof DistributedFileSystem;
@@ -1191,6 +1227,7 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
       break;
     case LIST_STATUS:
       testListStatus();
+      testListSymLinkStatus();
       break;
     case WORKING_DIRECTORY:
       testWorkingdirectory();