HDFS-15628. HttpFS server throws NPE if a file is a symlink. Contributed by Ahmed Hussein.

(cherry picked from commit e45407128d)
This commit is contained in:
Kihwal Lee 2020-10-16 11:41:17 -05:00
parent 9aefc1759b
commit ffe6e39c96
3 changed files with 57 additions and 4 deletions

View File

@ -182,6 +182,7 @@ public class HttpFSFileSystem extends FileSystem
public static final String XATTR_NAME_JSON = "name";
public static final String XATTR_VALUE_JSON = "value";
public static final String XATTRNAMES_JSON = "XAttrNames";
public static final String SYMLINK_JSON = "symlink";
public static final String FILE_CHECKSUM_JSON = "FileChecksum";
public static final String CHECKSUM_ALGORITHM_JSON = "algorithm";
@ -1075,6 +1076,9 @@ public class HttpFSFileSystem extends FileSystem
String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON));
String symLinkValue =
type == FILE_TYPE.SYMLINK ? (String) json.get(SYMLINK_JSON) : null;
Path symLink = symLinkValue == null ? null : new Path(symLinkValue);
long len = (Long) json.get(LENGTH_JSON);
String owner = (String) json.get(OWNER_JSON);
String group = (String) json.get(GROUP_JSON);
@ -1099,11 +1103,12 @@ public class HttpFSFileSystem extends FileSystem
new FsPermissionExtension(permission, aBit, eBit, ecBit);
FileStatus fileStatus = new FileStatus(len, FILE_TYPE.DIRECTORY == type,
replication, blockSize, mTime, aTime, deprecatedPerm, owner, group,
null, path, FileStatus.attributes(aBit, eBit, ecBit, seBit));
symLink, path, FileStatus.attributes(aBit, eBit, ecBit, seBit));
return fileStatus;
} else {
return new FileStatus(len, FILE_TYPE.DIRECTORY == type,
replication, blockSize, mTime, aTime, permission, owner, group, path);
replication, blockSize, mTime, aTime, permission, owner, group,
symLink, path);
}
}

View File

@ -32,6 +32,7 @@ import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.XAttrCodec;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem.FILE_TYPE;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;
@ -103,8 +104,17 @@ public class FSOperations {
Map<String, Object> json = new LinkedHashMap<String, Object>();
json.put(HttpFSFileSystem.PATH_SUFFIX_JSON,
(emptyPathSuffix) ? "" : fileStatus.getPath().getName());
json.put(HttpFSFileSystem.TYPE_JSON,
HttpFSFileSystem.FILE_TYPE.getType(fileStatus).toString());
FILE_TYPE fileType = HttpFSFileSystem.FILE_TYPE.getType(fileStatus);
json.put(HttpFSFileSystem.TYPE_JSON, fileType.toString());
if (fileType.equals(FILE_TYPE.SYMLINK)) {
// put the symlink into Json
try {
json.put(HttpFSFileSystem.SYMLINK_JSON,
fileStatus.getSymlink().getName());
} catch (IOException e) {
// Can't happen.
}
}
json.put(HttpFSFileSystem.LENGTH_JSON, fileStatus.getLen());
json.put(HttpFSFileSystem.OWNER_JSON, fileStatus.getOwner());
json.put(HttpFSFileSystem.GROUP_JSON, fileStatus.getGroup());

View File

@ -44,6 +44,7 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.HFSTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper;
@ -344,6 +345,42 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
fs.close();
}
/**
 * Verifies that HttpFS can list a directory containing a symlink without
 * failing (regression test for HDFS-15628, where a symlink entry caused an
 * NPE), and that symlink-ness is reported correctly by getFileStatus /
 * listStatus.
 *
 * @throws Exception if the target file, the symlink, or any of the
 *         FileSystem calls fail.
 */
private void testListSymLinkStatus() throws Exception {
  if (isLocalFS()) {
    // do not test the symlink for local FS.
    return;
  }
  FileSystem fs = FileSystem.get(getProxiedFSConf());
  boolean isWebhdfs = fs instanceof WebHdfsFileSystem;
  Path path =
      new Path(getProxiedFSTestDir() + "-symlink", "targetFoo.txt");
  // try-with-resources ensures the stream is closed even if write() throws,
  // so a failed write cannot leak the handle and poison later tests.
  try (OutputStream os = fs.create(path)) {
    os.write(1);
  }
  Path linkPath =
      new Path(getProxiedFSTestDir() + "-symlink", "symlinkFoo.txt");
  fs.createSymlink(path, linkPath, false);
  // Re-query through the HttpFS client: this is the code path that used to
  // throw an NPE when a listing contained a symlink.
  fs = getHttpFSFileSystem();
  FileStatus linkStatus = fs.getFileStatus(linkPath);
  FileStatus status1 = fs.getFileStatus(path);
  FileStatus[] stati = fs.listStatus(path.getParent());
  assertEquals(2, stati.length);
  // Exactly one of the two entries (the link) should report isSymlink().
  int countSymlink = 0;
  for (FileStatus fStatus : stati) {
    countSymlink += fStatus.isSymlink() ? 1 : 0;
  }
  assertEquals(1, countSymlink);
  assertFalse(status1.isSymlink());
  // NOTE(review): only WebHdfs is asserted to surface the symlink flag on a
  // direct getFileStatus — presumably other proxied FS flavors do not;
  // confirm against the HttpFS server behavior.
  if (isWebhdfs) {
    assertTrue(linkStatus.isSymlink());
  }
  fs.close();
}
private void testListStatus() throws Exception {
FileSystem fs = FileSystem.get(getProxiedFSConf());
boolean isDFS = fs instanceof DistributedFileSystem;
@ -1160,6 +1197,7 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
break;
case LIST_STATUS:
testListStatus();
testListSymLinkStatus();
break;
case WORKING_DIRECTORY:
testWorkingdirectory();