HDFS-12748. NameNode memory leak when accessing webhdfs GETHOMEDIRECTORY. Contributed by Weiwei Yang.

This commit is contained in:
Weiwei Yang 2019-07-11 09:46:27 +08:00
parent 5effeae1f3
commit 9c3806cf1b
3 changed files with 24 additions and 10 deletions

View File

@@ -911,4 +911,24 @@ public class DFSUtilClient {
return new Path(sb.toString()); return new Path(sb.toString());
} }
/**
 * Resolves the current user's home directory beneath a configurable
 * prefix directory. The prefix is read from
 * {@link HdfsClientConfigKeys#DFS_USER_HOME_DIR_PREFIX_KEY}, falling back
 * to the default prefix when the configuration is absent or unset.
 * The user name comes from the supplied {@link UserGroupInformation}.
 * @param conf configuration (may be null, in which case the default
 *             prefix is used)
 * @param ugi {@link UserGroupInformation} of current user.
 * @return the home directory of current user.
 */
public static Path getHomeDirectory(Configuration conf,
    UserGroupInformation ugi) {
  // A null conf is tolerated so callers without a loaded configuration
  // (e.g. webhdfs request handling) still get a usable home path.
  final String prefix = (conf == null)
      ? HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT
      : conf.get(
          HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_KEY,
          HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT);
  return new Path(prefix + "/" + ugi.getShortUserName());
}
} }

View File

@@ -132,8 +132,6 @@ public class DistributedFileSystem extends FileSystem
implements KeyProviderTokenIssuer { implements KeyProviderTokenIssuer {
private Path workingDir; private Path workingDir;
private URI uri; private URI uri;
private String homeDirPrefix =
HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT;
DFSClient dfs; DFSClient dfs;
private boolean verifyChecksum = true; private boolean verifyChecksum = true;
@@ -170,9 +168,6 @@ public class DistributedFileSystem extends FileSystem
if (host == null) { if (host == null) {
throw new IOException("Incomplete HDFS URI, no host: "+ uri); throw new IOException("Incomplete HDFS URI, no host: "+ uri);
} }
homeDirPrefix = conf.get(
HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_KEY,
HdfsClientConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT);
this.dfs = new DFSClient(uri, conf, statistics); this.dfs = new DFSClient(uri, conf, statistics);
this.uri = URI.create(uri.getScheme()+"://"+uri.getAuthority()); this.uri = URI.create(uri.getScheme()+"://"+uri.getAuthority());
@@ -215,8 +210,7 @@ public class DistributedFileSystem extends FileSystem
@Override @Override
public Path getHomeDirectory() { public Path getHomeDirectory() {
return makeQualified(new Path(homeDirPrefix + "/" return makeQualified(DFSUtilClient.getHomeDirectory(getConf(), dfs.ugi));
+ dfs.ugi.getShortUserName()));
} }
/** /**

View File

@@ -72,6 +72,7 @@ import org.apache.hadoop.fs.permission.FsCreateModes;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DFSUtilClient;
import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.XAttrHelper;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.ClientProtocol;
@@ -1151,9 +1152,8 @@ public class NamenodeWebHdfsMethods {
return Response.ok(js).type(MediaType.APPLICATION_JSON).build(); return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
} }
case GETHOMEDIRECTORY: { case GETHOMEDIRECTORY: {
final String js = JsonUtil.toJsonString("Path", String userHome = DFSUtilClient.getHomeDirectory(conf, ugi).toString();
FileSystem.get(conf != null ? conf : new Configuration()) final String js = JsonUtil.toJsonString("Path", userHome);
.getHomeDirectory().toUri().getPath());
return Response.ok(js).type(MediaType.APPLICATION_JSON).build(); return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
} }
case GETACLSTATUS: { case GETACLSTATUS: {