Fix merge error.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1598873 13f79535-47bb-0310-9956-ffa450edef68
Kihwal Lee 2014-05-31 14:32:21 +00:00
parent 23c325ad47
commit 77805fb69b
1 changed file with 44 additions and 1 deletion

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java

@@ -231,6 +231,9 @@ public class DataNode extends Configured
   private boolean checkDiskErrorFlag = false;
   private Object checkDiskErrorMutex = new Object();
   private long lastDiskErrorCheck;
+  private String supergroup;
+  private boolean isPermissionEnabled;
+  private String dnUserName = null;
 
   /**
    * Create the DataNode given a configuration, an array of dataDirs,
@@ -252,6 +255,11 @@ public class DataNode extends Configured
     this.getHdfsBlockLocationsEnabled = conf.getBoolean(
         DFSConfigKeys.DFS_HDFS_BLOCKS_METADATA_ENABLED,
         DFSConfigKeys.DFS_HDFS_BLOCKS_METADATA_ENABLED_DEFAULT);
+    this.supergroup = conf.get(DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_KEY,
+        DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT);
+    this.isPermissionEnabled = conf.getBoolean(
+        DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY,
+        DFSConfigKeys.DFS_PERMISSIONS_ENABLED_DEFAULT);
 
     confVersion = "core-" +
         conf.get("hadoop.common.configuration.version", "UNSPECIFIED") +
@@ -432,6 +440,33 @@ public class DataNode extends Configured
       ipcServer.refreshServiceAcl(conf, new HDFSPolicyProvider());
     }
   }
+
+  /** Check whether the current user is in the superuser group. */
+  private void checkSuperuserPrivilege() throws IOException, AccessControlException {
+    if (!isPermissionEnabled) {
+      return;
+    }
+    // Try to get the ugi in the RPC call.
+    UserGroupInformation callerUgi = ipcServer.getRemoteUser();
+    if (callerUgi == null) {
+      // This is not from RPC.
+      callerUgi = UserGroupInformation.getCurrentUser();
+    }
+
+    // Is this by the DN user itself?
+    assert dnUserName != null;
+    if (callerUgi.getShortUserName().equals(dnUserName)) {
+      return;
+    }
+
+    // Is the user a member of the super group?
+    List<String> groups = Arrays.asList(callerUgi.getGroupNames());
+    if (groups.contains(supergroup)) {
+      return;
+    }
+    // Not a superuser.
+    throw new AccessControlException();
+  }
 
   /**
    * Initialize the datanode's periodic scanners:
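
Note (illustrative, not part of this commit): from a caller's point of view, the new check means that a user who is neither the DataNode's own user nor a member of the configured supergroup is rejected by the guarded ClientDatanodeProtocol methods shown in the hunks below. A rough sketch under assumed names; dnProxy and "someUser" are placeholders:

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
    import org.apache.hadoop.security.UserGroupInformation;

    public class SuperuserCheckSketch {            // hypothetical class name
      static void expectRejected(final ClientDatanodeProtocol dnProxy) throws Exception {
        UserGroupInformation unprivileged =
            UserGroupInformation.createRemoteUser("someUser");  // placeholder user
        try {
          unprivileged.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
              dnProxy.refreshNamenodes();  // guarded by checkSuperuserPrivilege()
              return null;
            }
          });
        } catch (IOException expected) {
          // Expected: an AccessControlException (possibly wrapped in a
          // RemoteException, depending on how the proxy was created).
        }
      }
    }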
@@ -735,6 +770,11 @@ public class DataNode extends Configured
     // BlockPoolTokenSecretManager is required to create ipc server.
     this.blockPoolTokenSecretManager = new BlockPoolTokenSecretManager();
 
+    // Login is done by now. Set the DN user name.
+    dnUserName = UserGroupInformation.getCurrentUser().getShortUserName();
+    LOG.info("dnUserName = " + dnUserName);
+    LOG.info("supergroup = " + supergroup);
+
     initIpcServer(conf);
 
     metrics = DataNodeMetrics.create(conf, getDisplayName());
@@ -2414,6 +2454,7 @@ public class DataNode extends Configured
 
   @Override // ClientDatanodeProtocol
   public void refreshNamenodes() throws IOException {
+    checkSuperuserPrivilege();
     conf = new Configuration();
     refreshNamenodes(conf);
   }
@@ -2421,6 +2462,7 @@ public class DataNode extends Configured
   @Override // ClientDatanodeProtocol
   public void deleteBlockPool(String blockPoolId, boolean force)
       throws IOException {
+    checkSuperuserPrivilege();
     LOG.info("deleteBlockPool command received for block pool " + blockPoolId
         + ", force=" + force);
     if (blockPoolManager.get(blockPoolId) != null) {
@@ -2436,6 +2478,7 @@ public class DataNode extends Configured
 
   @Override // ClientDatanodeProtocol
   public synchronized void shutdownDatanode(boolean forUpgrade) throws IOException {
+    checkSuperuserPrivilege();
     LOG.info("shutdownDatanode command received (upgrade=" + forUpgrade +
         "). Shutting down Datanode...");
 
@@ -2602,4 +2645,4 @@ public class DataNode extends Configured
       return lastDiskErrorCheck;
     }
   }
 }