Revert HDFS-3654. TestJspHelper#testGetUgi fails with NPE.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1362764 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Eli Collins 2012-07-18 05:16:09 +00:00
parent bd359a1a8e
commit 3f4826881d
6 changed files with 15 additions and 24 deletions

View File

@@ -351,8 +351,6 @@ Release 2.0.1-alpha - UNRELEASED
HDFS-3609. libhdfs: don't force the URI to look like hdfs://hostname:port. HDFS-3609. libhdfs: don't force the URI to look like hdfs://hostname:port.
(Colin Patrick McCabe via eli) (Colin Patrick McCabe via eli)
HDFS-3654. TestJspHelper#testGetUgi fails with NPE. (eli)
BREAKDOWN OF HDFS-3042 SUBTASKS BREAKDOWN OF HDFS-3042 SUBTASKS
HDFS-2185. HDFS portion of ZK-based FailoverController (todd) HDFS-2185. HDFS portion of ZK-based FailoverController (todd)

View File

@@ -540,7 +540,7 @@ public static UserGroupInformation getUGI(ServletContext context,
final String usernameFromQuery = getUsernameFromQuery(request, tryUgiParameter); final String usernameFromQuery = getUsernameFromQuery(request, tryUgiParameter);
final String doAsUserFromQuery = request.getParameter(DoAsParam.NAME); final String doAsUserFromQuery = request.getParameter(DoAsParam.NAME);
if (UserGroupInformation.isSecurityEnabled()) { if(UserGroupInformation.isSecurityEnabled()) {
final String remoteUser = request.getRemoteUser(); final String remoteUser = request.getRemoteUser();
String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME); String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
if (tokenString != null) { if (tokenString != null) {
@@ -558,7 +558,7 @@ public static UserGroupInformation getUGI(ServletContext context,
DelegationTokenIdentifier id = new DelegationTokenIdentifier(); DelegationTokenIdentifier id = new DelegationTokenIdentifier();
id.readFields(in); id.readFields(in);
final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context); final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
nn.verifyToken(id, token.getPassword()); nn.getNamesystem().verifyToken(id, token.getPassword());
ugi = id.getUser(); ugi = id.getUser();
if (ugi.getRealUser() == null) { if (ugi.getRealUser() == null) {
//non-proxy case //non-proxy case

View File

@@ -5449,11 +5449,21 @@ public BlockManager getBlockManager() {
return blockManager; return blockManager;
} }
/**
 * Verifies that the given delegation token identifier and password are
 * valid and match, by delegating to this namesystem's delegation token
 * secret manager. Declared {@code synchronized}, so callers serialize on
 * this object's monitor.
 * @param identifier Token identifier to validate.
 * @param password Password bytes carried in the token.
 * @throws InvalidToken if the identifier is unknown, expired, or the
 *         password does not match.
 */
public synchronized void verifyToken(DelegationTokenIdentifier identifier,
byte[] password) throws InvalidToken {
// Actual validation is performed by the delegation token secret manager.
getDelegationTokenSecretManager().verifyToken(identifier, password);
}
@Override @Override
public boolean isGenStampInFuture(long genStamp) { public boolean isGenStampInFuture(long genStamp) {
return (genStamp > getGenerationStamp()); return (genStamp > getGenerationStamp());
} }
@VisibleForTesting @VisibleForTesting
public EditLogTailer getEditLogTailer() { public EditLogTailer getEditLogTailer() {
return editLogTailer; return editLogTailer;

View File

@@ -51,7 +51,6 @@
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
@@ -80,7 +79,6 @@
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol; import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.tools.GetUserMappingsProtocol; import org.apache.hadoop.tools.GetUserMappingsProtocol;
import org.apache.hadoop.util.ServicePlugin; import org.apache.hadoop.util.ServicePlugin;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
@@ -1292,17 +1290,6 @@ private synchronized void doImmediateShutdown(Throwable t)
terminate(1, t); terminate(1, t);
} }
/**
 * Verifies that the given delegation token identifier and password are
 * valid and match. Forwards to the delegation token secret manager obtained
 * from {@code namesystem}. Declared {@code synchronized}, so callers
 * serialize on this object's monitor.
 * @param identifier Token identifier to validate.
 * @param password Password bytes carried in the token.
 * @throws InvalidToken if the identifier is unknown, expired, or the
 *         password does not match.
 */
public synchronized void verifyToken(DelegationTokenIdentifier identifier,
byte[] password) throws InvalidToken {
// Validation is delegated to the namesystem's delegation token secret manager.
namesystem.getDelegationTokenSecretManager().verifyToken(identifier, password);
}
/** /**
* Class used to expose {@link NameNode} as context to {@link HAState} * Class used to expose {@link NameNode} as context to {@link HAState}
*/ */

View File

@@ -63,7 +63,7 @@ public class NameNodeHttpServer {
public static final String NAMENODE_ADDRESS_ATTRIBUTE_KEY = "name.node.address"; public static final String NAMENODE_ADDRESS_ATTRIBUTE_KEY = "name.node.address";
public static final String FSIMAGE_ATTRIBUTE_KEY = "name.system.image"; public static final String FSIMAGE_ATTRIBUTE_KEY = "name.system.image";
public static final String NAMENODE_ATTRIBUTE_KEY = "name.node"; protected static final String NAMENODE_ATTRIBUTE_KEY = "name.node";
public NameNodeHttpServer( public NameNodeHttpServer(
Configuration conf, Configuration conf,

View File

@@ -30,7 +30,6 @@
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer; import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
@@ -70,7 +69,6 @@ public void testGetUgi() throws IOException {
conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/"); conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
HttpServletRequest request = mock(HttpServletRequest.class); HttpServletRequest request = mock(HttpServletRequest.class);
ServletContext context = mock(ServletContext.class); ServletContext context = mock(ServletContext.class);
NameNode nn = mock(NameNode.class);
String user = "TheDoctor"; String user = "TheDoctor";
Text userText = new Text(user); Text userText = new Text(user);
DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(userText, DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(userText,
@@ -81,8 +79,6 @@ public void testGetUgi() throws IOException {
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn( when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString); tokenString);
when(request.getRemoteUser()).thenReturn(user); when(request.getRemoteUser()).thenReturn(user);
when(context.getAttribute(
NameNodeHttpServer.NAMENODE_ATTRIBUTE_KEY)).thenReturn(nn);
//Test attribute in the url to be used as service in the token. //Test attribute in the url to be used as service in the token.
when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn( when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn(