HADOOP-10012. Secure Oozie jobs fail with delegation token renewal exception in Namenode HA setup. Contributed by Daryn Sharp and Suresh Srinivas.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1528301 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2013-10-02 04:00:06 +00:00
parent c7cd620fa4
commit 703838c598
4 changed files with 92 additions and 50 deletions

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java

@@ -33,6 +33,7 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -1325,7 +1326,14 @@ public class UserGroupInformation {
    * @return Credentials of tokens associated with this user
    */
   public synchronized Credentials getCredentials() {
-    return new Credentials(getCredentialsInternal());
+    Credentials creds = new Credentials(getCredentialsInternal());
+    Iterator<Token<?>> iter = creds.getAllTokens().iterator();
+    while (iter.hasNext()) {
+      if (iter.next() instanceof Token.PrivateToken) {
+        iter.remove();
+      }
+    }
+    return creds;
   }
 
   /**

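The change above makes getCredentials() hand back a filtered copy of the user's tokens: anything registered as a Token.PrivateToken stays available to in-process token selectors via ugi.getTokens(), but is dropped from the Credentials object that callers might serialize. A minimal sketch of the resulting behavior, assuming token is an already-constructed Token<TokenIdentifier> (the aliases here are illustrative):

    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    ugi.addToken(new Text("regular"), token);
    ugi.addToken(new Text("private"), new Token.PrivateToken<TokenIdentifier>(token));
    // Both tokens remain visible for local RPC token selection...
    assertEquals(2, ugi.getTokens().size());
    // ...but only the regular one is exported for serialization into jobs.
    assertEquals(1, ugi.getCredentials().getAllTokens().size());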
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java

@@ -19,31 +19,20 @@
package org.apache.hadoop.security.token;
import com.google.common.collect.Maps;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.ServiceLoader;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.*;
import org.apache.hadoop.util.ReflectionUtils;
import java.io.*;
import java.util.Arrays;
import java.util.Map;
import java.util.ServiceLoader;
/**
* The client-side form of the token.
*/
@@ -195,6 +184,19 @@ public class Token<T extends TokenIdentifier> implements Writable {
     service = newService;
   }
 
+  /**
+   * Indicates whether the token is a clone. Used by HA failover proxy
+   * to indicate a token should not be visible to the user via
+   * UGI.getCredentials()
+   */
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  public static class PrivateToken<T extends TokenIdentifier> extends Token<T> {
+    public PrivateToken(Token<T> token) {
+      super(token);
+    }
+  }
+
   @Override
   public void readFields(DataInput in) throws IOException {
     int len = WritableUtils.readVInt(in);

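PrivateToken adds no state of its own; it is a pure marker subclass, and Token's copy constructor preserves the wrapped token's identifier, password, kind, and service. The only observable difference is the runtime type, which the getCredentials() change above uses as its filter. A short sketch, where haToken stands in for an already-fetched HDFS delegation token:

    // The clone authenticates exactly like the original token.
    Token<DelegationTokenIdentifier> clone =
        new Token.PrivateToken<DelegationTokenIdentifier>(haToken);
    // Marker check of the kind getCredentials() performs internally:
    boolean hidden = clone instanceof Token.PrivateToken;   // true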
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

@@ -16,11 +16,21 @@
*/
package org.apache.hadoop.security;
import static org.junit.Assert.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.TestSaslRPC;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Shell;
import org.junit.*;
import static org.mockito.Mockito.*;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginContext;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
@@ -30,21 +40,13 @@ import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Set;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginContext;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import static org.apache.hadoop.test.MetricsAsserts.*;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
import org.apache.hadoop.util.Shell;
import static org.apache.hadoop.ipc.TestSaslRPC.*;
import static org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
import static org.apache.hadoop.test.MetricsAsserts.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestUserGroupInformation {
final private static String USER_NAME = "user1@HADOOP.APACHE.ORG";
@@ -786,4 +788,29 @@ public class TestUserGroupInformation {
     UserGroupInformation.setLoginUser(ugi);
     assertEquals(ugi, UserGroupInformation.getLoginUser());
   }
+
+  /**
+   * In some scenarios, such as HA, delegation tokens are associated with a
+   * logical name. The tokens are cloned and are associated with the
+   * physical address of the server where the service is provided.
+   * This test ensures cloned delegation tokens are used locally and are
+   * not returned by {@link UserGroupInformation#getCredentials()}.
+   */
+  @Test
+  public void testPrivateTokenExclusion() throws Exception {
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    TestTokenIdentifier tokenId = new TestTokenIdentifier();
+    Token<TestTokenIdentifier> token = new Token<TestTokenIdentifier>(
+        tokenId.getBytes(), "password".getBytes(),
+        tokenId.getKind(), null);
+    ugi.addToken(new Text("regular-token"), token);
+
+    // Now add the cloned private tokens
+    ugi.addToken(new Text("private-token"), new Token.PrivateToken<TestTokenIdentifier>(token));
+    ugi.addToken(new Text("private-token1"), new Token.PrivateToken<TestTokenIdentifier>(token));
+
+    // Ensure only non-private tokens are returned
+    Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens();
+    assertEquals(1, tokens.size());
+  }
 }

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java

@@ -17,15 +17,9 @@
*/
package org.apache.hadoop.hdfs;
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -41,11 +35,17 @@ import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import static org.apache.hadoop.hdfs.protocol.HdfsConstants.HA_DT_SERVICE_PREFIX;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
import static org.apache.hadoop.hdfs.protocol.HdfsConstants.HA_DT_SERVICE_PREFIX;
public class HAUtil {
@@ -265,10 +265,15 @@ public class HAUtil {
         tokenSelector.selectToken(haService, ugi.getTokens());
     if (haToken != null) {
       for (InetSocketAddress singleNNAddr : nnAddrs) {
+        // this is a minor hack to prevent physical HA tokens from being
+        // exposed to the user via UGI.getCredentials(), otherwise these
+        // cloned tokens may be inadvertently propagated to jobs
         Token<DelegationTokenIdentifier> specificToken =
-            new Token<DelegationTokenIdentifier>(haToken);
+            new Token.PrivateToken<DelegationTokenIdentifier>(haToken);
         SecurityUtil.setTokenService(specificToken, singleNNAddr);
-        ugi.addToken(specificToken);
+        Text alias =
+            new Text(HA_DT_SERVICE_PREFIX + "//" + specificToken.getService());
+        ugi.addToken(alias, specificToken);
         LOG.debug("Mapped HA service delegation token for logical URI " +
             haUri + " to namenode " + singleNNAddr);
       }
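
Taken together, this is the whole fix: HAUtil still clones the logical-URI delegation token once per physical NameNode address so that RPC token selection works against whichever NameNode is active, but the clones are now PrivateTokens stored under an "ha-hdfs://host:port"-style alias, so UGI.getCredentials() no longer exports them. Before this change the host-addressed clones could leak into the credentials Oozie shipped with jobs, and renewing tokens whose service named a physical address rather than the logical nameservice failed in an HA setup. A rough sketch of what a client observes after fetching a delegation token for an HA filesystem (nameservice and addresses are illustrative):

    // All tokens, as seen by in-process selectors:
    for (Token<?> t : ugi.getTokens()) {
      System.out.println(t.getService());
      // ha-hdfs:mycluster, nn1.example.com:8020, nn2.example.com:8020
    }
    // Tokens exported for job submission:
    for (Token<?> t : ugi.getCredentials().getAllTokens()) {
      System.out.println(t.getService());   // ha-hdfs:mycluster only
    }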