HADOOP-6845. Renames the TokenStorage class to Credentials. Contributed by Jitendra Pandey.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@962677 13f79535-47bb-0310-9956-ffa450edef68
Devaraj Das 2010-07-09 20:07:45 +00:00
parent 86e833858c
commit d2ade6aec5
5 changed files with 21 additions and 11 deletions

View File

@@ -62,6 +62,9 @@ Trunk (unreleased changes)
     HADOOP-6835. Add support for concatenated gzip input. (Greg Roelofs via
     cdouglas)
 
+    HADOOP-6845. Renames the TokenStorage class to Credentials.
+    (Jitendra Pandey via ddas)
+
   OPTIMIZATIONS
 
   BUG FIXES

View File

@ -33,17 +33,17 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
/** /**
* A class that provides the facilities of reading and writing * A class that provides the facilities of reading and writing
* secret keys and Tokens. * secret keys and Tokens.
*/ */
@InterfaceAudience.LimitedPrivate({"MapReduce"}) @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving @InterfaceStability.Evolving
public class TokenStorage implements Writable { public class Credentials implements Writable {
private Map<Text, byte[]> secretKeysMap = new HashMap<Text, byte[]>(); private Map<Text, byte[]> secretKeysMap = new HashMap<Text, byte[]>();
private Map<Text, Token<? extends TokenIdentifier>> tokenMap = private Map<Text, Token<? extends TokenIdentifier>> tokenMap =
@@ -119,7 +119,7 @@ public class TokenStorage implements Writable {
     Path localTokensFile = new Path (filename);
     FileSystem localFS = FileSystem.getLocal(conf);
     FSDataInputStream in = localFS.open(localTokensFile);
-    TokenStorage ts = new TokenStorage();
+    Credentials ts = new Credentials();
     ts.readFields(in);
     for (Token<? extends TokenIdentifier> token : ts.getAllTokens()) {
       ugi.addToken(token);
@@ -175,4 +175,4 @@ public class TokenStorage implements Writable {
     secretKeysMap.put(alias, key);
    }
  }
 }
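
The hunks above carry the substance of the rename: the Writable TokenStorage class becomes Credentials, its audience widens to HDFS as well as MapReduce, and the static readTokensAndLoadInUGI helper keeps reading a serialized token file and adding each token to a UserGroupInformation. A minimal caller-side sketch of that flow under the new name follows; the file path is hypothetical, and UserGroupInformation.getCurrentUser()/getTokens() are assumed from the wider UGI API rather than from this diff.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class LoadTokensExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical token file; at login time UserGroupInformation takes this
    // path from the HADOOP_TOKEN_FILE_LOCATION environment variable instead.
    String tokenFile = "/tmp/tokens.bin";

    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

    // The same call the login path now makes on Credentials rather than
    // TokenStorage (see the UserGroupInformation hunk further down).
    Credentials.readTokensAndLoadInUGI(tokenFile, new Configuration(), ugi);

    for (Token<? extends TokenIdentifier> t : ugi.getTokens()) {
      System.out.println("loaded token for service " + t.getService());
    }
  }
}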

View File

@@ -400,7 +400,7 @@ public class UserGroupInformation {
       loginUser = new UserGroupInformation(login.getSubject());
       String tokenFile = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
       if (tokenFile != null && isSecurityEnabled()) {
-        TokenStorage.readTokensAndLoadInUGI(tokenFile, new Configuration(), loginUser);
+        Credentials.readTokensAndLoadInUGI(tokenFile, new Configuration(), loginUser);
       }
     } catch (LoginException le) {
       throw new IOException("failure to login", le);

View File

@@ -46,7 +46,7 @@ public class ServiceAuthorizationManager {
    * 
    * @deprecated Use
    *             {@link CommonConfigurationKeys#HADOOP_SECURITY_AUTHORIZATION}
-   *             Instead.
+   *             instead.
    */
   @Deprecated
   public static final String SERVICE_AUTHORIZATION_CONFIG = 

View File

@@ -40,14 +40,15 @@ import javax.crypto.KeyGenerator;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.security.TokenStorage;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import static org.junit.Assert.*;
 
-public class TestTokenStorage {
+public class TestCredentials {
   private static final String DEFAULT_HMAC_ALGORITHM = "HmacSHA1";
   private static final File tmpDir =
     new File(System.getProperty("test.build.data", "/tmp"), "mapred");
@@ -57,12 +58,17 @@ public class TestTokenStorage {
     tmpDir.mkdir();
   }
 
+  @After
+  public void tearDown() {
+    tmpDir.delete();
+  }
+
   @SuppressWarnings("unchecked")
   @Test
   public <T extends TokenIdentifier> void testReadWriteStorage() 
   throws IOException, NoSuchAlgorithmException{
    // create tokenStorage Object
-    TokenStorage ts = new TokenStorage();
+    Credentials ts = new Credentials();
     Token<T> token1 = new Token();
     Token<T> token2 = new Token();
@@ -98,7 +104,7 @@ public class TestTokenStorage {
     // open and read it back
     DataInputStream dis = 
       new DataInputStream(new FileInputStream(tmpFileName));
-    ts = new TokenStorage();
+    ts = new Credentials();
     ts.readFields(dis);
     dis.close();
@@ -129,5 +135,6 @@ public class TestTokenStorage {
         WritableComparator.compareBytes(kTS, 0, kTS.length, kLocal,
           0, kLocal.length)==0);
     }
+    tmpFileName.delete();
   }
 }
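
The renamed test, TestCredentials, exercises a write/read round trip through the Writable interface: a Credentials object is serialized to a local file and read back with readFields, then its secret-key bytes are compared. A minimal sketch of that round trip is below; the addSecretKey/getSecretKey accessors are assumed from the secretKeysMap field shown above (only getAllTokens() and readFields() appear verbatim in these hunks), and the /tmp path is hypothetical.

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class CredentialsRoundTrip {
  public static void main(String[] args) throws Exception {
    Credentials creds = new Credentials();

    // addSecretKey/getSecretKey are assumed accessors over the secretKeysMap
    // field shown in the hunks above; they are not spelled out in this diff.
    creds.addSecretKey(new Text("my-alias"), "secret-bytes".getBytes());

    File f = new File("/tmp/creds.bin");   // hypothetical location
    DataOutputStream out = new DataOutputStream(new FileOutputStream(f));
    creds.write(out);                      // Writable.write
    out.close();

    Credentials readBack = new Credentials();
    DataInputStream in = new DataInputStream(new FileInputStream(f));
    readBack.readFields(in);               // Writable.readFields
    in.close();

    System.out.println(new String(readBack.getSecretKey(new Text("my-alias"))));
  }
}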