MAPREDUCE-4148. MapReduce should not have a compile-time dependency on HDFS.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1337199 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 2012-05-11 15:00:48 +00:00
parent 5dbd09ee76
commit aea890f7d2
16 changed files with 106 additions and 17 deletions

File: org/apache/hadoop/security/token/Token.java

@@ -18,10 +18,15 @@
package org.apache.hadoop.security.token;
import com.google.common.collect.Maps;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.ServiceLoader;
import org.apache.commons.codec.binary.Base64;
@@ -37,6 +42,7 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.util.ReflectionUtils;
/**
* The client-side form of the token.
@@ -45,6 +51,9 @@ import org.apache.hadoop.io.WritableUtils;
@InterfaceStability.Evolving
public class Token<T extends TokenIdentifier> implements Writable {
public static final Log LOG = LogFactory.getLog(Token.class);
private static Map<Text, Class<? extends TokenIdentifier>> tokenKindMap;
private byte[] identifier;
private byte[] password;
private Text kind;
@@ -100,13 +109,49 @@ public class Token<T extends TokenIdentifier> implements Writable {
}
/**
-* Get the token identifier
-* @return the token identifier
* Get the token identifier's byte representation
* @return the token identifier's byte representation
*/
public byte[] getIdentifier() {
return identifier;
}
private static synchronized Class<? extends TokenIdentifier>
getClassForIdentifier(Text kind) {
if (tokenKindMap == null) {
tokenKindMap = Maps.newHashMap();
for (TokenIdentifier id : ServiceLoader.load(TokenIdentifier.class)) {
tokenKindMap.put(id.getKind(), id.getClass());
}
}
Class<? extends TokenIdentifier> cls = tokenKindMap.get(kind);
if (cls == null) {
LOG.warn("Cannot find class for token kind " + kind);
return null;
}
return cls;
}
/**
* Get the token identifier object, or null if it could not be constructed
* (because the class could not be loaded, for example).
* @return the token identifier, or null
* @throws IOException
*/
@SuppressWarnings("unchecked")
public T decodeIdentifier() throws IOException {
Class<? extends TokenIdentifier> cls = getClassForIdentifier(getKind());
if (cls == null) {
return null;
}
TokenIdentifier tokenIdentifier = ReflectionUtils.newInstance(cls, null);
ByteArrayInputStream buf = new ByteArrayInputStream(identifier);
DataInputStream in = new DataInputStream(buf);
tokenIdentifier.readFields(in);
in.close();
return (T) tokenIdentifier;
}
/**
* Get the token password/secret
* @return the token password/secret
@@ -260,16 +305,31 @@ public class Token<T extends TokenIdentifier> implements Writable {
buffer.append(num);
}
}
private void identifierToString(StringBuilder buffer) {
T id = null;
try {
id = decodeIdentifier();
} catch (IOException e) {
// handle in the finally block
} finally {
if (id != null) {
buffer.append("(").append(id).append(")");
} else {
addBinaryBuffer(buffer, identifier);
}
}
}
@Override
public String toString() {
StringBuilder buffer = new StringBuilder();
buffer.append("Ident: ");
addBinaryBuffer(buffer, identifier);
buffer.append(", Kind: ");
buffer.append("Kind: ");
buffer.append(kind.toString());
buffer.append(", Service: ");
buffer.append(service.toString());
buffer.append(", Ident: ");
identifierToString(buffer);
return buffer.toString();
}
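The new decodeIdentifier() gives callers a generic way to rehydrate a token's identifier bytes: the concrete TokenIdentifier class is resolved at run time via ServiceLoader, so no compile-time dependency on the module that defines it (HDFS, YARN, and so on) is needed. A minimal usage sketch (the dumpTokens helper is illustrative, not part of this commit):

import java.io.IOException;

import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenDecodeExample {
  // Prints the decoded identifier of every token in the given credentials.
  // Works for any TokenIdentifier registered under META-INF/services;
  // no HDFS classes are referenced at compile time.
  static void dumpTokens(Credentials creds) throws IOException {
    for (Token<? extends TokenIdentifier> token : creds.getAllTokens()) {
      TokenIdentifier id = token.decodeIdentifier(); // null if the kind is unregistered
      System.out.println(token.getKind() + " -> " + (id != null ? id : "<unknown kind>"));
    }
  }
}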

File: org/apache/hadoop/security/token/TestToken.java

@@ -18,11 +18,15 @@
package org.apache.hadoop.security.token;
import static junit.framework.Assert.assertEquals;
import java.io.*;
import java.util.Arrays;
import org.apache.hadoop.io.*;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenSecretManager;
import junit.framework.TestCase;
@@ -94,5 +98,20 @@ public class TestToken extends TestCase {
checkUrlSafe(encode);
}
}
public void testDecodeIdentifier() throws IOException {
TestDelegationTokenSecretManager secretManager =
new TestDelegationTokenSecretManager(0, 0, 0, 0);
secretManager.startThreads();
TestDelegationTokenIdentifier id = new TestDelegationTokenIdentifier(
new Text("owner"), new Text("renewer"), new Text("realUser"));
Token<TestDelegationTokenIdentifier> token =
new Token<TestDelegationTokenIdentifier>(id, secretManager);
TokenIdentifier idCopy = token.decodeIdentifier();
assertNotSame(id, idCopy);
assertEquals(id, idCopy);
}
}

File: META-INF/services/org.apache.hadoop.security.token.TokenIdentifier (hadoop-common test resources)

@@ -0,0 +1,2 @@
org.apache.hadoop.ipc.TestSaslRPC$TestTokenIdentifier
org.apache.hadoop.security.token.delegation.TestDelegationToken$TestDelegationTokenIdentifier
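By the ServiceLoader contract, each of these registration files must be named after the service interface, so this file presumably lives at META-INF/services/org.apache.hadoop.security.token.TokenIdentifier on the module's classpath, listing one implementation class per line. Any jar can contribute identifiers the same way: a hypothetical com.example.MyTokenIdentifier becomes decodable simply by being listed in such a file inside its own jar.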

File: META-INF/services/org.apache.hadoop.security.token.TokenIdentifier (hadoop-hdfs)

@@ -0,0 +1,2 @@
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier

File: CHANGES.txt (MapReduce)

@@ -287,6 +287,9 @@ Release 2.0.0 - UNRELEASED
MAPREDUCE-4231. Update RAID to use the new BlockCollection interface.
(szetszwo)
MAPREDUCE-4148. MapReduce should not have a compile-time dependency on
HDFS. (tomwhite)
Release 0.23.3 - UNRELEASED
INCOMPATIBLE CHANGES

File: pom.xml (module name not shown)

@@ -37,6 +37,7 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
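Note the narrowed scope: with hadoop-hdfs at test scope, this module's production sources can no longer reference HDFS types at compile time, while tests (which presumably still run against HDFS) keep the dependency.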

File: org/apache/hadoop/mapreduce/JobSubmitter.java

@@ -38,7 +38,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.QueueACL;
@@ -433,8 +432,7 @@ class JobSubmitter {
LOG.debug("Printing tokens for job: " + jobId);
for(Token<?> token: credentials.getAllTokens()) {
if (token.getKind().toString().equals("HDFS_DELEGATION_TOKEN")) {
LOG.debug("Submitting with " +
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier.stringifyToken(token));
LOG.debug("Submitting with " + token);
}
}
}

File: org/apache/hadoop/mapreduce/security/TokenCache.java

@@ -30,7 +30,6 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Master;
@@ -179,16 +178,14 @@ public class TokenCache {
* @param namenode
* @return delegation token
*/
-@SuppressWarnings("unchecked")
@InterfaceAudience.Private
-public static Token<DelegationTokenIdentifier> getDelegationToken(
public static Token<?> getDelegationToken(
Credentials credentials, String namenode) {
//No fs-specific tokens issued by this fs. It may however issue tokens
// for other filesystems - which would be keyed by that filesystem's name.
if (namenode == null)
return null;
-return (Token<DelegationTokenIdentifier>) credentials.getToken(new Text(
-namenode));
return (Token<?>) credentials.getToken(new Text(namenode));
}
/**

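Callers of getDelegationToken now receive an untyped Token<?> instead of a Token<DelegationTokenIdentifier>, which is what allows the HDFS import above to go away. When the identifier's contents are needed, the wildcard token can be combined with the new decodeIdentifier(). A hedged sketch (the credentials and service key are hypothetical, and TokenCache is audience-private):

import java.io.IOException;

import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class NamenodeTokenExample {
  static void logNamenodeToken(Credentials creds) throws IOException {
    // Tokens are keyed by service name; this URI is a made-up example.
    Token<?> token = TokenCache.getDelegationToken(creds, "hdfs://nn.example.com:8020");
    if (token != null) {
      TokenIdentifier id = token.decodeIdentifier(); // may be null for unregistered kinds
      System.out.println("Kind: " + token.getKind() + ", Ident: " + id);
    }
  }
}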
File: (Java source; name not shown)

@@ -39,7 +39,6 @@ import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.util.StringUtils;

File: META-INF/services/org.apache.hadoop.security.token.TokenIdentifier (MapReduce)

@@ -0,0 +1,2 @@
org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier
org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier

File: pom.xml (module name not shown)

@@ -114,8 +114,8 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
-<!-- needed for security and runtime -->
<artifactId>hadoop-hdfs</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.inject.extensions</groupId>

File: pom.xml (module name not shown)

@@ -57,7 +57,7 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
-<scope>provided</scope>
<scope>runtime</scope>
</dependency>
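The provided-to-runtime switch is the crux for this module: provided keeps a dependency on the compile classpath, so HDFS types stayed referenceable in source, whereas runtime removes compile-time visibility but still ships the jar for execution, where ServiceLoader can discover the HDFS TokenIdentifier registrations.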
<dependency>
<groupId>org.apache.hadoop</groupId>

File: META-INF/services/org.apache.hadoop.security.token.TokenIdentifier (YARN)

@@ -0,0 +1,4 @@
org.apache.hadoop.yarn.security.ContainerTokenIdentifier
org.apache.hadoop.yarn.security.ApplicationTokenIdentifier
org.apache.hadoop.yarn.security.client.ClientTokenIdentifier
org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier

File: META-INF/services/org.apache.hadoop.security.token.TokenIdentifier (YARN NodeManager)

@@ -0,0 +1 @@
org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier

File: pom.xml (module name not shown)

@@ -128,8 +128,8 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
-<!-- needed for security and runtime -->
<artifactId>hadoop-hdfs</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>