HADOOP-15808. Harden Token service loader use.

Contributed by Steve Loughran.
Steve Loughran 2018-12-11 17:31:17 +00:00
parent 1a25bbe9ec
commit 202926ac33
4 changed files with 65 additions and 30 deletions
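
The hardening is the same in each file below: iterate the ServiceLoader explicitly and catch ServiceConfigurationError around next(), so that one unloadable provider on the classpath is skipped rather than aborting token decoding, fetching or renewal. A minimal sketch of that pattern, assuming a hypothetical MyService interface in place of TokenIdentifier, DtFetcher and TokenRenewer:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;

/** Illustrative only: the hardened ServiceLoader iteration used in this change. */
public final class SafeServiceLoading {

  /** Hypothetical service interface, standing in for the Hadoop interfaces touched here. */
  public interface MyService {
  }

  /** Collect all loadable implementations, skipping providers that fail to load. */
  public static List<MyService> loadAll() {
    List<MyService> found = new ArrayList<>();
    // load() is lazy: implementations are only instantiated by the iterator.
    Iterator<MyService> it = ServiceLoader.load(MyService.class).iterator();
    while (it.hasNext()) {
      try {
        found.add(it.next());
      } catch (ServiceConfigurationError e) {
        // A provider listed in META-INF/services could not be loaded
        // (for example, its class is absent from the classpath); skip it.
      }
    }
    return found;
  }

  private SafeServiceLoading() {
  }
}

As the comment added to Token.java notes, load() only starts the loading process; implementations are loaded in the next() calls, which is why the guard sits around next().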


@ -24,6 +24,8 @@ import java.io.PrintStream;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import org.apache.commons.lang3.StringUtils;
@ -130,7 +132,7 @@ public final class DtFileOperations {
* @param creds the Credentials object to be printed out.
* @param alias print only tokens matching alias (null matches all).
* @param out print to this stream.
* @throws IOException
* @throws IOException failure to unmarshall a token identifier.
*/
public static void printCredentials(
Credentials creds, Text alias, PrintStream out)
@ -145,8 +147,13 @@ public final class DtFileOperations {
out.println(StringUtils.repeat("-", 80));
tokenHeader = false;
}
AbstractDelegationTokenIdentifier id =
(AbstractDelegationTokenIdentifier) token.decodeIdentifier();
AbstractDelegationTokenIdentifier id;
try {
id = (AbstractDelegationTokenIdentifier) token.decodeIdentifier();
} catch (IllegalStateException e) {
LOG.debug("Failed to decode token identifier", e);
id = null;
}
out.printf(fmt, token.getKind(), token.getService(),
(id != null) ? id.getRenewer() : NA_STRING,
(id != null) ? formatDate(id.getMaxDate()) : NA_STRING,
@ -172,7 +179,17 @@ public final class DtFileOperations {
Credentials creds = tokenFile.exists() ?
Credentials.readTokenStorageFile(tokenFile, conf) : new Credentials();
ServiceLoader<DtFetcher> loader = ServiceLoader.load(DtFetcher.class);
for (DtFetcher fetcher : loader) {
Iterator<DtFetcher> iterator = loader.iterator();
while (iterator.hasNext()) {
DtFetcher fetcher;
try {
fetcher = iterator.next();
} catch (ServiceConfigurationError e) {
// failure to load a token implementation
// log at debug and continue.
LOG.debug("Failed to load token fetcher implementation", e);
continue;
}
if (matchService(fetcher, service, url)) {
if (!fetcher.isTokenRequired()) {
String message = "DtFetcher for service '" + service +


@ -35,7 +35,9 @@ import org.slf4j.LoggerFactory;
import java.io.*;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import java.util.UUID;
@ -147,14 +149,25 @@ public class Token<T extends TokenIdentifier> implements Writable {
synchronized (Token.class) {
if (tokenKindMap == null) {
tokenKindMap = Maps.newHashMap();
for (TokenIdentifier id : ServiceLoader.load(TokenIdentifier.class)) {
tokenKindMap.put(id.getKind(), id.getClass());
// start the service load process; it's only in the "next()" calls
// where implementations are loaded.
final Iterator<TokenIdentifier> tokenIdentifiers =
ServiceLoader.load(TokenIdentifier.class).iterator();
while (tokenIdentifiers.hasNext()) {
try {
TokenIdentifier id = tokenIdentifiers.next();
tokenKindMap.put(id.getKind(), id.getClass());
} catch (ServiceConfigurationError e) {
// failure to load a token implementation
// log at debug and continue.
LOG.debug("Failed to load token identifier implementation", e);
}
}
}
cls = tokenKindMap.get(kind);
}
if (cls == null) {
LOG.debug("Cannot find class for token kind " + kind);
LOG.debug("Cannot find class for token kind {}", kind);
return null;
}
return cls;
@ -163,8 +176,9 @@ public class Token<T extends TokenIdentifier> implements Writable {
/**
* Get the token identifier object, or null if it could not be constructed
* (because the class could not be loaded, for example).
* @return the token identifier, or null
* @throws IOException
* @return the token identifier, or null if there was no class found for it
* @throws IOException failure to unmarshall the data
* @throws RuntimeException if the token class could not be instantiated.
*/
@SuppressWarnings("unchecked")
public T decodeIdentifier() throws IOException {
@ -263,7 +277,7 @@ public class Token<T extends TokenIdentifier> implements Writable {
assert !publicToken.isPrivate();
publicService = publicToken.service;
if (LOG.isDebugEnabled()) {
LOG.debug("Cloned private token " + this + " from " + publicToken);
LOG.debug("Cloned private token {} from {}", this, publicToken);
}
}
@ -465,14 +479,22 @@ public class Token<T extends TokenIdentifier> implements Writable {
}
renewer = TRIVIAL_RENEWER;
synchronized (renewers) {
for (TokenRenewer canidate : renewers) {
if (canidate.handleKind(this.kind)) {
renewer = canidate;
return renewer;
Iterator<TokenRenewer> it = renewers.iterator();
while (it.hasNext()) {
try {
TokenRenewer candidate = it.next();
if (candidate.handleKind(this.kind)) {
renewer = candidate;
return renewer;
}
} catch (ServiceConfigurationError e) {
// failure to load a token implementation
// log at debug and continue.
LOG.debug("Failed to load token renewer implementation", e);
}
}
}
LOG.warn("No TokenRenewer defined for token kind " + this.kind);
LOG.warn("No TokenRenewer defined for token kind {}", kind);
return renewer;
}
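
Per the updated decodeIdentifier() javadoc above, callers receive null when no identifier class is registered for the token kind and must handle it, as printCredentials now does. A hypothetical caller illustrating that contract (TokenDescriber and describe() are illustrative names, not part of this change):

import java.io.IOException;

import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

/** Hypothetical caller of the decodeIdentifier() contract documented above. */
public final class TokenDescriber {

  /** Describe a token, tolerating kinds with no registered TokenIdentifier class. */
  public static String describe(Token<? extends TokenIdentifier> token)
      throws IOException {
    TokenIdentifier id = token.decodeIdentifier();
    if (id == null) {
      // No identifier class was found for this token kind.
      return token.getKind() + ": <identifier not decodable>";
    }
    return token.getKind() + ": " + id;
  }

  private TokenDescriber() {
  }
}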


@ -926,7 +926,9 @@ public class TestSaslRPC extends TestRpcBase {
private static void assertAuthEquals(Pattern expect, String actual) {
// this allows us to see the regexp and the value it didn't match
if (!expect.matcher(actual).matches()) {
fail(); // it failed
// it failed
fail(String.format("\"%s\" did not match pattern %s",
actual, expect));
}
}


@ -28,6 +28,7 @@ import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDeleg
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenSecretManager;
import org.junit.Test;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.junit.Assert.*;
/** Unit tests for Token */
@ -86,13 +87,12 @@ public class TestToken {
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLM" +
"NOPQRSTUVWXYZ01234567890!@#$%^&*()-=_+[]{}|;':,./<>?"};
Token<AbstractDelegationTokenIdentifier> orig;
Token<AbstractDelegationTokenIdentifier> copy =
new Token<AbstractDelegationTokenIdentifier>();
Token<AbstractDelegationTokenIdentifier> copy = new Token<>();
// ensure that for each string the input and output values match
for(int i=0; i< values.length; ++i) {
String val = values[i];
System.out.println("Input = " + val);
orig = new Token<AbstractDelegationTokenIdentifier>(val.getBytes(),
Token.LOG.info("Input = {}", val);
orig = new Token<>(val.getBytes(),
val.getBytes(), new Text(val), new Text(val));
String encode = orig.encodeToUrlString();
copy.decodeFromUrlString(encode);
@ -107,15 +107,9 @@ public class TestToken {
*/
@Test
public void testDecodeWritableArgSanityCheck() throws Exception {
Token<AbstractDelegationTokenIdentifier> token =
new Token<AbstractDelegationTokenIdentifier>();
try {
token.decodeFromUrlString(null);
fail("Should have thrown HadoopIllegalArgumentException");
}
catch (HadoopIllegalArgumentException e) {
Token.LOG.info("Test decodeWritable() sanity check success.");
}
Token<AbstractDelegationTokenIdentifier> token = new Token<>();
intercept(HadoopIllegalArgumentException.class,
() -> token.decodeFromUrlString(null));
}
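
The rewritten sanity check relies on LambdaTestUtils.intercept, which fails the test if the lambda completes normally and returns the caught exception for further assertions. A hypothetical test showing the same pattern (the parse() helper is illustrative only):

import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.junit.Assert.assertTrue;

import org.junit.Test;

/** Hypothetical example of the intercept() pattern used in TestToken. */
public class TestInterceptExample {

  /** Illustrative helper that rejects null input. */
  private static String parse(String value) {
    if (value == null) {
      throw new IllegalArgumentException("value must not be null");
    }
    return value.trim();
  }

  @Test
  public void testParseRejectsNull() throws Exception {
    // intercept() fails the test if no exception is thrown, and returns
    // the caught exception so further assertions can be made on it.
    IllegalArgumentException e = intercept(IllegalArgumentException.class,
        () -> parse(null));
    assertTrue(e.getMessage().contains("null"));
  }
}

This replaces the try/fail/catch idiom with a single expression.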
@Test
@ -127,7 +121,7 @@ public class TestToken {
new Text("owner"), new Text("renewer"), new Text("realUser"));
Token<TestDelegationTokenIdentifier> token =
new Token<TestDelegationTokenIdentifier>(id, secretManager);
new Token<>(id, secretManager);
TokenIdentifier idCopy = token.decodeIdentifier();
assertNotSame(id, idCopy);