From 7634d404b750eafa135a37fa275325d0398255fb Mon Sep 17 00:00:00 2001
From: Chris Nauroth
Date: Sun, 28 Feb 2016 11:22:55 -0800
Subject: [PATCH] HADOOP-12846. Credential Provider Recursive Dependencies.
 Contributed by Larry McCay.

---
 .../hadoop-common/CHANGES.txt                      |   3 +
 .../apache/hadoop/security/ProviderUtils.java      | 107 +++++++++++++++++-
 .../hadoop/fs/azure/SimpleKeyProvider.java         |   5 +-
 .../fs/azure/TestWasbUriAndConfiguration.java      |  35 ++++++
 4 files changed, 143 insertions(+), 7 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9f952211c09..473956492e5 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -1761,6 +1761,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12849. TestSymlinkLocalFSFileSystem fails intermittently.
     (Mingliang Liu via cnauroth)
 
+    HADOOP-12846. Credential Provider Recursive Dependencies.
+    (Larry McCay via cnauroth)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
index b7645062578..ae08fbae35c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
@@ -18,14 +18,34 @@
 
 package org.apache.hadoop.security;
 
+import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider;
 
-public class ProviderUtils {
+/**
+ * Utility methods for both key and credential provider APIs.
+ *
+ */
+public final class ProviderUtils {
+  private static final Log LOG = LogFactory.getLog(ProviderUtils.class);
+
+  /**
+   * Hidden ctor to ensure that this utility class isn't
+   * instantiated explicitly.
+   */
+  private ProviderUtils() {
+    // hide ctor for checkstyle compliance
+  }
+
   /**
    * Convert a nested URI to decode the underlying path. The translation takes
    * the authority and parses it into the underlying scheme and authority.
@@ -35,11 +55,15 @@ public class ProviderUtils {
    * @return the unnested path
    */
   public static Path unnestUri(URI nestedUri) {
-    String[] parts = nestedUri.getAuthority().split("@", 2);
-    StringBuilder result = new StringBuilder(parts[0]);
-    result.append("://");
-    if (parts.length == 2) {
-      result.append(parts[1]);
+    StringBuilder result = new StringBuilder();
+    String authority = nestedUri.getAuthority();
+    if (authority != null) {
+      String[] parts = authority.split("@", 2);
+      result.append(parts[0]);
+      result.append("://");
+      if (parts.length == 2) {
+        result.append(parts[1]);
+      }
     }
     result.append(nestedUri.getPath());
     if (nestedUri.getQuery() != null) {
@@ -79,4 +103,75 @@ public class ProviderUtils {
         "//file" + localFile.getSchemeSpecificPart(), localFile.getFragment());
   }
 
+  /**
+   * There are certain integrations of the credential provider API in
+   * which a recursive dependency between the provider and the hadoop
+   * filesystem abstraction causes a problem. These integration points
+   * need to leverage this utility method to remove problematic provider
+   * types from the existing provider path within the configuration.
+   *
+   * @param config the existing configuration with the provider path
+   * @param fileSystemClass the class with which providers must be compatible
+   * @return a Configuration clone with the new provider path
+   */
+  public static Configuration excludeIncompatibleCredentialProviders(
+      Configuration config, Class fileSystemClass)
+      throws IOException {
+
+    String providerPath = config.get(
+        CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH);
+
+    if (providerPath == null) {
+      return config;
+    }
+    StringBuffer newProviderPath = new StringBuffer();
+    String[] providers = providerPath.split(",");
+    Path path = null;
+    for (String provider: providers) {
+      try {
+        path = unnestUri(new URI(provider));
+        Class clazz = null;
+        try {
+          String scheme = path.toUri().getScheme();
+          clazz = FileSystem.getFileSystemClass(scheme, config);
+        } catch (IOException ioe) {
+          // not all providers are filesystem based
+          // for instance user:/// will not be able to
+          // have a filesystem class associated with it.
+          if (newProviderPath.length() > 0) {
+            newProviderPath.append(",");
+          }
+          newProviderPath.append(provider);
+        }
+        if (clazz != null) {
+          if (fileSystemClass.isAssignableFrom(clazz)) {
+            LOG.debug("Filesystem based provider excluded from provider " +
+                "path due to recursive dependency: " + provider);
+          } else {
+            if (newProviderPath.length() > 0) {
+              newProviderPath.append(",");
+            }
+            newProviderPath.append(provider);
+          }
+        }
+      } catch (URISyntaxException e) {
+        LOG.warn("Credential Provider URI is invalid: " +
+            provider);
+      }
+    }
+
+    String effectivePath = newProviderPath.toString();
+    if (effectivePath.equals(providerPath)) {
+      return config;
+    }
+
+    Configuration conf = new Configuration(config);
+    if (effectivePath.equals("")) {
+      conf.unset(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH);
+    } else {
+      conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+          effectivePath);
+    }
+    return conf;
+  }
 }
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
index 28e307e60da..5596f7e67c2 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
@@ -24,6 +24,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.ProviderUtils;
 
 /**
  * Key provider that simply returns the storage account key from the
@@ -41,7 +42,9 @@ public class SimpleKeyProvider implements KeyProvider {
       throws KeyProviderException {
     String key = null;
     try {
-      char[] keyChars = conf.getPassword(getStorageAccountKeyName(accountName));
+      Configuration c = ProviderUtils.excludeIncompatibleCredentialProviders(
+          conf, NativeAzureFileSystem.class);
+      char[] keyChars = c.getPassword(getStorageAccountKeyName(accountName));
       if (keyChars != null) {
         key = new String(keyChars);
       }
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java
index 06a5b621eb9..cd9d1d41dd1 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java
@@ -461,4 +461,39 @@ public class TestWasbUriAndConfiguration {
       FileSystem.closeAll();
     }
   }
+
+  @Test
+  public void testCredentialProviderPathExclusions() throws Exception {
+    String providerPath =
+        "user:///,jceks://wasb/user/hrt_qa/sqoopdbpasswd.jceks," +
+        "jceks://hdfs@nn1.example.com/my/path/test.jceks";
+    Configuration config = new Configuration();
+    config.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+        providerPath);
+    String newPath = "user:///,jceks://hdfs@nn1.example.com/my/path/test.jceks";
+
+    excludeAndTestExpectations(config, newPath);
+  }
+
+  @Test
+  public void testExcludeAllProviderTypesFromConfig() throws Exception {
+    String providerPath =
+        "jceks://wasb/tmp/test.jceks," +
+        "jceks://wasb@/my/path/test.jceks";
+    Configuration config = new Configuration();
+    config.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+        providerPath);
+    String newPath = null;
+
+    excludeAndTestExpectations(config, newPath);
+  }
+
+  void excludeAndTestExpectations(Configuration config, String newPath)
+      throws Exception {
+    Configuration conf = ProviderUtils.excludeIncompatibleCredentialProviders(
+        config, NativeAzureFileSystem.class);
+    String effectivePath = conf.get(
+        CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, null);
+    assertEquals(newPath, effectivePath);
+  }
 }
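
Usage note (illustrative, not part of the patch): the following sketch shows how a filesystem-bound component could apply the same pattern as SimpleKeyProvider above. It prunes credential providers that would recurse into the filesystem being initialized, then resolves the secret against the pruned configuration. The class name SecretResolver and its parameters are hypothetical; only the two Hadoop calls it makes appear in this patch.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.ProviderUtils;

// Hypothetical caller: a component that runs inside a FileSystem
// implementation and must not re-enter that filesystem while it reads
// a secret from the credential provider path.
public class SecretResolver {

  public static String resolveSecret(Configuration conf, String alias,
      Class<? extends FileSystem> fsClass) throws IOException {
    // Returns a clone of conf whose provider path omits any provider
    // backed by a filesystem assignable to fsClass; conf itself is
    // returned unchanged when nothing needs to be excluded.
    Configuration pruned =
        ProviderUtils.excludeIncompatibleCredentialProviders(conf, fsClass);
    // getPassword() consults the remaining providers first and falls back
    // to the plain configuration property named by the alias.
    char[] secret = pruned.getPassword(alias);
    return secret == null ? null : String.valueOf(secret);
  }
}

A WASB caller would pass NativeAzureFileSystem.class as fsClass, exactly as SimpleKeyProvider does in the hunk above.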