HADOOP-12555. WASB to read credentials from a credential provider. Contributed by Larry McCay.
(cherry picked from commit 27b77751c1)
parent 9b10373b2b
commit a5511debdb
hadoop-common-project/hadoop-common/CHANGES.txt
@@ -489,6 +489,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12668. Support excluding weak Ciphers in HttpServer2 through
     ssl-server.conf. (Vijay Singh via zhz)
 
+    HADOOP-12555. WASB to read credentials from a credential provider.
+    (Larry McCay via cnauroth)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp
hadoop-common-project/hadoop-common/src/site/markdown/CredentialProviderAPI.md
@@ -110,6 +110,7 @@ In summary, first, provision the credentials into a provider then configure the
 |HDFS |DFSUtil leverages Configuration.getPassword method to use the credential provider API and/or fallback to the clear text value stored in ssl-server.xml.|TODO|
 |YARN |WebAppUtils uptakes the use of the credential provider API through the new method on Configuration called getPassword. This provides an alternative to storing the passwords in clear text within the ssl-server.xml file while maintaining backward compatibility.|TODO|
 |AWS <br/> S3/S3A |Uses Configuration.getPassword to get the S3 credentials. They may be resolved through the credential provider API or from the config for backward compatibility.|[AWS S3/S3A Usage](../../hadoop-aws/tools/hadoop-aws/index.html)|
+|Azure <br/> WASB |Uses Configuration.getPassword to get the WASB credentials. They may be resolved through the credential provider API or from the config for backward compatibility.|[Azure WASB Usage](../../hadoop-azure/index.html)|
 |Apache <br/> Accumulo|The trace.password property is used by the Tracer to authenticate with Accumulo and persist the traces in the trace table. The credential provider API is used to acquire the trace.password from a provider or from configuration for backward compatibility.|TODO|
 |Apache <br/> Slider |A capability has been added to Slider to prompt the user for needed passwords and store them using CredentialProvider so they can be retrieved by an app later.|TODO|
 |Apache <br/> Hive |Protection of the metastore password, SSL related passwords and JDO string password has been added through the use of the Credential Provider API|TODO|
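Every uptake in this table goes through the same `Configuration.getPassword` call. A minimal sketch of the pattern (not part of this commit; the provider path and alias below are illustrative assumptions):

```
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;

public class GetPasswordSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Providers listed here are consulted in order; if the alias is not
    // found in any provider, getPassword falls back to the clear-text
    // value stored under the same name in the configuration.
    conf.set("hadoop.security.credential.provider.path",
        "jceks://file/tmp/example.jceks");
    char[] secret = conf.getPassword("ssl.server.keystore.password");
    System.out.println(secret == null ? "no value" : "resolved secret");
  }
}
```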
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
@@ -18,6 +18,10 @@
 package org.apache.hadoop.fs.azure;
 
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
@@ -27,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
  */
 @InterfaceAudience.Private
 public class SimpleKeyProvider implements KeyProvider {
+  private static final Log LOG = LogFactory.getLog(SimpleKeyProvider.class);
 
   protected static final String KEY_ACCOUNT_KEY_PREFIX =
       "fs.azure.account.key.";
@@ -34,7 +39,16 @@ public class SimpleKeyProvider implements KeyProvider {
   @Override
   public String getStorageAccountKey(String accountName, Configuration conf)
       throws KeyProviderException {
-    return conf.get(getStorageAccountKeyName(accountName));
+    String key = null;
+    try {
+      char[] keyChars = conf.getPassword(getStorageAccountKeyName(accountName));
+      if (keyChars != null) {
+        key = new String(keyChars);
+      }
+    } catch(IOException ioe) {
+      LOG.warn("Unable to get key from credential providers.", ioe);
+    }
+    return key;
   }
 
   protected String getStorageAccountKeyName(String accountName) {
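With this change `SimpleKeyProvider` resolves the account key through `Configuration.getPassword`, so a key provisioned in a credential provider takes precedence over a clear-text `fs.azure.account.key.*` entry. A minimal sketch of exercising the provider directly (the provider path and account name are illustrative assumptions):

```
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azure.SimpleKeyProvider;

public class WasbKeyLookupSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Illustrative provider path; getPassword consults it before
    // falling back to clear text in the configuration itself.
    conf.set("hadoop.security.credential.provider.path",
        "jceks://file/etc/hadoop/conf/wasb.jceks");
    String key = new SimpleKeyProvider().getStorageAccountKey(
        "youraccount.blob.core.windows.net", conf);
    System.out.println(key == null ? "no key found" : "key resolved");
  }
}
```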
hadoop-tools/hadoop-azure/src/site/markdown/index.md
@@ -88,9 +88,55 @@ For example:
       <value>YOUR ACCESS KEY</value>
     </property>
 
-In many Hadoop clusters, the core-site.xml file is world-readable. If it's
-undesirable for the access key to be visible in core-site.xml, then it's also
-possible to configure it in encrypted form. An additional configuration property
+In many Hadoop clusters, the core-site.xml file is world-readable. It is possible to
+protect the access key within a credential provider as well. This provides an encrypted
+file format along with protection through file permissions.
+
+#### Protecting the Azure Credentials for WASB with Credential Providers
+
+To protect these credentials from prying eyes, it is recommended that you use
+the credential provider framework to securely store them and access them
+through configuration. The following describes its use for Azure credentials
+in the WASB FileSystem.
+
+For additional reading on the credential provider API, see:
+[Credential Provider API](../hadoop-project-dist/hadoop-common/CredentialProviderAPI.html).
+
+##### End to End Steps for Distcp and WASB with Credential Providers
+
+###### provision
+
+```
+% hadoop credential create fs.azure.account.key.youraccount.blob.core.windows.net -value 123
+    -provider localjceks://file/home/lmccay/wasb.jceks
+```
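Not part of the patch, but useful for verification: the same `hadoop credential` tool can list what was provisioned.

```
% hadoop credential list -provider localjceks://file/home/lmccay/wasb.jceks
```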
+
+###### configure core-site.xml or command line system property
+
+```
+<property>
+  <name>hadoop.security.credential.provider.path</name>
+  <value>localjceks://file/home/lmccay/wasb.jceks</value>
+  <description>Path to interrogate for protected credentials.</description>
+</property>
+```
+
+###### distcp
+
+```
+% hadoop distcp
+    [-D hadoop.security.credential.provider.path=localjceks://file/home/lmccay/wasb.jceks]
+    hdfs://hostname:9001/user/lmccay/007020615 wasb://yourcontainer@youraccount.blob.core.windows.net/testDir/
+```
+
+NOTE: You may optionally add the provider path property to the distcp command line instead of
+adding job-specific configuration to a generic core-site.xml. The square brackets above illustrate
+this capability.
+
+#### Protecting the Azure Credentials for WASB within an Encrypted File
+
+In addition to using the credential provider framework to protect your credentials, it's
+also possible to configure them in encrypted form. An additional configuration property
 specifies an external program to be invoked by Hadoop processes to decrypt the
 key. The encrypted key value is passed to this external program as a command
 line argument:
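In the hadoop-azure documentation this paragraph continues with the decryption-program configuration, which falls outside the hunk shown above. Reconstructed here for completeness (the account name is a placeholder; the property names come from `ShellDecryptionKeyProvider` in the same package):

```
<property>
  <name>fs.azure.account.keyprovider.youraccount</name>
  <value>org.apache.hadoop.fs.azure.ShellDecryptionKeyProvider</value>
</property>

<property>
  <name>fs.azure.account.key.youraccount.blob.core.windows.net</name>
  <value>YOUR ENCRYPTED ACCESS KEY</value>
</property>

<property>
  <name>fs.azure.shellkeyprovider.script</name>
  <value>PATH TO DECRYPTION PROGRAM</value>
</property>
```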
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java
@@ -34,6 +34,11 @@ import java.io.OutputStream;
 import java.net.URI;
 import java.util.Date;
 import java.util.EnumSet;
+import java.io.File;
 
+import org.apache.hadoop.security.ProviderUtils;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.AbstractFileSystem;
 
@@ -43,7 +48,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.azure.AzureBlobStorageTestAccount.CreateOptions;
 import org.junit.After;
 import org.junit.Assert;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 import com.microsoft.azure.storage.blob.CloudBlobContainer;
 import com.microsoft.azure.storage.blob.CloudBlockBlob;
 
@@ -57,6 +64,9 @@ public class TestWasbUriAndConfiguration {
   protected String accountKey;
   protected static Configuration conf = null;
 
+  @Rule
+  public final TemporaryFolder tempDir = new TemporaryFolder();
+
   private AzureBlobStorageTestAccount testAccount;
 
   @After
@@ -306,6 +316,40 @@ public class TestWasbUriAndConfiguration {
     assertEquals(key, result);
   }
 
+  @Test
+  public void testCredsFromCredentialProvider() throws Exception {
+    String account = "testacct";
+    String key = "testkey";
+    // set up conf to have a cred provider
+    final Configuration conf = new Configuration();
+    final File file = tempDir.newFile("test.jks");
+    final URI jks = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(
+        file.toURI());
+    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+        jks.toString());
+
+    provisionAccountKey(conf, account, key);
+
+    // also add to configuration as clear text that should be overridden
+    conf.set(SimpleKeyProvider.KEY_ACCOUNT_KEY_PREFIX + account,
+        key + "cleartext");
+
+    String result = AzureNativeFileSystemStore.getAccountKeyFromConfiguration(
+        account, conf);
+    // result should contain the credential provider key not the config key
+    assertEquals("AccountKey incorrect.", key, result);
+  }
+
+  void provisionAccountKey(
+      final Configuration conf, String account, String key) throws Exception {
+    // add our creds to the provider
+    final CredentialProvider provider =
+        CredentialProviderFactory.getProviders(conf).get(0);
+    provider.createCredentialEntry(
+        SimpleKeyProvider.KEY_ACCOUNT_KEY_PREFIX + account, key.toCharArray());
+    provider.flush();
+  }
+
   @Test
   public void testValidKeyProvider() throws Exception {
     Configuration conf = new Configuration();
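To run just the new test from the hadoop-azure module, standard Maven Surefire syntax should work (a sketch, assuming the module's normal test setup):

```
% mvn test -Dtest=TestWasbUriAndConfiguration#testCredsFromCredentialProvider
```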
@@ -366,7 +410,6 @@ public class TestWasbUriAndConfiguration {
     String authority = testAccount.getFileSystem().getUri().getAuthority();
     URI defaultUri = new URI(defaultScheme, authority, null, null, null);
     conf.set(FS_DEFAULT_NAME_KEY, defaultUri.toString());
-
     // Add references to file system implementations for wasb and wasbs.
     conf.addResource("azure-test.xml");
     URI wantedUri = new URI(wantedScheme + ":///random/path");