HADOOP-12059. S3Credentials should support use of CredentialProvider. Contributed by Sean Busbey.

(cherry picked from commit 2dbc40e608)
Andrew Wang 2015-06-05 13:11:01 -07:00
parent 4d385b48f0
commit 83a97907cc
8 changed files with 195 additions and 8 deletions
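
In short, the change lets S3Credentials resolve fs.s3.awsSecretAccessKey through Hadoop's CredentialProvider API (via Configuration.getPassword()) instead of requiring it in plaintext configuration. A minimal sketch of how a caller might use this after the change; the keystore path and bucket name are made-up examples, and the store is assumed to already contain the fs.s3.awsSecretAccessKey alias:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3.S3Credentials;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

public class S3CredentialProviderSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Point Hadoop at a credential store (hadoop.security.credential.provider.path).
    // Aliases found there are used before any plaintext fallback.
    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
        "jceks://file/etc/hadoop/s3.jceks");   // hypothetical path
    // The access key id may still come from plain configuration.
    conf.set("fs.s3.awsAccessKeyId", "AKASOMEACCESSKEY");

    S3Credentials creds = new S3Credentials();
    creds.initialize(new URI("s3://some-bucket"), conf);  // now declares IOException
    System.out.println("access key = " + creds.getAccessKey());
  }
}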

.gitignore

@@ -21,5 +21,6 @@ hadoop-common-project/hadoop-common/src/test/resources/contract-test-options.xml
hadoop-tools/hadoop-openstack/src/test/resources/contract-test-options.xml
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.toolbox
yarnregistry.pdf
hadoop-tools/hadoop-aws/src/test/resources/auth-keys.xml
hadoop-tools/hadoop-aws/src/test/resources/contract-test-options.xml
patchprocess/

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -144,6 +144,9 @@ Release 2.8.0 - UNRELEASED
HADOOP-12037. Fix wrong classname in example configuration of hadoop-auth
documentation. (Masatake Iwasaki via wang)
HADOOP-12059. S3Credentials should support use of CredentialProvider.
(Sean Busbey via wang)
OPTIMIZATIONS
HADOOP-11785. Reduce the number of listStatus operation in distcp

org/apache/hadoop/security/ProviderUtils.java

@@ -19,8 +19,11 @@
package org.apache.hadoop.security;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider;
public class ProviderUtils {
/**
@@ -49,4 +52,31 @@ public class ProviderUtils {
}
return new Path(result.toString());
}
/**
* Mangle the given local Java keystore file URI to allow use as a
* LocalJavaKeyStoreProvider.
* @param localFile absolute URI with file scheme and no authority component.
* i.e. return of File.toURI,
* e.g. file:///home/larry/creds.jceks
* @return URI of the form localjceks://file/home/larry/creds.jceks
* @throws IllegalArgumentException if localFile isn't a file URI or if it
* has an authority component.
* @throws URISyntaxException if the wrapping process violates RFC 2396
*/
public static URI nestURIForLocalJavaKeyStoreProvider(final URI localFile)
throws URISyntaxException {
if (!("file".equals(localFile.getScheme()))) {
throw new IllegalArgumentException("passed URI had a scheme other than " +
"file.");
}
if (localFile.getAuthority() != null) {
throw new IllegalArgumentException("passed URI must not have an " +
"authority component. For non-local keystores, please use " +
JavaKeyStoreProvider.class.getName());
}
return new URI(LocalJavaKeyStoreProvider.SCHEME_NAME,
"//file" + localFile.getSchemeSpecificPart(), localFile.getFragment());
}
}
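
For reference, a minimal sketch of how the new helper is intended to be used; the local path is a made-up example, and the expected result follows the example given in the javadoc above:

import java.io.File;
import java.net.URI;
import org.apache.hadoop.security.ProviderUtils;

public class NestUriSketch {
  public static void main(String[] args) throws Exception {
    // File.toURI() produces something like file:///home/larry/creds.jceks
    URI local = new File("/home/larry/creds.jceks").toURI();
    // Wrap it so it can be used as a LocalJavaKeyStoreProvider path,
    // e.g. localjceks://file/home/larry/creds.jceks (per the javadoc above).
    URI nested = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(local);
    System.out.println(nested);
  }
}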

org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java

@@ -18,6 +18,8 @@
package org.apache.hadoop.security.alias;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -60,6 +62,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
*/
@InterfaceAudience.Private
public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
public static final Log LOG = LogFactory.getLog(
AbstractJavaKeyStoreProvider.class);
public static final String CREDENTIAL_PASSWORD_NAME =
"HADOOP_CREDSTORE_PASSWORD";
public static final String KEYSTORE_PASSWORD_FILE_KEY =
@@ -197,6 +201,9 @@ public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
protected void initFileSystem(URI keystoreUri, Configuration conf)
throws IOException {
path = ProviderUtils.unnestUri(keystoreUri);
if (LOG.isDebugEnabled()) {
LOG.debug("backing jks path initialized to " + path);
}
}
@Override
@@ -318,9 +325,10 @@ public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
writeLock.lock();
try {
if (!changed) {
LOG.debug("Keystore hasn't changed, returning.");
return;
}
// write out the keystore
LOG.debug("Writing out keystore.");
try (OutputStream out = getOutputStreamForKeystore()) {
keyStore.store(out, password);
} catch (KeyStoreException e) {

org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java

@@ -65,13 +65,17 @@ public final class LocalJavaKeyStoreProvider extends
@Override
protected OutputStream getOutputStreamForKeystore() throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("using '" + file + "' for output stream.");
}
FileOutputStream out = new FileOutputStream(file);
return out;
}
@Override
protected boolean keystoreExists() throws IOException {
return file.exists();
/* The keystore loader doesn't handle zero length files. */
return file.exists() && (file.length() > 0);
}
@Override
@@ -122,6 +126,22 @@ public final class LocalJavaKeyStoreProvider extends
super.initFileSystem(uri, conf);
try {
file = new File(new URI(getPath().toString()));
if (LOG.isDebugEnabled()) {
LOG.debug("initialized local file as '" + file + "'.");
if (file.exists()) {
LOG.debug("the local file exists and is size " + file.length());
if (LOG.isTraceEnabled()) {
if (file.canRead()) {
LOG.trace("we can read the local file.");
}
if (file.canWrite()) {
LOG.trace("we can write the local file.");
}
}
} else {
LOG.debug("the local file does not exist.");
}
}
} catch (URISyntaxException e) {
throw new IOException(e);
}
@@ -130,6 +150,9 @@ public final class LocalJavaKeyStoreProvider extends
@Override
public void flush() throws IOException {
super.flush();
if (LOG.isDebugEnabled()) {
LOG.debug("Resetting permissions to '" + permissions + "'");
}
if (!Shell.WINDOWS) {
Files.setPosixFilePermissions(Paths.get(file.getCanonicalPath()),
permissions);

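The keystoreExists() change above exists because the JCEKS keystore loader cannot parse an empty file: a freshly created zero-byte keystore would fail on load instead of being treated as not-yet-created. A small sketch of the guard in isolation; the class and temp file here are illustrative only:

import java.io.File;

public class KeystoreExistsSketch {
  // Mirrors the guard added above: a zero-length file is treated as absent.
  static boolean keystoreExists(File file) {
    return file.exists() && file.length() > 0;
  }

  public static void main(String[] args) throws Exception {
    File f = File.createTempFile("creds", ".jceks");  // created with zero bytes
    f.deleteOnExit();
    System.out.println(keystoreExists(f));            // prints false
  }
}
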
TestCredentialProviderFactory.java

@@ -23,6 +23,8 @@ import java.net.URI;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -32,13 +34,26 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class TestCredentialProviderFactory {
public static final Log LOG = LogFactory.getLog(TestCredentialProviderFactory.class);
@Rule
public final TestName test = new TestName();
@Before
public void announce() {
LOG.info("Running test " + test.getMethodName());
}
private static char[] chars = { 'a', 'b', 'c', 'd', 'e', 'f', 'g',
'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',

org/apache/hadoop/fs/s3/S3Credentials.java

@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.s3;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -39,8 +40,10 @@ public class S3Credentials {
/**
* @throws IllegalArgumentException if credentials for S3 cannot be
* determined.
* @throws IOException if credential providers are misconfigured and we have
* to talk to them.
*/
public void initialize(URI uri, Configuration conf) {
public void initialize(URI uri, Configuration conf) throws IOException {
if (uri.getHost() == null) {
throw new IllegalArgumentException("Invalid hostname in URI " + uri);
}
@@ -64,7 +67,10 @@
accessKey = conf.getTrimmed(accessKeyProperty);
}
if (secretAccessKey == null) {
secretAccessKey = conf.getTrimmed(secretAccessKeyProperty);
final char[] pass = conf.getPassword(secretAccessKeyProperty);
if (pass != null) {
secretAccessKey = (new String(pass)).trim();
}
}
if (accessKey == null && secretAccessKey == null) {
throw new IllegalArgumentException("AWS " +

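The behavioral point in the hunk above: Configuration.getPassword() consults any configured credential providers first and, with default settings, falls back to the plaintext configuration value, so existing plaintext setups keep working. A small sketch of that fallback path; the property value is a made-up example:

import org.apache.hadoop.conf.Configuration;

public class GetPasswordFallbackSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // No credential provider configured, so getPassword() falls back to the
    // plaintext property, mirroring the old getTrimmed() lookup (S3Credentials
    // applies the trim() explicitly, as shown above).
    conf.set("fs.s3.awsSecretAccessKey", "  example-secret  ");
    char[] pass = conf.getPassword("fs.s3.awsSecretAccessKey");
    String secret = (pass != null) ? new String(pass).trim() : null;
    System.out.println("[" + secret + "]");  // prints [example-secret]
  }
}
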
org/apache/hadoop/fs/s3/TestS3Credentials.java

@@ -17,13 +17,40 @@
*/
package org.apache.hadoop.fs.s3;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import java.io.File;
import java.net.URI;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestName;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.apache.hadoop.conf.Configuration;
public class TestS3Credentials {
public static final Log LOG = LogFactory.getLog(TestS3Credentials.class);
public class TestS3Credentials extends TestCase {
@Rule
public final TestName test = new TestName();
@Before
public void announce() {
LOG.info("Running test " + test.getMethodName());
}
private static final String EXAMPLE_ID = "AKASOMEACCESSKEY";
private static final String EXAMPLE_KEY =
"RGV0cm9pdCBSZ/WQgY2xl/YW5lZCB1cAEXAMPLE";
@Test
public void testInvalidHostnameWithUnderscores() throws Exception {
S3Credentials s3Credentials = new S3Credentials();
try {
@@ -33,4 +60,78 @@ public class TestS3Credentials extends TestCase {
assertEquals("Invalid hostname in URI s3://a:b@c_d", e.getMessage());
}
}
@Test
public void testPlaintextConfigPassword() throws Exception {
S3Credentials s3Credentials = new S3Credentials();
Configuration conf = new Configuration();
conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
conf.set("fs.s3.awsSecretAccessKey", EXAMPLE_KEY);
s3Credentials.initialize(new URI("s3://foobar"), conf);
assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
s3Credentials.getAccessKey());
assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
s3Credentials.getSecretAccessKey());
}
@Test
public void testPlaintextConfigPasswordWithWhitespace() throws Exception {
S3Credentials s3Credentials = new S3Credentials();
Configuration conf = new Configuration();
conf.set("fs.s3.awsAccessKeyId", "\r\n " + EXAMPLE_ID +
" \r\n");
conf.set("fs.s3.awsSecretAccessKey", "\r\n " + EXAMPLE_KEY +
" \r\n");
s3Credentials.initialize(new URI("s3://foobar"), conf);
assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
s3Credentials.getAccessKey());
assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
s3Credentials.getSecretAccessKey());
}
@Rule
public final TemporaryFolder tempDir = new TemporaryFolder();
@Test
public void testCredentialProvider() throws Exception {
// set up conf to have a cred provider
final Configuration conf = new Configuration();
final File file = tempDir.newFile("test.jks");
final URI jks = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(
file.toURI());
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
jks.toString());
// add our creds to the provider
final CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
provider.createCredentialEntry("fs.s3.awsSecretAccessKey",
EXAMPLE_KEY.toCharArray());
provider.flush();
// make sure S3Creds can retrieve things.
S3Credentials s3Credentials = new S3Credentials();
conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
s3Credentials.initialize(new URI("s3://foobar"), conf);
assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
s3Credentials.getAccessKey());
assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
s3Credentials.getSecretAccessKey());
}
@Test(expected=IllegalArgumentException.class)
public void noSecretShouldThrow() throws Exception {
S3Credentials s3Credentials = new S3Credentials();
Configuration conf = new Configuration();
conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
s3Credentials.initialize(new URI("s3://foobar"), conf);
}
@Test(expected=IllegalArgumentException.class)
public void noAccessIdShouldThrow() throws Exception {
S3Credentials s3Credentials = new S3Credentials();
Configuration conf = new Configuration();
conf.set("fs.s3.awsSecretAccessKey", EXAMPLE_KEY);
s3Credentials.initialize(new URI("s3://foobar"), conf);
}
}
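
Outside of tests, the keystore used above would normally be created with the hadoop credential CLI rather than programmatically, e.g. something like hadoop credential create fs.s3.awsSecretAccessKey -provider localjceks://file/home/user/s3.jceks (path illustrative), after which only hadoop.security.credential.provider.path needs to point at that store.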