HADOOP-10853. Refactor get instance of CryptoCodec and support create via algorithm/mode/padding. (yliu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/fs-encryption@1612513 13f79535-47bb-0310-9956-ffa450edef68
Yi Liu 2014-07-22 08:38:38 +00:00
parent 6fac3e9b61
commit b52b80d7bd
8 changed files with 99 additions and 43 deletions

View File

@@ -43,6 +43,9 @@ fs-encryption (Unreleased)
HADOOP-10870. Failed to load OpenSSL cipher error logs on systems with old
openssl versions (cmccabe)
HADOOP-10853. Refactor get instance of CryptoCodec and support create via
algorithm/mode/padding. (Yi Liu)
OPTIMIZATIONS
BUG FIXES

View File: CipherSuite.java

@@ -82,4 +82,34 @@ public enum CipherSuite {
}
throw new IllegalArgumentException("Invalid cipher suite name: " + name);
}
/**
* Convert a name to a CipherSuite. {@link #algoBlockSize} is fixed for a
* given cipher suite, so only the name needs to be compared.
* @param name cipher suite name
* @return CipherSuite cipher suite
*/
public static CipherSuite convert(String name) {
CipherSuite[] suites = CipherSuite.values();
for (CipherSuite suite : suites) {
if (suite.getName().equals(name)) {
return suite;
}
}
throw new IllegalArgumentException("Invalid cipher suite name: " + name);
}
/**
* Returns the configuration key suffix for this cipher suite.
* @return String configuration suffix
*/
public String getConfigSuffix() {
String[] parts = name.split("/");
StringBuilder suffix = new StringBuilder();
for (String part : parts) {
suffix.append(".").append(part.toLowerCase());
}
return suffix.toString();
}
}
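
A short usage note (not part of this commit): the suffix above combines with the codec-classes key prefix introduced later in this change to form the per-suite configuration key. Only the suite name "AES/CTR/NoPadding" and the prefix value are taken from the diff; the fragment below is illustrative.

// Illustrative Java fragment: derive the per-suite configuration key.
CipherSuite suite = CipherSuite.convert("AES/CTR/NoPadding");
String suffix = suite.getConfigSuffix();              // ".aes.ctr.nopadding"
String key = "hadoop.security.crypto.codec.classes" + suffix;
// key is "hadoop.security.crypto.codec.classes.aes.ctr.nopadding"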

View File: CryptoCodec.java

@@ -31,8 +31,7 @@ import org.slf4j.LoggerFactory;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASS_KEY;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASS_DEFAULT;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_DEFAULT;
@@ -44,23 +43,28 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
public abstract class CryptoCodec implements Configurable {
public static Logger LOG = LoggerFactory.getLogger(CryptoCodec.class);
public static CryptoCodec getInstance(Configuration conf) {
List<Class<? extends CryptoCodec>> klasses = getCodecClasses(conf);
String name = conf.get(HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY,
HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_DEFAULT);
CipherSuite.checkName(name);
/**
* Get a crypto codec for the specified algorithm/mode/padding.
* @param conf the configuration
* @param cipherSuite algorithm/mode/padding
* @return CryptoCodec the codec object
*/
public static CryptoCodec getInstance(Configuration conf,
CipherSuite cipherSuite) {
List<Class<? extends CryptoCodec>> klasses = getCodecClasses(
conf, cipherSuite);
CryptoCodec codec = null;
for (Class<? extends CryptoCodec> klass : klasses) {
try {
CryptoCodec c = ReflectionUtils.newInstance(klass, conf);
if (c.getCipherSuite().getName().equalsIgnoreCase(name)) {
if (c.getCipherSuite().getName().equals(cipherSuite.getName())) {
if (codec == null) {
LOG.debug("Using crypto codec {}.", klass.getName());
codec = c;
}
} else {
LOG.warn("Crypto codec {} doesn't meet the cipher suite {}.",
klass.getName(), name);
klass.getName(), cipherSuite.getName());
}
} catch (Exception e) {
LOG.warn("Crypto codec {} is not available.", klass.getName());
@@ -72,14 +76,27 @@ public abstract class CryptoCodec implements Configurable {
}
throw new RuntimeException("No available crypto codec which meets " +
"the cipher suite " + name + ".");
"the cipher suite " + cipherSuite.getName() + ".");
}
/**
* Get a crypto codec for the algorithm/mode/padding specified by the config
* value hadoop.security.crypto.cipher.suite.
* @param conf the configuration
* @return CryptoCodec the codec object
*/
public static CryptoCodec getInstance(Configuration conf) {
String name = conf.get(HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY,
HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_DEFAULT);
return getInstance(conf, CipherSuite.convert(name));
}
private static List<Class<? extends CryptoCodec>> getCodecClasses(
Configuration conf) {
Configuration conf, CipherSuite cipherSuite) {
List<Class<? extends CryptoCodec>> result = Lists.newArrayList();
String codecString = conf.get(HADOOP_SECURITY_CRYPTO_CODEC_CLASS_KEY,
HADOOP_SECURITY_CRYPTO_CODEC_CLASS_DEFAULT);
String configName = HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX +
cipherSuite.getConfigSuffix();
String codecString = conf.get(configName);
for (String c : Splitter.on(',').trimResults().omitEmptyStrings().
split(codecString)) {
try {
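
A hedged usage sketch of the two factory methods above (hypothetical example class, not part of this commit; package locations follow the org.apache.hadoop.crypto classes in this diff):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.crypto.CryptoCodec;

// Hypothetical example, for illustration only.
public class CryptoCodecExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Suite resolved from hadoop.security.crypto.cipher.suite (default AES/CTR/NoPadding).
    CryptoCodec byConf = CryptoCodec.getInstance(conf);
    // Suite requested explicitly through the new overload.
    CryptoCodec bySuite =
        CryptoCodec.getInstance(conf, CipherSuite.convert("AES/CTR/NoPadding"));
    System.out.println(byConf.getCipherSuite().getName());
    System.out.println(bySuite.getCipherSuite().getName());
  }
}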

View File: JceAesCtrCryptoCodec.java

@@ -26,6 +26,8 @@ import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -40,6 +42,9 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
*/
@InterfaceAudience.Private
public class JceAesCtrCryptoCodec extends AesCtrCryptoCodec {
private static final Log LOG =
LogFactory.getLog(JceAesCtrCryptoCodec.class.getName());
private Configuration conf;
private String provider;
private SecureRandom random;
@@ -64,7 +69,8 @@ public class JceAesCtrCryptoCodec extends AesCtrCryptoCodec {
SecureRandom.getInstance(secureRandomAlg, provider) :
SecureRandom.getInstance(secureRandomAlg);
} catch (GeneralSecurityException e) {
throw new IllegalArgumentException(e);
LOG.warn(e.getMessage());
random = new SecureRandom();
}
}

View File: CommonConfigurationKeysPublic.java

@@ -282,12 +282,8 @@ public class CommonConfigurationKeysPublic {
/** Class to override Sasl Properties for a connection */
public static final String HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS =
"hadoop.security.saslproperties.resolver.class";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_CRYPTO_CODEC_CLASS_KEY =
"hadoop.security.crypto.codec.class";
public static final String HADOOP_SECURITY_CRYPTO_CODEC_CLASS_DEFAULT =
"org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec," +
"org.apache.hadoop.crypto.JceAesCtrCryptoCodec";
public static final String HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX =
"hadoop.security.crypto.codec.classes";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY =
"hadoop.security.crypto.cipher.suite";
@@ -295,10 +291,10 @@ public class CommonConfigurationKeysPublic {
"AES/CTR/NoPadding";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY =
"hadoop.security.crypto.jce.provider";
"hadoop.security.crypto.jce.provider";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY =
"hadoop.security.crypto.buffer.size";
"hadoop.security.crypto.buffer.size";
/** Default value for HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY */
public static final int HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT = 8192;
/** Class to override Impersonation provider */
@@ -334,17 +330,17 @@ public class CommonConfigurationKeysPublic {
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY =
"hadoop.security.java.secure.random.algorithm";
"hadoop.security.java.secure.random.algorithm";
/** Default value for HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY */
public static final String HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT =
"SHA1PRNG";
"SHA1PRNG";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_SECURE_RANDOM_IMPL_KEY =
"hadoop.security.secure.random.impl";
"hadoop.security.secure.random.impl";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY =
"hadoop.security.random.device.file.path";
"hadoop.security.random.device.file.path";
public static final String HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT =
"/dev/urandom";
"/dev/urandom";
}

View File: core-default.xml

@@ -1452,13 +1452,21 @@ for ldap providers in the same way as above does.
</property>
<property>
<name>hadoop.security.crypto.codec.class</name>
<value>org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec,
org.apache.hadoop.crypto.JceAesCtrCryptoCodec</value>
<name>hadoop.security.crypto.codec.classes.EXAMPLECIPHERSUITE</name>
<value></value>
<description>
Comma-separated list of CryptoCodec implementations which are used for
encryption and decryption. The first implementation will be used if
available; the others are fallbacks.
The prefix for a given crypto codec; it contains a comma-separated list of
implementation classes for that crypto codec (e.g. EXAMPLECIPHERSUITE).
The first implementation will be used if available; the others are fallbacks.
</description>
</property>
<property>
<name>hadoop.security.crypto.codec.classes.aes.ctr.nopadding</name>
<value>org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec,org.apache.hadoop.crypto.JceAesCtrCryptoCodec</value>
<description>
Comma-separated list of crypto codec implementations for AES/CTR/NoPadding.
The first implementation will be used if available; the others are fallbacks.
</description>
</property>
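
The same per-suite list can be set programmatically; a hedged fragment, assuming the key name and class names listed in the property above:

// Illustrative Java fragment: prefer the JCE codec for AES/CTR/NoPadding.
Configuration conf = new Configuration();
conf.set("hadoop.security.crypto.codec.classes.aes.ctr.nopadding",
    "org.apache.hadoop.crypto.JceAesCtrCryptoCodec,"
    + "org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec");
CryptoCodec codec = CryptoCodec.getInstance(conf);    // first available class wins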
@@ -1488,7 +1496,7 @@ for ldap providers in the same way as above does.
<property>
<name>hadoop.security.java.secure.random.algorithm</name>
<value></value>
<value>SHA1PRNG</value>
<description>
The java secure random algorithm.
</description>
@@ -1504,9 +1512,9 @@ for ldap providers in the same way as above does.
<property>
<name>hadoop.security.random.device.file.path</name>
<value></value>
<value>/dev/urandom</value>
<description>
OS security random dev path, it's /dev/urandom in linux.
OS security random device file path.
</description>
</property>
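
A small sketch of how the hadoop.security.java.secure.random.algorithm value is consumed, mirroring the JceAesCtrCryptoCodec hunk earlier in this commit (the fallback to the platform-default SecureRandom is the behavior introduced there; the fragment itself is illustrative):

// Illustrative Java fragment: resolve the configured algorithm, fall back on failure.
String alg = conf.get("hadoop.security.java.secure.random.algorithm", "SHA1PRNG");
SecureRandom random;
try {
  random = SecureRandom.getInstance(alg);
} catch (GeneralSecurityException e) {
  random = new SecureRandom();   // platform default, as in JceAesCtrCryptoCodec
}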

View File: TestCryptoStreamsWithOpensslAesCtrCryptoCodec.java

@@ -20,17 +20,12 @@ package org.apache.hadoop.crypto;
import org.apache.hadoop.conf.Configuration;
import org.junit.BeforeClass;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASS_KEY;
public class TestCryptoStreamsWithOpensslAesCtrCryptoCodec
extends TestCryptoStreams {
@BeforeClass
public static void init() throws Exception {
Configuration conf = new Configuration();
conf.set(HADOOP_SECURITY_CRYPTO_CODEC_CLASS_KEY,
OpensslAesCtrCryptoCodec.class.getName() + "," +
JceAesCtrCryptoCodec.class.getName());
codec = CryptoCodec.getInstance(conf);
}
}

View File: DFSClient.java

@@ -1301,8 +1301,9 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
if (feInfo != null) {
// File is encrypted, wrap the stream in a crypto stream.
final CryptoInputStream cryptoIn =
new CryptoInputStream(dfsis, codec,
feInfo.getEncryptedDataEncryptionKey(), feInfo.getIV());
new CryptoInputStream(dfsis, CryptoCodec.getInstance(conf,
feInfo.getCipherSuite()), feInfo.getEncryptedDataEncryptionKey(),
feInfo.getIV());
return new HdfsDataInputStream(cryptoIn);
} else {
// No key/IV pair so no encryption.
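
For context, a hedged sketch of the read-path pattern above pulled into a hypothetical helper (the FileEncryptionInfo accessors and the CryptoInputStream/HdfsDataInputStream constructors are the ones used in this hunk; the helper itself is illustrative):

// Hypothetical helper, not part of this commit.
private static HdfsDataInputStream openWrapped(Configuration conf,
    DFSInputStream dfsis, FileEncryptionInfo feInfo) throws IOException {
  if (feInfo == null) {
    return new HdfsDataInputStream(dfsis);   // no key/IV pair, so no decryption layer
  }
  // Choose the codec from the cipher suite recorded with the file.
  CryptoCodec codec = CryptoCodec.getInstance(conf, feInfo.getCipherSuite());
  CryptoInputStream cryptoIn = new CryptoInputStream(dfsis, codec,
      feInfo.getEncryptedDataEncryptionKey(), feInfo.getIV());
  return new HdfsDataInputStream(cryptoIn);
}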