diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
index c787efe1623..d89d96c1056 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
@@ -26,7 +26,10 @@ import java.security.SecureRandom;
 
 import javax.crypto.spec.SecretKeySpec;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -41,10 +44,17 @@ import org.apache.hadoop.hbase.util.Bytes;
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public class EncryptionUtil {
+public final class EncryptionUtil {
+  static private final Log LOG = LogFactory.getLog(EncryptionUtil.class);
   static private final SecureRandom RNG = new SecureRandom();
 
+  /**
+   * Private constructor to keep this class from being instantiated.
+   */
+  private EncryptionUtil() {
+  }
+
   /**
    * Protect a key by encrypting it with the secret key of the given subject.
    * The configuration must be set up correctly for key alias resolution.
@@ -159,4 +169,90 @@ public class EncryptionUtil {
     return getUnwrapKey(conf, subject, wrappedKey, cipher);
   }
 
+  /**
+   * Helper to create an encryption context.
+   *
+   * @param conf The current configuration.
+   * @param family The current column descriptor.
+   * @return The created encryption context.
+   * @throws IOException if an encryption key for the column cannot be unwrapped
+   */
+  public static Encryption.Context createEncryptionContext(Configuration conf,
+      HColumnDescriptor family) throws IOException {
+    Encryption.Context cryptoContext = Encryption.Context.NONE;
+    String cipherName = family.getEncryptionType();
+    if (cipherName != null) {
+      Cipher cipher;
+      Key key;
+      byte[] keyBytes = family.getEncryptionKey();
+      if (keyBytes != null) {
+        // Family provides specific key material
+        key = unwrapKey(conf, keyBytes);
+        // Use the algorithm the key wants
+        cipher = Encryption.getCipher(conf, key.getAlgorithm());
+        if (cipher == null) {
+          throw new RuntimeException("Cipher '" + key.getAlgorithm() + "' is not available");
+        }
+        // Fail if misconfigured
+        // We use the encryption type specified in the column schema as a sanity check on
+        // what the wrapped key is telling us
+        if (!cipher.getName().equalsIgnoreCase(cipherName)) {
+          throw new RuntimeException("Encryption for family '" + family.getNameAsString()
+              + "' configured with type '" + cipherName + "' but key specifies algorithm '"
+              + cipher.getName() + "'");
+        }
+      } else {
+        // Family does not provide key material, create a random key
+        cipher = Encryption.getCipher(conf, cipherName);
+        if (cipher == null) {
+          throw new RuntimeException("Cipher '" + cipherName + "' is not available");
+        }
+        key = cipher.getRandomKey();
+      }
+      cryptoContext = Encryption.newContext(conf);
+      cryptoContext.setCipher(cipher);
+      cryptoContext.setKey(key);
+    }
+    return cryptoContext;
+  }
+
+  /**
+   * Helper for {@link #unwrapKey(Configuration, String, byte[])} which automatically uses the
+   * configured master and alternative keys, rather than having to specify a key alias to
+   * unwrap with.
+   *
+   * The configuration must be set up correctly for key alias resolution.
+   *
+   * @param conf the current configuration
+   * @param keyBytes the key encrypted by master (or alternative) to unwrap
+   * @return the unwrapped key
+   * @throws IOException if the key cannot be unwrapped
+   */
+  public static Key unwrapKey(Configuration conf, byte[] keyBytes) throws IOException {
+    Key key;
+    String masterKeyName = conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY,
+      User.getCurrent().getShortName());
+    try {
+      // First try the master key
+      key = unwrapKey(conf, masterKeyName, keyBytes);
+    } catch (KeyException e) {
+      // If the current master key fails to unwrap, try the alternate, if
+      // one is configured
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Unable to unwrap key with current master key '" + masterKeyName + "'");
+      }
+      String alternateKeyName =
+          conf.get(HConstants.CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY);
+      if (alternateKeyName != null) {
+        try {
+          key = unwrapKey(conf, alternateKeyName, keyBytes);
+        } catch (KeyException ex) {
+          throw new IOException(ex);
+        }
+      } else {
+        throw new IOException(e);
+      }
+    }
+    return key;
+  }
 }
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
index b0e3464f4b2..edcee70eb64 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
 import java.security.Key;
 import java.security.KeyException;
 import java.security.SecureRandom;
@@ -28,7 +29,9 @@ import java.security.SecureRandom;
 import javax.crypto.spec.SecretKeySpec;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
 import org.apache.hadoop.hbase.io.crypto.aes.AES;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -39,6 +42,9 @@ import org.junit.experimental.categories.Category;
 
 @Category({ClientTests.class, SmallTests.class})
 public class TestEncryptionUtil {
+  // There does not seem to be a ready way to test either unwrapKey(Configuration, byte[])
+  // or createEncryptionContext, and the existing code under MobUtils appeared to be
+  // untested. Not ideal!
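Regarding the note above: a unit test for the new helper does look feasible against the in-memory KeyProviderForTesting already used by this class. A minimal sketch, assuming a hypothetical testCreateEncryptionContext method and exercising the random-key path (no key material on the descriptor):

```java
// Sketch only -- not part of this patch. Exercises the random-key path of
// EncryptionUtil.createEncryptionContext(); the test method name is hypothetical.
@Test
public void testCreateEncryptionContext() throws Exception {
  Configuration conf = new Configuration();
  conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());

  HColumnDescriptor hcd = new HColumnDescriptor("f");
  hcd.setEncryptionType("AES"); // no explicit key material, so a random key is generated

  Encryption.Context context = EncryptionUtil.createEncryptionContext(conf, hcd);
  assertNotNull(context.getCipher());
  assertNotNull(context.getKey());
  assertTrue("AES".equalsIgnoreCase(context.getCipher().getName()));
}
```

The wrapped-key path could presumably be covered the same way by wrapping a test key with EncryptionUtil.wrapKey and setting the result on the descriptor via setEncryptionKey.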
 
   @Test
   public void testKeyWrapping() throws Exception {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index 930f42a0606..4e2ca7d5b5e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -21,26 +21,25 @@ import java.io.DataInput;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.security.Key;
-import java.security.KeyException;
 import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.OffheapKeyValue;
 import org.apache.hadoop.hbase.ShareableMemory;
 import org.apache.hadoop.hbase.SizeCachedKeyValue;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -52,7 +51,6 @@ import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IdLock;
@@ -1817,29 +1815,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
     if (keyBytes != null) {
       Encryption.Context cryptoContext = Encryption.newContext(conf);
       Key key;
-      String masterKeyName = conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY,
-        User.getCurrent().getShortName());
-      try {
-        // First try the master key
-        key = EncryptionUtil.unwrapKey(conf, masterKeyName, keyBytes);
-      } catch (KeyException e) {
-        // If the current master key fails to unwrap, try the alternate, if
-        // one is configured
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Unable to unwrap key with current master key '" + masterKeyName + "'");
-        }
-        String alternateKeyName =
-          conf.get(HConstants.CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY);
-        if (alternateKeyName != null) {
-          try {
-            key = EncryptionUtil.unwrapKey(conf, alternateKeyName, keyBytes);
-          } catch (KeyException ex) {
-            throw new IOException(ex);
-          }
-        } else {
-          throw new IOException(e);
-        }
-      }
+      key = EncryptionUtil.unwrapKey(conf, keyBytes);
       // Use the algorithm the key wants
       Cipher cipher = Encryption.getCipher(conf, key.getAlgorithm());
       if (cipher == null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
index 424a39bdfbe..d6547886582 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mob;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.security.Key;
-import java.security.KeyException;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -58,7 +56,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.io.crypto.Cipher;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -70,8 +67,6 @@ import org.apache.hadoop.hbase.mob.compactions.PartitionedMobCompactor;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.security.EncryptionUtil;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -82,7 +77,7 @@ import org.apache.hadoop.hbase.util.Threads;
  * The mob utilities
  */
 @InterfaceAudience.Private
-public class MobUtils {
+public final class MobUtils {
 
   private static final Log LOG = LogFactory.getLog(MobUtils.class);
 
@@ -94,6 +89,13 @@ public class MobUtils {
     }
   };
+
+  /**
+   * Private constructor to keep this class from being instantiated.
+   */
+  private MobUtils() {
+  }
+
   /**
    * Formats a date to a string.
    * @param date The date.
@@ -774,74 +776,6 @@ public class MobUtils {
     return pool;
   }
 
-  /**
-   * Creates the encyption context.
-   * @param conf The current configuration.
-   * @param family The current column descriptor.
-   * @return The encryption context.
-   * @throws IOException
-   */
-  public static Encryption.Context createEncryptionContext(Configuration conf,
-      HColumnDescriptor family) throws IOException {
-    // TODO the code is repeated, and needs to be unified.
-    Encryption.Context cryptoContext = Encryption.Context.NONE;
-    String cipherName = family.getEncryptionType();
-    if (cipherName != null) {
-      Cipher cipher;
-      Key key;
-      byte[] keyBytes = family.getEncryptionKey();
-      if (keyBytes != null) {
-        // Family provides specific key material
-        String masterKeyName = conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User
-            .getCurrent().getShortName());
-        try {
-          // First try the master key
-          key = EncryptionUtil.unwrapKey(conf, masterKeyName, keyBytes);
-        } catch (KeyException e) {
-          // If the current master key fails to unwrap, try the alternate, if
-          // one is configured
-          if (LOG.isDebugEnabled()) {
-            LOG.debug("Unable to unwrap key with current master key '" + masterKeyName + "'");
-          }
-          String alternateKeyName = conf.get(HConstants.CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY);
-          if (alternateKeyName != null) {
-            try {
-              key = EncryptionUtil.unwrapKey(conf, alternateKeyName, keyBytes);
-            } catch (KeyException ex) {
-              throw new IOException(ex);
-            }
-          } else {
-            throw new IOException(e);
-          }
-        }
-        // Use the algorithm the key wants
-        cipher = Encryption.getCipher(conf, key.getAlgorithm());
-        if (cipher == null) {
-          throw new RuntimeException("Cipher '" + key.getAlgorithm() + "' is not available");
-        }
-        // Fail if misconfigured
-        // We use the encryption type specified in the column schema as a sanity check on
-        // what the wrapped key is telling us
-        if (!cipher.getName().equalsIgnoreCase(cipherName)) {
-          throw new RuntimeException("Encryption for family '" + family.getNameAsString()
-              + "' configured with type '" + cipherName + "' but key specifies algorithm '"
-              + cipher.getName() + "'");
-        }
-      } else {
-        // Family does not provide key material, create a random key
-        cipher = Encryption.getCipher(conf, cipherName);
-        if (cipher == null) {
-          throw new RuntimeException("Cipher '" + cipherName + "' is not available");
-        }
-        key = cipher.getRandomKey();
-      }
-      cryptoContext = Encryption.newContext(conf);
-      cryptoContext.setCipher(cipher);
-      cryptoContext.setKey(key);
-    }
-    return cryptoContext;
-  }
-
   /**
    * Checks whether this table has mob-enabled columns.
    * @param htd The current table descriptor.
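For reference, the master/alternate fallback that was duplicated here (and in HStore and HFileReaderImpl) is now expressed once in EncryptionUtil.unwrapKey(Configuration, byte[]). A minimal sketch of the key-rotation setup it serves, with example key aliases and class/method names that are purely illustrative, not code from this patch:

```java
// Sketch only -- illustrates the rotation scenario EncryptionUtil.unwrapKey(conf, keyBytes)
// now handles for every caller. The key aliases and class name are examples.
import java.io.IOException;
import java.security.Key;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.security.EncryptionUtil;

public class KeyRotationExample {
  public static Key unwrapAfterRotation(byte[] wrappedKeyBytes) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // The key currently in use; if unset, the current user's short name is used.
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "new-cluster-key");
    // The previous key, still needed for data wrapped before the rotation.
    conf.set(HConstants.CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY, "old-cluster-key");
    // Tries "new-cluster-key" first; on KeyException it falls back to "old-cluster-key",
    // and throws IOException only if neither can unwrap the key.
    return EncryptionUtil.unwrapKey(conf, wrappedKeyBytes);
  }
}
```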
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
index dabedfdc249..ab9ee7e599e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
@@ -72,6 +72,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFile.Writer;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
+import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 
@@ -113,7 +114,7 @@ public class PartitionedMobCompactor extends MobCompactor {
     copyOfConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f);
     compactionCacheConfig = new CacheConfig(copyOfConf);
     tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName());
-    cryptoContext = MobUtils.createEncryptionContext(copyOfConf, column);
+    cryptoContext = EncryptionUtil.createEncryptionContext(copyOfConf, column);
   }
 
   @Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
index 08e6753dd51..3daef7e37f4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.regionserver.MemStore;
 import org.apache.hadoop.hbase.regionserver.MemStoreSnapshot;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Reducer.Context;
 
@@ -90,7 +91,7 @@ public class MemStoreWrapper {
     flushSize = this.conf.getLong(MobConstants.MOB_SWEEP_TOOL_COMPACTION_MEMSTORE_FLUSH_SIZE,
         MobConstants.DEFAULT_MOB_SWEEP_TOOL_COMPACTION_MEMSTORE_FLUSH_SIZE);
     mobFamilyDir = MobUtils.getMobFamilyPath(conf, table.getName(), hcd.getNameAsString());
-    cryptoContext = MobUtils.createEncryptionContext(conf, hcd);
+    cryptoContext = EncryptionUtil.createEncryptionContext(conf, hcd);
   }
 
   public void setPartitionId(CompactionPartitionId partitionId) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 50b3de7d46c..49b6c50b532 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.net.InetSocketAddress;
-import java.security.Key;
-import java.security.KeyException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -65,7 +63,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.conf.ConfigurationManager;
 import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.io.crypto.Cipher;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -82,9 +79,9 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionThroughputController;
 import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
 import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours;
-import org.apache.hadoop.hbase.regionserver.compactions.CompactionThroughputController;
 import org.apache.hadoop.hbase.regionserver.wal.WALUtil;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.security.User;
@@ -280,62 +277,7 @@ public class HStore implements Store {
           conf.getInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_KEY, 10));
     completionService = new ExecutorCompletionService(compactionCleanerthreadPoolExecutor);
 
-    // Crypto context for new store files
-    String cipherName = family.getEncryptionType();
-    if (cipherName != null) {
-      Cipher cipher;
-      Key key;
-      byte[] keyBytes = family.getEncryptionKey();
-      if (keyBytes != null) {
-        // Family provides specific key material
-        String masterKeyName = conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY,
-          User.getCurrent().getShortName());
-        try {
-          // First try the master key
-          key = EncryptionUtil.unwrapKey(conf, masterKeyName, keyBytes);
-        } catch (KeyException e) {
-          // If the current master key fails to unwrap, try the alternate, if
-          // one is configured
-          if (LOG.isDebugEnabled()) {
-            LOG.debug("Unable to unwrap key with current master key '" + masterKeyName + "'");
-          }
-          String alternateKeyName =
-            conf.get(HConstants.CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY);
-          if (alternateKeyName != null) {
-            try {
-              key = EncryptionUtil.unwrapKey(conf, alternateKeyName, keyBytes);
-            } catch (KeyException ex) {
-              throw new IOException(ex);
-            }
-          } else {
-            throw new IOException(e);
-          }
-        }
-        // Use the algorithm the key wants
-        cipher = Encryption.getCipher(conf, key.getAlgorithm());
-        if (cipher == null) {
-          throw new RuntimeException("Cipher '" + key.getAlgorithm() + "' is not available");
-        }
-        // Fail if misconfigured
-        // We use the encryption type specified in the column schema as a sanity check on
-        // what the wrapped key is telling us
-        if (!cipher.getName().equalsIgnoreCase(cipherName)) {
-          throw new RuntimeException("Encryption for family '" + family.getNameAsString() +
-            "' configured with type '" + cipherName +
-            "' but key specifies algorithm '" + cipher.getName() + "'");
-        }
-      } else {
-        // Family does not provide key material, create a random key
-        cipher = Encryption.getCipher(conf, cipherName);
-        if (cipher == null) {
-          throw new RuntimeException("Cipher '" + cipherName + "' is not available");
-        }
-        key = cipher.getRandomKey();
-      }
-      cryptoContext = Encryption.newContext(conf);
-      cryptoContext.setCipher(cipher);
-      cryptoContext.setKey(key);
-    }
+    cryptoContext = EncryptionUtil.createEncryptionContext(conf, family);
   }
 
   /**
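The net effect at the call sites is that HStore, PartitionedMobCompactor and MemStoreWrapper now share one code path. A sketch of that consolidated pattern follows; the HFileContextBuilder wiring is shown only as a typical consumer of the returned context, and the class/method names here are illustrative rather than quoted from this patch:

```java
// Sketch only -- the shared shape of the call sites touched above, not code from this patch.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.security.EncryptionUtil;

public class WriterContextExample {
  public static HFileContext newWriterContext(Configuration conf, HColumnDescriptor family)
      throws IOException {
    // One call now replaces the key-unwrap and cipher sanity-check block each caller carried.
    Encryption.Context cryptoContext = EncryptionUtil.createEncryptionContext(conf, family);
    return new HFileContextBuilder()
        .withCompression(family.getCompressionType())
        .withBlockSize(family.getBlocksize())
        .withEncryptionContext(cryptoContext)
        .build();
  }
}
```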