HBASE-12606 Sanity check encryption configuration before opening WAL or onlining regions
Also disables IntegrationTestIngestWithEncryption if the distributed cluster configuration is missing prerequisites.
parent ca0c1774c9
commit bb15fd5fe0
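The commit wires the new EncryptionTest utility into every point where encrypted data could be written or read: master-side schema checks, region open, and the secure WAL reader and writer. As a rough, hedged illustration of what that check does, the following standalone sketch runs the same verification an operator could perform before enabling encryption; the class name VerifyEncryptionConfig is made up for this example, and KeyProviderForTesting is a test-only provider.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.util.EncryptionTest;

public class VerifyEncryptionConfig {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Test-only key provider; a real deployment would configure its own provider
    // under HConstants.CRYPTO_KEYPROVIDER_CONF_KEY instead.
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    // Loads the configured key provider and cipher provider, then runs an AES
    // encrypt/decrypt round trip; throws IOException if anything is unusable.
    EncryptionTest.testEncryption(conf, "AES", null);
    System.out.println("Encryption configuration passed the sanity check");
  }
}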
@@ -31,16 +31,18 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

import org.junit.Before;
import org.junit.experimental.categories.Category;

@Category(IntegrationTests.class)
public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {

  boolean initialized = false;

  static {
    // These log level changes are only useful when running on a localhost
    // cluster.

@@ -54,11 +56,9 @@ public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {
  public void setUpCluster() throws Exception {
    util = getTestingUtil(null);
    Configuration conf = util.getConfiguration();
    conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
    if (!util.isDistributedCluster()) {
      // Inject the test key provider and WAL alternative if running on a
      // localhost cluster; otherwise, whether or not the schema change below
      // takes effect depends on the distributed cluster site configuration.
      // Inject required configuration if we are not running in distributed mode
      conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
      conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
      conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
      conf.setClass("hbase.regionserver.hlog.reader.impl", SecureProtobufLogReader.class,

@@ -67,7 +67,15 @@ public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {
          Writer.class);
      conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
    }
    // Check if the cluster configuration can support this test
    try {
      EncryptionTest.testEncryption(conf, "AES", null);
    } catch (Exception e) {
      LOG.warn("Encryption configuration test did not pass, skipping test");
      return;
    }
    super.setUpCluster();
    initialized = true;
  }

  @Before

@@ -77,6 +85,10 @@ public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {
    // will create the test table, appropriately pre-split
    super.setUp();

    if (!initialized) {
      return;
    }

    // Update the test table schema so HFiles from this point will be written with
    // encryption features enabled.
    final Admin admin = util.getHBaseAdmin();

@@ -98,6 +110,22 @@ public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {
    }
  }

  @Override
  public int runTestFromCommandLine() throws Exception {
    if (!initialized) {
      return 0;
    }
    return super.runTestFromCommandLine();
  }

  @Override
  public void cleanUp() throws Exception {
    if (!initialized) {
      return;
    }
    super.cleanUp();
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    IntegrationTestingUtility.setUseDistributedCluster(conf);
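The main() hunk above is cut off by the diff. As a hedged sketch only, not part of this commit, a driver for the integration test would follow the usual Hadoop Tool pattern suggested by the ToolRunner import; the wrapper class RunIngestWithEncryption below is hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestIngestWithEncryption;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.util.ToolRunner;

public class RunIngestWithEncryption {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Point the testing utility at an already-running (distributed) cluster.
    IntegrationTestingUtility.setUseDistributedCluster(conf);
    // ToolRunner parses generic Hadoop options and invokes the test; the
    // overridden runTestFromCommandLine() above short-circuits with 0 when
    // the cluster is missing encryption prerequisites.
    int status = ToolRunner.run(conf, new IntegrationTestIngestWithEncryption(), args);
    System.exit(status);
  }
}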

@@ -112,6 +112,7 @@ import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.Pair;

@@ -222,6 +223,9 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
  //should we check the compression codec type at master side, default true, HBASE-6370
  private final boolean masterCheckCompression;

  //should we check encryption settings at master side, default true
  private final boolean masterCheckEncryption;

  Map<String, Service> coprocessorServiceHandlers = Maps.newHashMap();

  // monitor for snapshot of hbase tables

@@ -290,6 +294,9 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
    // should we check the compression codec type at master side, default true, HBASE-6370
    this.masterCheckCompression = conf.getBoolean("hbase.master.check.compression", true);

    // should we check encryption settings at master side, default true
    this.masterCheckEncryption = conf.getBoolean("hbase.master.check.encryption", true);

    this.metricsMaster = new MetricsMaster( new MetricsMasterWrapperImpl(this));

    // preload table descriptor at startup

@@ -1225,7 +1232,18 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
    }

    // check compression can be loaded
    try {
      checkCompression(htd);
    } catch (IOException e) {
      throw new DoNotRetryIOException(e.getMessage(), e);
    }

    // check encryption can be loaded
    try {
      checkEncryption(conf, htd);
    } catch (IOException e) {
      throw new DoNotRetryIOException(e.getMessage(), e);
    }

    // check that we have at least 1 CF
    if (htd.getColumnFamilies().length == 0) {

@@ -1347,6 +1365,20 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
    CompressionTest.testCompression(hcd.getCompactionCompression());
  }

  private void checkEncryption(final Configuration conf, final HTableDescriptor htd)
      throws IOException {
    if (!this.masterCheckEncryption) return;
    for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
      checkEncryption(conf, hcd);
    }
  }

  private void checkEncryption(final Configuration conf, final HColumnDescriptor hcd)
      throws IOException {
    if (!this.masterCheckEncryption) return;
    EncryptionTest.testEncryption(conf, hcd.getEncryptionType(), hcd.getEncryptionKey());
  }

  private HRegionInfo[] getHRegionInfos(HTableDescriptor hTableDescriptor,
      byte[][] splitKeys) {
    long regionId = System.currentTimeMillis();

@@ -1407,6 +1439,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
      throws IOException {
    checkInitialized();
    checkCompression(columnDescriptor);
    checkEncryption(conf, columnDescriptor);
    if (cpHost != null) {
      if (cpHost.preAddColumn(tableName, columnDescriptor)) {
        return;

@@ -1424,6 +1457,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
      throws IOException {
    checkInitialized();
    checkCompression(descriptor);
    checkEncryption(conf, descriptor);
    if (cpHost != null) {
      if (cpHost.preModifyColumn(tableName, descriptor)) {
        return;
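The practical effect of the master-side hook is that a table or column-family schema requesting encryption is rejected with DoNotRetryIOException at submission time when the cluster has no usable key or cipher provider, instead of failing later when a region opens. A hedged client-side sketch of that behaviour follows; the table and family names are made up, and it assumes an Admin connected to a cluster without a configured key provider.

import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;

public class EncryptedSchemaExample {
  // Assumes an Admin for a cluster where no key provider is configured.
  static void tryCreateEncryptedTable(Admin admin) throws Exception {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("demo_table"));
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    hcd.setEncryptionType("AES"); // schema asks for an encrypted column family
    htd.addFamily(hcd);
    try {
      admin.createTable(htd);
    } catch (DoNotRetryIOException e) {
      // The master's checkEncryption() could not load a usable key/cipher
      // provider, so the request is rejected before any region is opened.
    }
  }
}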

@@ -147,6 +147,7 @@ import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.Counter;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;

@@ -4853,7 +4854,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver { //
  protected HRegion openHRegion(final CancelableProgressable reporter)
      throws IOException {
    checkCompressionCodecs();

    checkEncryption();
    this.openSeqNum = initialize(reporter);
    this.setSequenceId(openSeqNum);
    if (wal != null && getRegionServerServices() != null) {

@@ -4869,6 +4870,12 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver { //
    }
  }

  private void checkEncryption() throws IOException {
    for (HColumnDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
      EncryptionTest.testEncryption(conf, fam.getEncryptionType(), fam.getEncryptionKey());
    }
  }

  /**
   * Create a daughter region from given a temp directory with the region data.
   * @param hri Spec. for daughter region to open.

@@ -34,9 +34,9 @@ import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.WALHdrResult;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.EncryptionTest;

@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class SecureProtobufLogReader extends ProtobufLogReader {

@@ -67,6 +67,9 @@ public class SecureProtobufLogReader extends ProtobufLogReader {
      // Serialized header data has been merged into the builder from the
      // stream.

      EncryptionTest.testKeyProvider(conf);
      EncryptionTest.testCipherProvider(conf);

      // Retrieve a usable key

      byte[] keyBytes = builder.getEncryptionKey().toByteArray();

@@ -26,6 +26,7 @@ import javax.crypto.spec.SecretKeySpec;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;

@@ -51,6 +52,9 @@ public class SecureProtobufLogWriter extends ProtobufLogWriter {
      throws IOException {
    builder.setWriterClsName(SecureProtobufLogWriter.class.getSimpleName());
    if (conf.getBoolean(HConstants.ENABLE_WAL_ENCRYPTION, false)) {
      EncryptionTest.testKeyProvider(conf);
      EncryptionTest.testCipherProvider(conf);

      // Get an instance of our cipher
      final String cipherName = conf.get(HConstants.CRYPTO_WAL_ALGORITHM_CONF_KEY, DEFAULT_CIPHER);
      Cipher cipher = Encryption.getCipher(conf, cipherName);

@@ -0,0 +1,158 @@
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyStoreKeyProvider;
import org.apache.hadoop.hbase.security.EncryptionUtil;

@InterfaceAudience.Public
@InterfaceStability.Evolving
public class EncryptionTest {
  static final Log LOG = LogFactory.getLog(EncryptionTest.class);

  static final Map<String, Boolean> keyProviderResults = new ConcurrentHashMap<String, Boolean>();
  static final Map<String, Boolean> cipherProviderResults =
      new ConcurrentHashMap<String, Boolean>();
  static final Map<String, Boolean> cipherResults = new ConcurrentHashMap<String, Boolean>();

  private EncryptionTest() {
  }

  /**
   * Check that the configured key provider can be loaded and initialized, or
   * throw an exception.
   *
   * @param conf
   * @throws IOException
   */
  public static void testKeyProvider(final Configuration conf) throws IOException {
    String providerClassName = conf.get(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY,
      KeyStoreKeyProvider.class.getName());
    Boolean result = keyProviderResults.get(providerClassName);
    if (result == null) {
      try {
        Encryption.getKeyProvider(conf);
        keyProviderResults.put(providerClassName, true);
      } catch (Exception e) { // most likely a RuntimeException
        keyProviderResults.put(providerClassName, false);
        throw new IOException("Key provider " + providerClassName + " failed test: " +
          e.getMessage(), e);
      }
    } else if (result.booleanValue() == false) {
      throw new IOException("Key provider " + providerClassName + " previously failed test");
    }
  }

  /**
   * Check that the configured cipher provider can be loaded and initialized, or
   * throw an exception.
   *
   * @param conf
   * @throws IOException
   */
  public static void testCipherProvider(final Configuration conf) throws IOException {
    String providerClassName = conf.get(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY,
      DefaultCipherProvider.class.getName());
    Boolean result = cipherProviderResults.get(providerClassName);
    if (result == null) {
      try {
        Encryption.getCipherProvider(conf);
        cipherProviderResults.put(providerClassName, true);
      } catch (Exception e) { // most likely a RuntimeException
        cipherProviderResults.put(providerClassName, false);
        throw new IOException("Cipher provider " + providerClassName + " failed test: " +
          e.getMessage(), e);
      }
    } else if (result.booleanValue() == false) {
      throw new IOException("Cipher provider " + providerClassName + " previously failed test");
    }
  }

  /**
   * Check that the specified cipher can be loaded and initialized, or throw
   * an exception. Verifies key and cipher provider configuration as a
   * prerequisite for cipher verification.
   *
   * @param conf
   * @param cipher
   * @param key
   * @throws IOException
   */
  public static void testEncryption(final Configuration conf, final String cipher,
      byte[] key) throws IOException {
    if (cipher == null) {
      return;
    }
    testKeyProvider(conf);
    testCipherProvider(conf);
    Boolean result = cipherResults.get(cipher);
    if (result == null) {
      try {
        Encryption.Context context = Encryption.newContext(conf);
        context.setCipher(Encryption.getCipher(conf, cipher));
        if (key == null) {
          // Make a random key since one was not provided
          context.setKey(context.getCipher().getRandomKey());
        } else {
          // This will be a wrapped key from schema
          context.setKey(EncryptionUtil.unwrapKey(conf,
            conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase"),
            key));
        }
        byte[] iv = null;
        if (context.getCipher().getIvLength() > 0) {
          iv = new byte[context.getCipher().getIvLength()];
          Bytes.random(iv);
        }
        byte[] plaintext = new byte[1024];
        Bytes.random(plaintext);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Encryption.encrypt(out, new ByteArrayInputStream(plaintext), context, iv);
        byte[] ciphertext = out.toByteArray();
        out.reset();
        Encryption.decrypt(out, new ByteArrayInputStream(ciphertext), plaintext.length,
          context, iv);
        byte[] test = out.toByteArray();
        if (!Bytes.equals(plaintext, test)) {
          throw new IOException("Did not pass encrypt/decrypt test");
        }
        cipherResults.put(cipher, true);
      } catch (Exception e) {
        cipherResults.put(cipher, false);
        throw new IOException("Cipher " + cipher + " failed test: " + e.getMessage(), e);
      }
    } else if (result.booleanValue() == false) {
      throw new IOException("Cipher " + cipher + " previously failed test");
    }
  }
}

@@ -0,0 +1,140 @@
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.util;

import static org.junit.Assert.fail;

import java.security.Key;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.CipherProvider;
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
import org.apache.hadoop.hbase.io.crypto.KeyProvider;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({MiscTests.class, SmallTests.class})
public class TestEncryptionTest {

  @Test
  public void testTestKeyProvider() {
    Configuration conf = HBaseConfiguration.create();
    try {
      conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
      EncryptionTest.testKeyProvider(conf);
    } catch (Exception e) {
      fail("Instantiation of test key provider should have passed");
    }
    try {
      conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, FailingKeyProvider.class.getName());
      EncryptionTest.testKeyProvider(conf);
      fail("Instantiation of bad test key provider should have failed check");
    } catch (Exception e) { }
  }

  @Test
  public void testTestCipherProvider() {
    Configuration conf = HBaseConfiguration.create();
    try {
      conf.set(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY, DefaultCipherProvider.class.getName());
      EncryptionTest.testCipherProvider(conf);
    } catch (Exception e) {
      fail("Instantiation of test cipher provider should have passed");
    }
    try {
      conf.set(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY, FailingCipherProvider.class.getName());
      EncryptionTest.testCipherProvider(conf);
      fail("Instantiation of bad test cipher provider should have failed check");
    } catch (Exception e) { }
  }

  @Test
  public void testTestCipher() {
    Configuration conf = HBaseConfiguration.create();
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    try {
      EncryptionTest.testEncryption(conf, "AES", null);
    } catch (Exception e) {
      fail("Test for cipher AES should have succeeded");
    }
    try {
      EncryptionTest.testEncryption(conf, "foobar", null);
      fail("Test for bogus cipher should have failed");
    } catch (Exception e) { }
  }

  public static class FailingKeyProvider implements KeyProvider {

    @Override
    public void init(String params) {
      throw new RuntimeException("BAD!");
    }

    @Override
    public Key getKey(String alias) {
      return null;
    }

    @Override
    public Key[] getKeys(String[] aliases) {
      return null;
    }

  }

  public static class FailingCipherProvider implements CipherProvider {

    public FailingCipherProvider() {
      super();
      throw new RuntimeException("BAD!");
    }

    @Override
    public Configuration getConf() {
      return null;
    }

    @Override
    public void setConf(Configuration conf) {
    }

    @Override
    public String getName() {
      return null;
    }

    @Override
    public String[] getSupportedCiphers() {
      return null;
    }

    @Override
    public Cipher getCipher(String name) {
      return null;
    }

  }
}