HBASE-15754 Add testcase for AES encryption

This commit is contained in:
zhangduo 2016-05-03 17:59:52 +08:00
parent 66213c9f28
commit 6abe1879dd
2 changed files with 31 additions and 14 deletions

View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.asyncfs;
import static io.netty.handler.timeout.IdleState.READER_IDLE;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
@@ -107,9 +108,13 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
private static final String MECHANISM = "DIGEST-MD5";
private static final int SASL_TRANSFER_MAGIC_NUMBER = 0xDEADBEEF;
private static final String NAME_DELIMITER = " ";
private static final String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY =
@VisibleForTesting
static final String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY =
"dfs.encrypt.data.transfer.cipher.suites";
private static final String AES_CTR_NOPADDING = "AES/CTR/NoPadding";
@VisibleForTesting
static final String AES_CTR_NOPADDING = "AES/CTR/NoPadding";
private interface SaslAdaptor {
@@ -184,11 +189,11 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
CREATE_DECRYPTOR = cryptoCodecClass.getMethod("createDecryptor");
Class<?> encryptorClass = Class.forName("org.apache.hadoop.crypto.Encryptor");
INIT_ENCRYPTOR = encryptorClass.getMethod("init");
INIT_ENCRYPTOR = encryptorClass.getMethod("init", byte[].class, byte[].class);
ENCRYPT = encryptorClass.getMethod("encrypt", ByteBuffer.class, ByteBuffer.class);
Class<?> decryptorClass = Class.forName("org.apache.hadoop.crypto.Decryptor");
INIT_DECRYPTOR = decryptorClass.getMethod("init");
INIT_DECRYPTOR = decryptorClass.getMethod("init", byte[].class, byte[].class);
DECRYPT = decryptorClass.getMethod("decrypt", ByteBuffer.class, ByteBuffer.class);
} catch (NoSuchMethodException | ClassNotFoundException e) {
throw new Error(e);
@@ -879,7 +884,7 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
}
ByteBuffer inBuffer = inBuf.nioBuffer();
ByteBuf outBuf = ctx.alloc().directBuffer(inBuf.readableBytes());
ByteBuffer outBuffer = outBuf.nioBuffer();
ByteBuffer outBuffer = outBuf.nioBuffer(0, inBuf.readableBytes());
codec.decrypt(inBuffer, outBuffer);
outBuf.writerIndex(inBuf.readableBytes());
if (release) {
@@ -920,7 +925,7 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
release = true;
}
ByteBuffer inBuffer = inBuf.nioBuffer();
ByteBuffer outBuffer = out.nioBuffer();
ByteBuffer outBuffer = out.nioBuffer(0, inBuf.readableBytes());
codec.encrypt(inBuffer, outBuffer);
out.writerIndex(inBuf.readableBytes());
if (release) {

View File

@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase.io.asyncfs;
import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.AES_CTR_NOPADDING;
import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATA_ENCRYPTION_ALGORITHM_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY;
@@ -47,8 +49,6 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -95,12 +95,17 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput {
@Parameter(1)
public String encryptionAlgorithm;
@Parameters(name = "{index}: protection={0}, encryption={1}")
@Parameter(2)
public String cipherSuite;
@Parameters(name = "{index}: protection={0}, encryption={1}, cipherSuite={2}")
public static Iterable<Object[]> data() {
List<Object[]> params = new ArrayList<>();
for (String protection : Arrays.asList("authentication", "integrity", "privacy")) {
for (String encryptionAlgorithm : Arrays.asList("", "3des", "rc4")) {
params.add(new Object[] { protection, encryptionAlgorithm });
for (String cipherSuite : Arrays.asList("", AES_CTR_NOPADDING)) {
params.add(new Object[] { protection, encryptionAlgorithm, cipherSuite });
}
}
}
return params;
@@ -129,8 +134,6 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
Logger.getLogger("org.apache.hadoop.hdfs.StateChange").setLevel(Level.DEBUG);
Logger.getLogger("BlockStateChange").setLevel(Level.DEBUG);
EVENT_LOOP_GROUP = new NioEventLoopGroup();
TEST_UTIL.getConfiguration().setInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT_MS);
Properties conf = MiniKdc.createConf();
@@ -161,13 +164,22 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput {
@Before
public void setUp() throws Exception {
TEST_UTIL.getConfiguration().set("dfs.data.transfer.protection", protection);
if (StringUtils.isBlank(encryptionAlgorithm)) {
if (StringUtils.isBlank(encryptionAlgorithm) && StringUtils.isBlank(cipherSuite)) {
TEST_UTIL.getConfiguration().setBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, false);
TEST_UTIL.getConfiguration().unset(DFS_DATA_ENCRYPTION_ALGORITHM_KEY);
} else {
TEST_UTIL.getConfiguration().setBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, true);
}
if (StringUtils.isBlank(encryptionAlgorithm)) {
TEST_UTIL.getConfiguration().unset(DFS_DATA_ENCRYPTION_ALGORITHM_KEY);
} else {
TEST_UTIL.getConfiguration().set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, encryptionAlgorithm);
}
if (StringUtils.isBlank(cipherSuite)) {
TEST_UTIL.getConfiguration().unset(DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY);
} else {
TEST_UTIL.getConfiguration().set(DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY, cipherSuite);
}
TEST_UTIL.startMiniDFSCluster(3);
FS = TEST_UTIL.getDFSCluster().getFileSystem();
}