HBASE-10062. Reconsider storing plaintext length in the encrypted block header

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1547373 13f79535-47bb-0310-9956-ffa450edef68
Andrew Kyle Purtell 2013-12-03 12:48:30 +00:00
parent e991ef297a
commit c525383e8d
3 changed files with 19 additions and 23 deletions

HFileBlockDefaultDecodingContext.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
+import org.apache.hadoop.hbase.util.Bytes;
 /**
  * A default implementation of {@link HFileBlockDecodingContext}. It assumes the
@@ -64,28 +65,25 @@ public class HFileBlockDefaultDecodingContext implements
       // Encrypted block format:
       // +--------------------------+
-      // | vint plaintext length    |
-      // +--------------------------+
-      // | vint iv length           |
+      // | byte iv length           |
       // +--------------------------+
       // | iv data ...              |
       // +--------------------------+
       // | encrypted block data ... |
       // +--------------------------+
-      int plaintextLength = StreamUtils.readRawVarint32(in);
-      int ivLength = StreamUtils.readRawVarint32(in);
+      int ivLength = in.read();
       if (ivLength > 0) {
         byte[] iv = new byte[ivLength];
         IOUtils.readFully(in, iv);
         decryptor.setIv(iv);
+        // All encrypted blocks will have a nonzero IV length. If we see an IV
+        // length of zero, this means the encoding context had 0 bytes of
+        // plaintext to encode.
+        decryptor.reset();
+        in = decryptor.createDecryptionStream(in);
       }
-      if (plaintextLength == 0) {
-        return;
-      }
-      decryptor.reset();
-      in = decryptor.createDecryptionStream(in);
-      onDiskSizeWithoutHeader = plaintextLength;
+      onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
     }
     Compression.Algorithm compression = fileContext.getCompression();
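
With the plaintext length gone from the header, the reader derives the payload size arithmetically: block size minus one length byte minus the IV. A minimal standalone sketch of parsing the new layout, in plain JDK code rather than HBase's actual classes (the class and method names here are illustrative):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

// Illustrative only: parses the new layout of one IV-length byte,
// the IV bytes, then the ciphertext.
class EncryptedBlockParseSketch {
  static void parse(byte[] block) throws IOException {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(block));
    int ivLength = in.read();            // single header byte, 0..127
    byte[] iv = new byte[ivLength];
    in.readFully(iv);                    // IV immediately follows
    // Remaining bytes are ciphertext: block size minus the length byte
    // minus the IV, mirroring `Bytes.SIZEOF_BYTE + ivLength` in the patch.
    byte[] ciphertext = new byte[block.length - 1 - ivLength];
    in.readFully(ciphertext);
    // ivLength == 0 signals an empty plaintext, so there is nothing to
    // decrypt (mirrors the `if (ivLength > 0)` guard in the patch).
  }
}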

HFileBlockDefaultEncodingContext.java

@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.crypto.Encryptor;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
-import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.Compressor;
@@ -165,9 +164,7 @@ public class HFileBlockDefaultEncodingContext implements
       // Encrypted block format:
       // +--------------------------+
-      // | vint plaintext length    |
-      // +--------------------------+
-      // | vint iv length           |
+      // | byte iv length           |
       // +--------------------------+
       // | iv data ...              |
       // +--------------------------+
@@ -199,29 +196,29 @@ public class HFileBlockDefaultEncodingContext implements
       if (plaintextLength > 0) {
         // Set up the cipher
         Cipher cipher = cryptoContext.getCipher();
         Encryptor encryptor = cipher.getEncryptor();
         encryptor.setKey(cryptoContext.getKey());
-        // Write the encryption header and IV (plaintext)
+        // Set up the IV
         int ivLength = iv.length;
-        StreamUtils.writeRawVInt32(cryptoByteStream, plaintextLength);
-        StreamUtils.writeRawVInt32(cryptoByteStream, ivLength);
+        Preconditions.checkState(ivLength <= Byte.MAX_VALUE, "IV length out of range");
+        cryptoByteStream.write(ivLength);
         if (ivLength > 0) {
           Encryption.incrementIv(iv);
           encryptor.setIv(iv);
           cryptoByteStream.write(iv);
         }
-        // Write the block contents (ciphertext)
+        // Encrypt the data
         Encryption.encrypt(cryptoByteStream, in, encryptor);
         onDiskBytesWithHeader = cryptoByteStream.toByteArray();
       } else {
-        StreamUtils.writeRawVInt32(cryptoByteStream, 0);
-        StreamUtils.writeRawVInt32(cryptoByteStream, 0);
+        cryptoByteStream.write(0);
         onDiskBytesWithHeader = cryptoByteStream.toByteArray();
       }
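
The write side now pays a fixed one byte of header overhead instead of two varints, which is also why the IV length must fit in a single byte. A minimal sketch of the same layout in plain JDK code (illustrative, not HBase's API); the range check mirrors the Preconditions call above:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

// Illustrative only: writes the 1-byte IV length, the IV, then the
// (already encrypted) payload.
class EncryptedBlockWriteSketch {
  static byte[] write(byte[] iv, byte[] ciphertext) throws IOException {
    if (iv.length > Byte.MAX_VALUE) {
      throw new IllegalStateException("IV length out of range");
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    out.write(iv.length);     // one byte replaces the two vints
    out.write(iv);            // IV bytes, if any
    out.write(ciphertext);    // encrypted block data follows directly
    return out.toByteArray();
  }
}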

TestHFileEncryption.java

@@ -151,8 +151,9 @@ public class TestHFileEncryption {
     Configuration conf = TEST_UTIL.getConfiguration();
     CacheConfig cacheConf = new CacheConfig(conf);
-    HFileContext fileContext = new HFileContext();
-    fileContext.setEncryptionContext(cryptoContext);
+    HFileContext fileContext = new HFileContextBuilder()
+        .withEncryptionContext(cryptoContext)
+        .build();
     // write a simple encrypted hfile
     Path path = new Path(TEST_UTIL.getDataTestDir(), "cryptometa.hfile");
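
For context, the test goes on to hand this context to an HFile writer, roughly as sketched below; the writer-factory chain and fs are assumptions based on the HFile API of that era, not part of this diff.

// Sketch: fs and the factory calls are assumptions, not shown in the diff.
FileSystem fs = FileSystem.get(conf);
HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
    .withPath(fs, path)
    .withFileContext(fileContext)
    .create();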