HBASE-10062. Reconsider storing plaintext length in the encrypted block header

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1547373 13f79535-47bb-0310-9956-ffa450edef68
Author: Andrew Kyle Purtell
Date:   2013-12-03 12:48:30 +00:00
Parent: e991ef297a
Commit: c525383e8d

3 changed files with 19 additions and 23 deletions
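
For orientation, a minimal standalone sketch (not part of the patch) of what the change means per block: the old header stored the plaintext length and the IV length as vints ahead of the IV, while the new header stores only a single IV-length byte. Assuming a 64 KB block, a 16-byte IV, and protobuf-style vints as StreamUtils writes them:

public class HeaderOverheadSketch {

  // Size of a protobuf-style unsigned vint, which is what
  // StreamUtils.writeRawVInt32 emits.
  static int vintSize(int value) {
    int size = 1;
    while ((value & ~0x7F) != 0) {
      size++;
      value >>>= 7;
    }
    return size;
  }

  public static void main(String[] args) {
    int plaintextLength = 64 * 1024; // assumed block size
    int ivLength = 16;               // assumed IV size

    // Old layout: vint plaintext length, vint iv length, iv data
    int oldHeader = vintSize(plaintextLength) + vintSize(ivLength) + ivLength;
    // New layout: one length byte, iv data; plaintext length is not stored
    int newHeader = 1 + ivLength;

    System.out.println("old header: " + oldHeader + " bytes"); // 3 + 1 + 16 = 20
    System.out.println("new header: " + newHeader + " bytes"); // 1 + 16 = 17
  }
}

As the decoding hunk below shows, the reader now derives the payload size from the block's on-disk size instead of a stored length.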

HFileBlockDefaultDecodingContext.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
+import org.apache.hadoop.hbase.util.Bytes;

 /**
  * A default implementation of {@link HFileBlockDecodingContext}. It assumes the
@@ -64,28 +65,25 @@ public class HFileBlockDefaultDecodingContext implements
       // Encrypted block format:
       // +--------------------------+
-      // | vint plaintext length    |
-      // +--------------------------+
-      // | vint iv length           |
+      // | byte iv length           |
       // +--------------------------+
       // | iv data ...              |
       // +--------------------------+
       // | encrypted block data ... |
       // +--------------------------+
-      int plaintextLength = StreamUtils.readRawVarint32(in);
-      int ivLength = StreamUtils.readRawVarint32(in);
+      int ivLength = in.read();
       if (ivLength > 0) {
         byte[] iv = new byte[ivLength];
         IOUtils.readFully(in, iv);
         decryptor.setIv(iv);
-      }
-      if (plaintextLength == 0) {
-        return;
-      }
-      decryptor.reset();
-      in = decryptor.createDecryptionStream(in);
-      onDiskSizeWithoutHeader = plaintextLength;
+        // All encrypted blocks will have a nonzero IV length. If we see an IV
+        // length of zero, this means the encoding context had 0 bytes of
+        // plaintext to encode.
+        decryptor.reset();
+        in = decryptor.createDecryptionStream(in);
+      }
+      onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
     }

     Compression.Algorithm compression = fileContext.getCompression();
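
A minimal, self-contained sketch (plain java.io, not the HBase decoding context above) of the read path this hunk implements: the IV length is a single byte, and the payload length is no longer read from the header but derived from the block's on-disk size, mirroring the onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength line:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class DecodeHeaderSketch {

  // Reads the post-patch header from an encrypted block and returns the
  // encrypted payload. The input array is the block as stored on disk:
  // one IV-length byte, the IV, then the ciphertext.
  static byte[] readEncryptedBlock(byte[] onDiskBlock) throws IOException {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(onDiskBlock));
    int ivLength = in.read();      // single unsigned byte
    byte[] iv = new byte[ivLength];
    in.readFully(iv);              // a real decryptor would be seeded with this IV
    // No stored plaintext length: whatever remains is the ciphertext.
    byte[] ciphertext = new byte[onDiskBlock.length - 1 - ivLength];
    in.readFully(ciphertext);
    return ciphertext;
  }
}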

HFileBlockDefaultEncodingContext.java

@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.crypto.Encryptor;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
-import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.Compressor;
@@ -165,9 +164,7 @@ public class HFileBlockDefaultEncodingContext implements
       // Encrypted block format:
       // +--------------------------+
-      // | vint plaintext length    |
-      // +--------------------------+
-      // | vint iv length           |
+      // | byte iv length           |
       // +--------------------------+
       // | iv data ...              |
       // +--------------------------+
@@ -199,29 +196,29 @@ public class HFileBlockDefaultEncodingContext implements
       if (plaintextLength > 0) {
+        // Set up the cipher
         Cipher cipher = cryptoContext.getCipher();
         Encryptor encryptor = cipher.getEncryptor();
         encryptor.setKey(cryptoContext.getKey());
-        // Write the encryption header and IV (plaintext)
+        // Set up the IV
         int ivLength = iv.length;
-        StreamUtils.writeRawVInt32(cryptoByteStream, plaintextLength);
-        StreamUtils.writeRawVInt32(cryptoByteStream, ivLength);
+        Preconditions.checkState(ivLength <= Byte.MAX_VALUE, "IV length out of range");
+        cryptoByteStream.write(ivLength);
         if (ivLength > 0) {
           Encryption.incrementIv(iv);
           encryptor.setIv(iv);
           cryptoByteStream.write(iv);
         }
-        // Write the block contents (ciphertext)
+        // Encrypt the data
         Encryption.encrypt(cryptoByteStream, in, encryptor);
         onDiskBytesWithHeader = cryptoByteStream.toByteArray();
       } else {
-        StreamUtils.writeRawVInt32(cryptoByteStream, 0);
-        StreamUtils.writeRawVInt32(cryptoByteStream, 0);
+        cryptoByteStream.write(0);
         onDiskBytesWithHeader = cryptoByteStream.toByteArray();
       }
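
The matching write path, again as a standalone sketch rather than the encoding context itself: one IV-length byte, the IV, then the encrypted payload, with the same Byte.MAX_VALUE guard the patch adds so the length always fits in a single byte:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class EncodeHeaderSketch {

  // Writes the post-patch layout: byte iv length | iv data | encrypted data.
  // 'ciphertext' stands in for whatever the block encryptor would produce.
  static byte[] writeEncryptedBlock(byte[] iv, byte[] ciphertext) throws IOException {
    if (iv.length > Byte.MAX_VALUE) {
      throw new IllegalStateException("IV length out of range");
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    out.write(iv.length);  // one byte, where the old code wrote two vints
    out.write(iv);
    out.write(ciphertext);
    return out.toByteArray();
  }
}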

TestHFileEncryption.java

@@ -151,8 +151,9 @@ public class TestHFileEncryption {
     Configuration conf = TEST_UTIL.getConfiguration();
     CacheConfig cacheConf = new CacheConfig(conf);
-    HFileContext fileContext = new HFileContext();
-    fileContext.setEncryptionContext(cryptoContext);
+    HFileContext fileContext = new HFileContextBuilder()
+        .withEncryptionContext(cryptoContext)
+        .build();
     // write a simple encrypted hfile
     Path path = new Path(TEST_UTIL.getDataTestDir(), "cryptometa.hfile");
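
The test change is independent of the header format: it switches from mutating a freshly constructed HFileContext via setters to the HFileContextBuilder. A minimal sketch of that usage, assuming cryptoContext is the Encryption.Context the test configures earlier:

import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class EncryptedFileContextSketch {

  // Builder-based construction, as in the updated test above.
  static HFileContext encryptedContext(Encryption.Context cryptoContext) {
    return new HFileContextBuilder()
        .withEncryptionContext(cryptoContext)
        .build();
  }
}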