diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
index 3858011c18e..ba9021a9f10 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
+import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * A default implementation of {@link HFileBlockDecodingContext}. It assumes the
@@ -64,28 +65,25 @@ public class HFileBlockDefaultDecodingContext implements
 
       // Encrypted block format:
       // +--------------------------+
-      // | vint plaintext length    |
-      // +--------------------------+
-      // | vint iv length           |
+      // | byte iv length           |
       // +--------------------------+
       // | iv data ...              |
       // +--------------------------+
       // | encrypted block data ... |
       // +--------------------------+
 
-      int plaintextLength = StreamUtils.readRawVarint32(in);
-      int ivLength = StreamUtils.readRawVarint32(in);
+      int ivLength = in.read();
       if (ivLength > 0) {
         byte[] iv = new byte[ivLength];
         IOUtils.readFully(in, iv);
         decryptor.setIv(iv);
+        // All encrypted blocks will have a nonzero IV length. If we see an IV
+        // length of zero, this means the encoding context had 0 bytes of
+        // plaintext to encode.
+        decryptor.reset();
+        in = decryptor.createDecryptionStream(in);
       }
-      if (plaintextLength == 0) {
-        return;
-      }
-      decryptor.reset();
-      in = decryptor.createDecryptionStream(in);
-      onDiskSizeWithoutHeader = plaintextLength;
+      onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
     }
 
     Compression.Algorithm compression = fileContext.getCompression();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
index 7a72ba6d938..8386377bb14 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.crypto.Encryptor;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
-import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.Compressor;
 
@@ -165,9 +164,7 @@ public class HFileBlockDefaultEncodingContext implements
 
       // Encrypted block format:
       // +--------------------------+
-      // | vint plaintext length    |
-      // +--------------------------+
-      // | vint iv length           |
+      // | byte iv length           |
      // +--------------------------+
       // | iv data ...              |
       // +--------------------------+
@@ -199,29 +196,29 @@ public class HFileBlockDefaultEncodingContext implements
 
       if (plaintextLength > 0) {
 
+        // Set up the cipher
         Cipher cipher = cryptoContext.getCipher();
         Encryptor encryptor = cipher.getEncryptor();
         encryptor.setKey(cryptoContext.getKey());
 
-        // Write the encryption header and IV (plaintext)
+        // Set up the IV
         int ivLength = iv.length;
-        StreamUtils.writeRawVInt32(cryptoByteStream, plaintextLength);
-        StreamUtils.writeRawVInt32(cryptoByteStream, ivLength);
+        Preconditions.checkState(ivLength <= Byte.MAX_VALUE, "IV length out of range");
+        cryptoByteStream.write(ivLength);
 
         if (ivLength > 0) {
           Encryption.incrementIv(iv);
           encryptor.setIv(iv);
           cryptoByteStream.write(iv);
         }
 
-        // Write the block contents (ciphertext)
+        // Encrypt the data
        Encryption.encrypt(cryptoByteStream, in, encryptor);
 
         onDiskBytesWithHeader = cryptoByteStream.toByteArray();
 
       } else {
-        StreamUtils.writeRawVInt32(cryptoByteStream, 0);
-        StreamUtils.writeRawVInt32(cryptoByteStream, 0);
+        cryptoByteStream.write(0);
         onDiskBytesWithHeader = cryptoByteStream.toByteArray();
       }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index e229adc42d2..3556b79303b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -151,8 +151,9 @@ public class TestHFileEncryption {
 
     Configuration conf = TEST_UTIL.getConfiguration();
     CacheConfig cacheConf = new CacheConfig(conf);
-    HFileContext fileContext = new HFileContext();
-    fileContext.setEncryptionContext(cryptoContext);
+    HFileContext fileContext = new HFileContextBuilder()
+      .withEncryptionContext(cryptoContext)
+      .build();
 
     // write a simple encrypted hfile
     Path path = new Path(TEST_UTIL.getDataTestDir(), "cryptometa.hfile");
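For reviewers, here is a minimal standalone sketch of the new framing this patch introduces. It deliberately uses the JDK's javax.crypto AES/CTR classes rather than HBase's Encryption/Encryptor/Decryptor wrappers, and the class and helper names (IvFramingSketch, writeBlock, readBlock) are hypothetical; the point is only to show how the single-byte IV-length header replaces the two vints, with a zero IV length standing in for "zero bytes of plaintext".

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;

public class IvFramingSketch {

  // Hypothetical writer: emits [byte iv length][iv][ciphertext], mirroring
  // the new block format. Zero bytes of plaintext are framed as a single
  // zero byte, which is why a zero IV length is unambiguous on the read side.
  static byte[] writeBlock(byte[] plaintext, SecretKeySpec key, byte[] iv) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    if (plaintext.length > 0) {
      if (iv.length > Byte.MAX_VALUE) {
        throw new IllegalStateException("IV length out of range");
      }
      out.write(iv.length);     // single-byte header replaces the two vints
      out.write(iv);
      Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
      cipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
      out.write(cipher.doFinal(plaintext));
    } else {
      out.write(0);             // zero IV length == empty block payload
    }
    return out.toByteArray();
  }

  // Hypothetical reader: the IV length doubles as the "any data?" flag, so
  // no separate plaintext-length field is needed; everything after the IV
  // is ciphertext.
  static byte[] readBlock(byte[] block, SecretKeySpec key) throws Exception {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(block));
    int ivLength = in.read();
    if (ivLength <= 0) {
      return new byte[0];       // encoder saw zero bytes of plaintext
    }
    byte[] iv = new byte[ivLength];
    in.readFully(iv);
    Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
    cipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(iv));
    return new CipherInputStream(in, cipher).readAllBytes();
  }

  public static void main(String[] args) throws Exception {
    SecretKeySpec key = new SecretKeySpec(new byte[16], "AES"); // demo-only key
    byte[] iv = new byte[16];                                   // demo-only IV
    byte[] block = writeBlock("hello".getBytes(), key, iv);
    System.out.println(new String(readBlock(block, key)));      // prints "hello"
  }
}
```

Note how the same change plays out on the decode side of the patch: with the stored plaintext length gone, the decoder recovers the ciphertext size arithmetically, via onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength, instead of reading it from the stream.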