diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java index 6f9501647c1..ac9612c58ad 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableUtils; @@ -59,7 +60,11 @@ public class HFileReaderV3 extends HFileReaderV2 { final long size, final CacheConfig cacheConf, DataBlockEncoding preferredEncodingInCache, final HFileSystem hfs) throws IOException { super(path, trailer, fsdis, size, cacheConf, preferredEncodingInCache, hfs); - + byte[] tmp = fileInfo.get(FileInfo.MAX_TAGS_LEN); + // If the max tag length is not present in the HFile, tags were not written to the file at all.
+ if (tmp != null) { + hfileContext.setIncludesTags(true); + } } @Override @@ -68,7 +73,6 @@ public class HFileReaderV3 extends HFileReaderV2 { .withIncludesMvcc(this.includesMemstoreTS) .withHBaseCheckSum(true) .withCompressionAlgo(this.compressAlgo) - .withIncludesTags(true) .build(); return hfileContext; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java index aa336143b93..5a37db31e4b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java @@ -93,6 +93,7 @@ public class HFileWriterV2 extends AbstractHFileWriter { public Writer createWriter(FileSystem fs, Path path, FSDataOutputStream ostream, KVComparator comparator, HFileContext context) throws IOException { + context.setIncludesTags(false);// HFile V2 does not deal with tags at all! return new HFileWriterV2(conf, cacheConf, fs, path, ostream, comparator, context); } @@ -113,7 +114,7 @@ public class HFileWriterV2 extends AbstractHFileWriter { if (fsBlockWriter != null) throw new IllegalStateException("finishInit called twice"); - fsBlockWriter = createBlockWriter(); + fsBlockWriter = new HFileBlock.Writer(blockEncoder, hFileContext); // Data block index writer boolean cacheIndexesOnWrite = cacheConf.shouldCacheIndexesOnWrite(); @@ -129,11 +130,6 @@ public class HFileWriterV2 extends AbstractHFileWriter { if (LOG.isTraceEnabled()) LOG.trace("Initialized with " + cacheConf); } - protected HFileBlock.Writer createBlockWriter() { - // HFile filesystem-level (non-caching) block writer - hFileContext.setIncludesTags(false); - return new HFileBlock.Writer(blockEncoder, hFileContext); - } /** * At a block boundary, write all the inline blocks and opens new block. 
* diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java index 3cfd6e0ea92..8759044488f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java @@ -39,7 +39,6 @@ import org.apache.hadoop.io.WritableUtils; @InterfaceAudience.Private public class HFileWriterV3 extends HFileWriterV2 { - // TODO : Use this to track maxtaglength private int maxTagsLength = 0; static class WriterFactoryV3 extends HFile.WriterFactory { @@ -182,13 +181,6 @@ public class HFileWriterV3 extends HFileWriterV2 { } } - @Override - protected HFileBlock.Writer createBlockWriter() { - // HFile filesystem-level (non-caching) block writer - hFileContext.setIncludesTags(true); - return new HFileBlock.Writer(blockEncoder, hFileContext); - } - @Override protected int getMajorVersion() { return 3; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java index c5b0d24b60d..a08c5deb82e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java @@ -310,7 +310,8 @@ public class TestCacheOnWrite { HFileContext meta = new HFileContextBuilder().withCompressionAlgo(compress) .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL) .withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncodingInCache(encoder.getEncodingInCache()) - .withDataBlockEncodingOnDisk(encoder.getEncodingOnDisk()).build(); + .withDataBlockEncodingOnDisk(encoder.getEncodingOnDisk()) + .withIncludesTags(useTags).build(); StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs) .withOutputDir(storeFileParentDir).withComparator(KeyValue.COMPARATOR) 
.withFileContext(meta) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java index a5b375066a2..172ea7960ef 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java @@ -117,6 +117,7 @@ public class TestHFileWriterV3 { Algorithm compressAlgo, int entryCount, boolean findMidKey, boolean useTags) throws IOException { HFileContext context = new HFileContextBuilder() .withBlockSize(4096) + .withIncludesTags(useTags) .withCompressionAlgo(compressAlgo).build(); HFileWriterV3 writer = (HFileWriterV3) new HFileWriterV3.WriterFactoryV3(conf, new CacheConfig(conf)) @@ -235,9 +236,11 @@ public class TestHFileWriterV3 { byte[] value = new byte[valueLen]; buf.get(value); byte[] tagValue = null; - int tagLen = buf.getShort(); - tagValue = new byte[tagLen]; - buf.get(tagValue); + if (useTags) { + int tagLen = buf.getShort(); + tagValue = new byte[tagLen]; + buf.get(tagValue); + } if (includeMemstoreTS) { ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(), buf.arrayOffset() diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java index 3c7af4171d7..e8f3f20dddd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -76,7 +76,8 @@ public class TestSeekTo extends HBaseTestCase { } FSDataOutputStream fout = this.fs.create(ncTFile); int blocksize = toKV("a", tagUsage).getLength() * 3; - HFileContext context = new HFileContextBuilder().withBlockSize(blocksize).build(); + HFileContext context = new HFileContextBuilder().withBlockSize(blocksize) + .withIncludesTags(true).build(); HFile.Writer 
writer = HFile.getWriterFactoryNoCache(conf).withOutputStream(fout) .withFileContext(context) // NOTE: This test is dependent on this deprecated nonstandard