HBASE-9766 HFileV3 - Optional tags write and read is not working as expected

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1532678 13f79535-47bb-0310-9956-ffa450edef68
Author: anoopsamjohn
Date: 2013-10-16 08:07:34 +00:00
Parent: 743e2fbed9
Commit: 4a4778c2dc
6 changed files with 18 additions and 21 deletions
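In short: the v2 and v3 writers previously overrode a createBlockWriter() hook that hard-coded the tags flag (false for v2, true for v3), and HFileReaderV3 likewise built its read context with tags unconditionally enabled, so the includesTags setting on a caller-supplied HFileContext was ignored. After this change the flag is taken from the supplied context, the v2 writer factory force-clears it (v2 has no on-disk tag format), and the v3 reader infers it from the MAX_TAGS_LEN file-info entry. A minimal opt-in sketch using the builder and factory calls that appear in these diffs; the setup (conf, fout) is assumed as in the tests below, and the final create() call is an assumption, not shown in this commit:

HFileContext context = new HFileContextBuilder()
    .withBlockSize(4096)
    .withIncludesTags(true)   // honored by v3 writers; v2 force-clears it
    .build();
HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
    .withOutputStream(fout)
    .withFileContext(context)
    .create();                // assumed factory terminal call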

HFileReaderV3.java

@@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+ import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;
@@ -59,7 +60,11 @@ public class HFileReaderV3 extends HFileReaderV2 {
final long size, final CacheConfig cacheConf, DataBlockEncoding preferredEncodingInCache,
final HFileSystem hfs) throws IOException {
super(path, trailer, fsdis, size, cacheConf, preferredEncodingInCache, hfs);
+ byte[] tmp = fileInfo.get(FileInfo.MAX_TAGS_LEN);
+ // If MAX_TAGS_LEN is absent from the file info, tags were never written to this file.
+ if (tmp != null) {
+ hfileContext.setIncludesTags(true);
+ }
}
@Override
@@ -68,7 +73,6 @@ public class HFileReaderV3 extends HFileReaderV2 {
.withIncludesMvcc(this.includesMemstoreTS)
.withHBaseCheckSum(true)
.withCompressionAlgo(this.compressAlgo)
- .withIncludesTags(true)
.build();
return hfileContext;
}
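The reader no longer assumes tags: the mere presence of the MAX_TAGS_LEN file-info key is the signal. A hypothetical round-trip sketch of that contract; the writer-side append is not part of this commit, and FileInfo.append(byte[], byte[], boolean) and isIncludesTags() are assumptions about the surrounding API:

// Writer side (v3, hypothetical): record the key only when tags were serialized.
if (hFileContext.isIncludesTags()) {
  fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(maxTagsLength), false);
}

// Reader side (from the constructor above): presence alone re-enables tag decoding.
if (fileInfo.get(FileInfo.MAX_TAGS_LEN) != null) {
  hfileContext.setIncludesTags(true);
}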

HFileWriterV2.java

@@ -93,6 +93,7 @@ public class HFileWriterV2 extends AbstractHFileWriter {
public Writer createWriter(FileSystem fs, Path path,
FSDataOutputStream ostream,
KVComparator comparator, HFileContext context) throws IOException {
+ context.setIncludesTags(false); // HFile V2 does not deal with tags at all!
return new HFileWriterV2(conf, cacheConf, fs, path, ostream,
comparator, context);
}
@@ -113,7 +114,7 @@ public class HFileWriterV2 extends AbstractHFileWriter {
if (fsBlockWriter != null)
throw new IllegalStateException("finishInit called twice");
- fsBlockWriter = createBlockWriter();
+ fsBlockWriter = new HFileBlock.Writer(blockEncoder, hFileContext);
// Data block index writer
boolean cacheIndexesOnWrite = cacheConf.shouldCacheIndexesOnWrite();
@@ -129,11 +130,6 @@ public class HFileWriterV2 extends AbstractHFileWriter {
if (LOG.isTraceEnabled()) LOG.trace("Initialized with " + cacheConf);
}
- protected HFileBlock.Writer createBlockWriter() {
- // HFile filesystem-level (non-caching) block writer
- hFileContext.setIncludesTags(false);
- return new HFileBlock.Writer(blockEncoder, hFileContext);
- }
/**
* At a block boundary, write all the inline blocks and opens new block.
*

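Because the v2 factory now clears the flag itself instead of relying on the removed createBlockWriter() override, a tag-bearing context handed to a v2 writer is silently downgraded. An illustrative sketch; factory stands for the WriterFactory shown above, and isIncludesTags() is an assumed accessor:

HFileContext ctx = new HFileContextBuilder().withIncludesTags(true).build();
HFile.Writer w = factory.createWriter(fs, path, ostream, KeyValue.COMPARATOR, ctx);
// ctx.isIncludesTags() is now false -- note that the factory mutates the
// caller's context in place rather than copying it.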
HFileWriterV3.java

@@ -39,7 +39,6 @@ import org.apache.hadoop.io.WritableUtils;
@InterfaceAudience.Private
public class HFileWriterV3 extends HFileWriterV2 {
// TODO: Use this to track maxTagsLength
private int maxTagsLength = 0;
static class WriterFactoryV3 extends HFile.WriterFactory {
@@ -182,13 +181,6 @@ public class HFileWriterV3 extends HFileWriterV2 {
}
}
- @Override
- protected HFileBlock.Writer createBlockWriter() {
- // HFile filesystem-level (non-caching) block writer
- hFileContext.setIncludesTags(true);
- return new HFileBlock.Writer(blockEncoder, hFileContext);
- }
@Override
protected int getMajorVersion() {
return 3;

TestCacheOnWrite.java

@@ -310,7 +310,8 @@ public class TestCacheOnWrite {
HFileContext meta = new HFileContextBuilder().withCompressionAlgo(compress)
.withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
.withBlockSize(DATA_BLOCK_SIZE).withDataBlockEncodingInCache(encoder.getEncodingInCache())
- .withDataBlockEncodingOnDisk(encoder.getEncodingOnDisk()).build();
+ .withDataBlockEncodingOnDisk(encoder.getEncodingOnDisk())
+ .withIncludesTags(useTags).build();
StoreFile.Writer sfw = new StoreFile.WriterBuilder(conf, cacheConf, fs)
.withOutputDir(storeFileParentDir).withComparator(KeyValue.COMPARATOR)
.withFileContext(meta)

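For withIncludesTags(useTags) to exercise anything, the appended cells must actually carry tags. A hypothetical example of building such a cell; the Tag(byte, String) constructor and the tag-accepting KeyValue constructor are assumptions about the 0.96-era API, not shown in this diff, and row/family/qualifier/value are taken from the test's setup:

List<Tag> tags = new ArrayList<Tag>();
tags.add(new Tag((byte) 1, "visibility"));   // tag type 1, illustrative payload
KeyValue kv = new KeyValue(row, family, qualifier,
    HConstants.LATEST_TIMESTAMP, value, tags);
sfw.append(kv);                              // sfw is the StoreFile.Writer above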
TestHFileWriterV3.java

@@ -117,6 +117,7 @@ public class TestHFileWriterV3 {
Algorithm compressAlgo, int entryCount, boolean findMidKey, boolean useTags) throws IOException {
HFileContext context = new HFileContextBuilder()
.withBlockSize(4096)
+ .withIncludesTags(useTags)
.withCompressionAlgo(compressAlgo).build();
HFileWriterV3 writer = (HFileWriterV3)
new HFileWriterV3.WriterFactoryV3(conf, new CacheConfig(conf))
@@ -235,9 +236,11 @@ public class TestHFileWriterV3 {
byte[] value = new byte[valueLen];
buf.get(value);
byte[] tagValue = null;
+ if (useTags) {
int tagLen = buf.getShort();
tagValue = new byte[tagLen];
buf.get(tagValue);
+ }
if (includeMemstoreTS) {
ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(), buf.arrayOffset()

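The parsing change above reflects the v3 cell layout: after the key and value bytes, a 2-byte tags length followed by the tag bytes appears only when the file carries tags. A self-contained sketch of that optional suffix, mirroring the test's ByteBuffer walk (the method and variable names are mine):

import java.nio.ByteBuffer;

// Decode the optional tags suffix of one v3 cell. The buffer is assumed to be
// positioned just past the value bytes, as in the test loop above.
static byte[] readOptionalTags(ByteBuffer buf, boolean fileHasTags) {
  if (!fileHasTags) {
    return null;                  // untagged v3 file (or v2): nothing follows
  }
  int tagsLen = buf.getShort();   // tags block length is a 2-byte short
  byte[] tags = new byte[tagsLen];
  buf.get(tags);
  return tags;
}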
TestSeekTo.java

@@ -76,7 +76,8 @@ public class TestSeekTo extends HBaseTestCase {
}
FSDataOutputStream fout = this.fs.create(ncTFile);
int blocksize = toKV("a", tagUsage).getLength() * 3;
- HFileContext context = new HFileContextBuilder().withBlockSize(blocksize).build();
+ HFileContext context = new HFileContextBuilder().withBlockSize(blocksize)
+ .withIncludesTags(true).build();
HFile.Writer writer = HFile.getWriterFactoryNoCache(conf).withOutputStream(fout)
.withFileContext(context)
// NOTE: This test is dependent on this deprecated nonstandard