From d6105b41dbb9a65be13987269b39ecd3da98f1a0 Mon Sep 17 00:00:00 2001 From: ramkrishna Date: Mon, 11 Nov 2013 17:08:04 +0000 Subject: [PATCH] HBASE-9816-Address review comments in HBASE-8496 (Ram) git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1540785 13f79535-47bb-0310-9956-ffa450edef68 --- .../hadoop/hbase/protobuf/ProtobufUtil.java | 2 +- .../org/apache/hadoop/hbase/CellUtil.java | 38 ++++++++++++++++ .../org/apache/hadoop/hbase/KeyValue.java | 44 +++---------------- .../java/org/apache/hadoop/hbase/Tag.java | 22 +++++----- .../io/encoding/BufferedDataBlockEncoder.java | 22 +++++----- .../io/encoding/CopyKeyDataBlockEncoder.java | 2 +- .../io/encoding/DiffKeyDeltaEncoder.java | 3 +- .../hbase/io/encoding/EncodedDataBlock.java | 4 +- .../io/encoding/FastDiffDeltaEncoder.java | 3 +- .../io/encoding/PrefixKeyDeltaEncoder.java | 3 +- .../hadoop/hbase/io/hfile/HFileContext.java | 15 +++---- .../hbase/io/hfile/HFileContextBuilder.java | 8 ++-- .../org/apache/hadoop/hbase/TestKeyValue.java | 7 +-- .../codec/prefixtree/PrefixTreeCodec.java | 8 ++-- .../encode/other/ColumnNodeType.java | 10 ++--- .../hbase/io/hfile/AbstractHFileReader.java | 1 - .../hadoop/hbase/io/hfile/ChecksumUtil.java | 2 +- .../hadoop/hbase/io/hfile/HFileBlock.java | 34 +++++++------- .../hadoop/hbase/io/hfile/HFileReaderV2.java | 10 ++--- .../hadoop/hbase/io/hfile/HFileReaderV3.java | 20 ++++----- .../hadoop/hbase/io/hfile/HFileWriterV2.java | 4 +- .../hadoop/hbase/io/hfile/HFileWriterV3.java | 8 ++-- .../hbase/mapreduce/HFileOutputFormat.java | 2 +- .../mapreduce/LoadIncrementalHFiles.java | 3 +- .../hadoop/hbase/regionserver/HStore.java | 2 +- .../hadoop/hbase/util/CompressionTest.java | 2 +- .../io/encoding/TestDataBlockEncoders.java | 8 ++-- .../io/encoding/TestPrefixTreeEncoding.java | 8 ++-- .../hadoop/hbase/io/hfile/CacheTestUtils.java | 2 +- .../hbase/io/hfile/TestCacheOnWrite.java | 2 +- .../hadoop/hbase/io/hfile/TestChecksum.java | 8 ++-- .../hadoop/hbase/io/hfile/TestHFile.java | 6 +-- .../hadoop/hbase/io/hfile/TestHFileBlock.java | 20 ++++----- .../io/hfile/TestHFileBlockCompatibility.java | 6 +-- .../hbase/io/hfile/TestHFileBlockIndex.java | 6 +-- .../io/hfile/TestHFileDataBlockEncoder.java | 6 +-- .../hbase/io/hfile/TestHFilePerformance.java | 4 +- .../hadoop/hbase/io/hfile/TestHFileSeek.java | 2 +- .../hbase/io/hfile/TestHFileWriterV2.java | 4 +- .../hbase/io/hfile/TestHFileWriterV3.java | 4 +- .../mapreduce/TestLoadIncrementalHFiles.java | 2 +- .../regionserver/CreateRandomStoreFile.java | 2 +- .../regionserver/DataBlockEncodingTool.java | 2 +- .../TestHRegionServerBulkLoad.java | 2 +- 44 files changed, 185 insertions(+), 188 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index eeb090d32e2..c4cbebe0f9e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -511,7 +511,7 @@ public final class ProtobufUtil { byte[] tags; if (qv.hasTags()) { tags = qv.getTags().toByteArray(); - Object[] array = Tag.createTags(tags, 0, (short)tags.length).toArray(); + Object[] array = Tag.asList(tags, 0, (short)tags.length).toArray(); Tag[] tagArray = new Tag[array.length]; for(int i = 0; i< array.length; i++) { tagArray[i] = (Tag)array[i]; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 6d26510cd16..77fb8e9f6d9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -376,4 +376,42 @@ public final class CellUtil { // Serialization is probably preceded by a length (it is in the KeyValueCodec at least). Bytes.SIZEOF_INT; } + + + /********************* tags *************************************/ + /** + * Utility method to iterate through the tags in the given byte array. Used in testcases. + * + * @param tags the byte array holding the serialized tags + * @param offset offset of the first tag within the array + * @param length total serialized length of the tags + * @return an iterator over the tags + */ + public static Iterator<Tag> tagsIterator(final byte[] tags, final int offset, final short length) { + return new Iterator<Tag>() { + private int pos = offset; + private int endOffset = offset + length - 1; + + @Override + public boolean hasNext() { + return this.pos < endOffset; + } + + @Override + public Tag next() { + if (hasNext()) { + short curTagLen = Bytes.toShort(tags, this.pos); + Tag tag = new Tag(tags, pos, (short) (curTagLen + Bytes.SIZEOF_SHORT)); + this.pos += Bytes.SIZEOF_SHORT + curTagLen; + return tag; + } + return null; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index b15021aafce..f2d777c60c6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -31,7 +31,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; @@ -80,6 +79,8 @@ import com.google.common.primitives.Longs; */ @InterfaceAudience.Private public class KeyValue implements Cell, HeapSize, Cloneable { + private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<Tag>(); + static final Log LOG = LogFactory.getLog(KeyValue.class); /** @@ -254,38 +255,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable { } } - /** - * @return an iterator over the tags in this KeyValue. - */ - public Iterator<Tag> tagsIterator() { - // Subtract -1 to point to the end of the complete tag byte[] - final int endOffset = this.offset + this.length - 1; - return new Iterator<Tag>() { - private int pos = getTagsOffset(); - - @Override - public boolean hasNext() { - return this.pos < endOffset; - } - - @Override - public Tag next() { - if (hasNext()) { - short curTagLen = Bytes.toShort(bytes, this.pos); - Tag tag = new Tag(bytes, pos, (short) (curTagLen + Bytes.SIZEOF_SHORT)); - this.pos += Bytes.SIZEOF_SHORT + curTagLen; - return tag; - } - return null; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - /** * Lowest possible key. * Makes a Key with highest possible Timestamp, empty row and column. No @@ -1618,18 +1587,15 @@ public class KeyValue implements Cell, HeapSize, Cloneable { } /** - * This method may not be right. But we cannot use the CellUtil.getTagIterator because we don't know - * getKeyOffset and getKeyLength - * Cannnot use the getKeyOffset and getKeyLength in CellUtil as they are not part of the Cell interface. - * Returns any tags embedded in the KeyValue. + * Returns any tags embedded in the KeyValue. Used in testcases.
* @return The tags */ public List<Tag> getTags() { short tagsLength = getTagsLength(); if (tagsLength == 0) { - return new ArrayList<Tag>(); + return EMPTY_ARRAY_LIST; } - return Tag.createTags(getBuffer(), getTagsOffset(), tagsLength); + return Tag.asList(getBuffer(), getTagsOffset(), tagsLength); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java index 380baa8f686..c5acbd24d42 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java @@ -25,11 +25,9 @@ import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; - /** - * <taglength><tagtype><tagbytes>. tagtype is - * one byte and taglength maximum is Short.MAX_SIZE. - * It includes 1 byte type length and actual tag bytes length. + * Tags are part of cells and help to add metadata about the cells (KVs). + * This metadata could be per-cell ACLs, visibility labels, etc. */ @InterfaceAudience.Private @InterfaceStability.Evolving @@ -38,8 +36,8 @@ public class Tag { public final static int TAG_LENGTH_SIZE = Bytes.SIZEOF_SHORT; public final static int INFRASTRUCTURE_SIZE = TYPE_LENGTH_SIZE + TAG_LENGTH_SIZE; - private byte type; - private byte[] bytes; + private final byte type; + private final byte[] bytes; private int offset = 0; private short length = 0; @@ -56,7 +54,9 @@ public class Tag { * @param tag */ public Tag(byte tagType, byte[] tag) { - // + /* + * The tag length is at most Short.MAX_VALUE. It includes 1 byte for the type and the length of the actual tag bytes. + */ short tagLength = (short) ((tag.length & 0x0000ffff) + TYPE_LENGTH_SIZE); length = (short) (TAG_LENGTH_SIZE + tagLength); bytes = new byte[length]; @@ -119,14 +119,14 @@ public class Tag { /** * @return Length of actual tag bytes within the backed buffer */ - public int getTagLength() { + int getTagLength() { return this.length - INFRASTRUCTURE_SIZE; } /** * @return Offset of actual tag bytes within the backed buffer */ - public int getTagOffset() { + int getTagOffset() { return this.offset + INFRASTRUCTURE_SIZE; } @@ -145,7 +145,7 @@ public class Tag { * @param length * @return List of tags */ - public static List<Tag> createTags(byte[] b, int offset, short length) { + public static List<Tag> asList(byte[] b, int offset, short length) { List<Tag> tags = new ArrayList<Tag>(); int pos = offset; while (pos < offset + length) { @@ -169,4 +169,4 @@ public class Tag { int getOffset() { return this.offset; } -} \ No newline at end of file +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index 5578e0132fd..8ccab7fd87f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -52,7 +52,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { HFileBlockDefaultDecodingContext decodingCtx = (HFileBlockDefaultDecodingContext) blkDecodingCtx; - if (decodingCtx.getHFileContext().shouldCompressTags()) { + if (decodingCtx.getHFileContext().isCompressTags()) { try { TagCompressionContext tagCompressionContext = new TagCompressionContext(LRUDictionary.class); decodingCtx.setTagCompressionContext(tagCompressionContext); @@ -162,7 +162,7
@@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { this.samePrefixComparator = null; } this.decodingCtx = decodingCtx; - if (decodingCtx.getHFileContext().shouldCompressTags()) { + if (decodingCtx.getHFileContext().isCompressTags()) { try { tagCompressionContext = new TagCompressionContext(LRUDictionary.class); } catch (Exception e) { @@ -172,11 +172,11 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } protected boolean includesMvcc() { - return this.decodingCtx.getHFileContext().shouldIncludeMvcc(); + return this.decodingCtx.getHFileContext().isIncludesMvcc(); } protected boolean includesTags() { - return this.decodingCtx.getHFileContext().shouldIncludeTags(); + return this.decodingCtx.getHFileContext().isIncludesTags(); } @Override @@ -264,7 +264,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { return true; } - public void decodeTags() { + protected void decodeTags() { current.tagsLength = ByteBufferUtils.readCompressedInt(currentBuffer); if (tagCompressionContext != null) { // Tag compression is been used. uncompress it into tagsBuffer @@ -373,7 +373,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { protected final void afterEncodingKeyValue(ByteBuffer in, DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) throws IOException { - if (encodingCtx.getHFileContext().shouldIncludeTags()) { + if (encodingCtx.getHFileContext().isIncludesTags()) { short tagsLength = in.getShort(); ByteBufferUtils.putCompressedInt(out, tagsLength); // There are some tags to be written @@ -388,7 +388,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } } } - if (encodingCtx.getHFileContext().shouldIncludeMvcc()) { + if (encodingCtx.getHFileContext().isIncludesMvcc()) { // Copy memstore timestamp from the byte buffer to the output stream. 
long memstoreTS = -1; try { @@ -403,7 +403,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { protected final void afterDecodingKeyValue(DataInputStream source, ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException { - if (decodingCtx.getHFileContext().shouldIncludeTags()) { + if (decodingCtx.getHFileContext().isIncludesTags()) { short tagsLength = (short) ByteBufferUtils.readCompressedInt(source); dest.putShort(tagsLength); if (tagsLength > 0) { @@ -417,7 +417,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } } } - if (decodingCtx.getHFileContext().shouldIncludeMvcc()) { + if (decodingCtx.getHFileContext().isIncludesMvcc()) { long memstoreTS = -1; try { // Copy memstore timestamp from the data input stream to the byte @@ -452,7 +452,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { public abstract void internalEncodeKeyValues(DataOutputStream out, ByteBuffer in, HFileBlockDefaultEncodingContext encodingCtx) throws IOException; - public abstract ByteBuffer internalDecodeKeyValues(DataInputStream source, + protected abstract ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength, int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException; @@ -471,7 +471,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { DataOutputStream dataOut = ((HFileBlockDefaultEncodingContext) encodingCtx) .getOutputStreamForEncoder(); - if (encodingCtx.getHFileContext().shouldCompressTags()) { + if (encodingCtx.getHFileContext().isCompressTags()) { try { TagCompressionContext tagCompressionContext = new TagCompressionContext(LRUDictionary.class); encodingCtx.setTagCompressionContext(tagCompressionContext); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java index b660f423b01..1dc8413181e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java @@ -88,7 +88,7 @@ public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder { } @Override - public ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength, + protected ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength, int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException { int decompressedSize = source.readInt(); ByteBuffer buffer = ByteBuffer.allocate(decompressedSize + diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java index 144501140c4..f72878bcb6a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java @@ -26,7 +26,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.io.RawComparator; /** * Compress using: @@ -534,7 +533,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public ByteBuffer internalDecodeKeyValues(DataInputStream 
source, int allocateHeaderLength, + protected ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength, int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException { int decompressedSize = source.readInt(); ByteBuffer buffer = ByteBuffer.allocate(decompressedSize + diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java index e75b32efc98..9e832368b5f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java @@ -113,13 +113,13 @@ public class EncodedDataBlock { short tagsLen = 0; ByteBufferUtils.skip(decompressedData, klen + vlen); // Read the tag length in case when steam contain tags - if (meta.shouldIncludeTags()) { + if (meta.isIncludesTags()) { tagsLen = decompressedData.getShort(); ByteBufferUtils.skip(decompressedData, tagsLen); } KeyValue kv = new KeyValue(decompressedData.array(), offset, (int) KeyValue.getKeyValueDataStructureSize(klen, vlen, tagsLen)); - if (meta.shouldIncludeMvcc()) { + if (meta.isIncludesMvcc()) { long mvccVersion = ByteBufferUtils.readVLong(decompressedData); kv.setMvccVersion(mvccVersion); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java index 559db7c0d92..0346b201bde 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java @@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.io.RawComparator; /** * Encoder similar to {@link DiffKeyDeltaEncoder} but supposedly faster. 
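Note on the encoder changes above: afterEncodingKeyValue() and afterDecodingKeyValue() gate two optional per-cell trailing fields on the renamed HFileContext flags. When isIncludesTags() is true, each cell is followed by a 2-byte tags length and the serialized tags; when isIncludesMvcc() is true, the memstore timestamp follows as a Hadoop vlong. Below is a minimal sketch of that trailer, mirroring what HFileWriterV3.append() does further down in this patch; the class is illustrative only (not part of the patch) and assumes nothing beyond hadoop-common's WritableUtils.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

// Illustrative only -- not part of this patch.
public class CellTrailerSketch {
  /** Writes the optional per-cell trailer: [tagsLen:short][tags...][memstoreTS:vlong]. */
  static void writeTrailer(DataOutputStream out, byte[] tags, long memstoreTS,
      boolean includesTags, boolean includesMvcc) throws IOException {
    if (includesTags) {
      // Gated by HFileContext.isIncludesTags() in the real writer.
      out.writeShort((short) tags.length);
      out.write(tags, 0, tags.length);
    }
    if (includesMvcc) {
      // Gated by HFileContext.isIncludesMvcc(); the vlong keeps small timestamps to one byte.
      WritableUtils.writeVLong(out, memstoreTS);
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    writeTrailer(new DataOutputStream(bos), new byte[0], 42L, true, true);
    System.out.println(bos.size()); // 2 (empty tags length) + 1 (vlong of 42) = 3
  }
}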
@@ -362,7 +361,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength, + protected ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength, int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException { int decompressedSize = source.readInt(); ByteBuffer buffer = ByteBuffer.allocate(decompressedSize + diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java index e8a6c4957db..f57ff4fc7e2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java @@ -26,7 +26,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.io.RawComparator; /** * Compress key by storing size of common prefix with previous KeyValue @@ -92,7 +91,7 @@ public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength, + protected ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength, int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException { int decompressedSize = source.readInt(); ByteBuffer buffer = ByteBuffer.allocate(decompressedSize + diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java index 7de89f86cf1..add5fe76436 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java @@ -27,9 +27,8 @@ import org.apache.hadoop.hbase.util.ClassSize; /** * This carries the information on some of the meta data about the HFile. This - * meta data would be used across the HFileWriter/Readers and the HFileBlocks. - * This would help to add new information to the HFile. - * This class is not meant to be immutable. + * meta data is used across the HFileWriter/Readers and the HFileBlocks. + * This helps to add new information to the HFile. */ @InterfaceAudience.Private public class HFileContext implements HeapSize, Cloneable { @@ -96,29 +95,27 @@ public class HFileContext implements HeapSize, Cloneable { return compressAlgo; } - public boolean shouldUseHBaseChecksum() { + public boolean isUseHBaseChecksum() { return usesHBaseChecksum; } - public boolean shouldIncludeMvcc() { + public boolean isIncludesMvcc() { return includesMvcc; } - // TODO : This setter should be removed public void setIncludesMvcc(boolean includesMvcc) { this.includesMvcc = includesMvcc; } - public boolean shouldIncludeTags() { + public boolean isIncludesTags() { return includesTags; } - // TODO : This setter should be removed? 
public void setIncludesTags(boolean includesTags) { this.includesTags = includesTags; } - public boolean shouldCompressTags() { + public boolean isCompressTags() { return compressTags; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java index dcd3e85a7e2..3a95080cbbd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContextBuilder.java @@ -38,7 +38,7 @@ public class HFileContextBuilder { /** Whether tags are to be included in the Read/Write **/ private boolean includesTags; /** Compression algorithm used **/ - private Algorithm compressAlgo = Algorithm.NONE; + private Algorithm compression = Algorithm.NONE; /** Whether tags to be compressed or not **/ private boolean compressTags; /** the checksum type **/ @@ -65,8 +65,8 @@ public class HFileContextBuilder { return this; } - public HFileContextBuilder withCompressionAlgo(Algorithm compressionAlgo) { - this.compressAlgo = compressionAlgo; + public HFileContextBuilder withCompression(Algorithm compression) { + this.compression = compression; return this; } @@ -101,7 +101,7 @@ public class HFileContextBuilder { } public HFileContext build() { - return new HFileContext(usesHBaseChecksum, includesMvcc, includesTags, compressAlgo, + return new HFileContext(usesHBaseChecksum, includesMvcc, includesTags, compression, compressTags, checksumType, bytesPerChecksum, blocksize, encodingOnDisk, encodingInCache); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index f0df4720e5a..f565cd3b7ec 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -560,8 +560,8 @@ public class TestKeyValue extends TestCase { } assertTrue(meta1Ok); assertTrue(meta2Ok); - - Iterator<Tag> tagItr = kv.tagsIterator(); + Iterator<Tag> tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(), + kv.getTagsLength()); assertTrue(tagItr.hasNext()); Tag next = tagItr.next(); assertEquals(10, next.getTagLength()); @@ -574,7 +574,7 @@ Bytes.equals(next.getValue(), metaValue2); assertFalse(tagItr.hasNext()); - tagItr = kv.tagsIterator(); + tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(), kv.getTagsLength()); assertTrue(tagItr.hasNext()); next = tagItr.next(); assertEquals(10, next.getTagLength()); diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java index b209529ec8b..ef576eb361c 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java @@ -79,8 +79,8 @@ public class PrefixTreeCodec implements DataBlockEncoder{ = (HFileBlockDefaultEncodingContext) blkEncodingCtx; encodingCtx.prepareEncoding(); DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder(); - internalEncodeKeyValues(dataOut, in, encodingCtx.getHFileContext().shouldIncludeMvcc(), - encodingCtx.getHFileContext().shouldIncludeTags()); +
internalEncodeKeyValues(dataOut, in, encodingCtx.getHFileContext().isIncludesMvcc(), + encodingCtx.getHFileContext().isIncludesTags()); //do i need to check this, or will it always be DataBlockEncoding.PREFIX_TREE? if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) { @@ -130,7 +130,7 @@ public class PrefixTreeCodec implements DataBlockEncoder{ result.rewind(); CellSearcher searcher = null; try { - boolean includesMvcc = decodingCtx.getHFileContext().shouldIncludeMvcc(); + boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc(); searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvcc); while (searcher.advance()) { KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current()); @@ -199,7 +199,7 @@ public class PrefixTreeCodec implements DataBlockEncoder{ +"table"); } - return new PrefixTreeSeeker(decodingCtx.getHFileContext().shouldIncludeMvcc()); + return new PrefixTreeSeeker(decodingCtx.getHFileContext().isIncludesMvcc()); } } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java index 8946cc10234..f05adff947b 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java @@ -1,7 +1,3 @@ -package org.apache.hadoop.hbase.codec.prefixtree.encode.other; - -import org.apache.hadoop.classification.InterfaceAudience; - /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file @@ -19,10 +15,14 @@ import org.apache.hadoop.classification.InterfaceAudience; * See the License for the specific language governing permissions and * limitations under the License. */ +package org.apache.hadoop.hbase.codec.prefixtree.encode.other; + +import org.apache.hadoop.classification.InterfaceAudience; + /** * Specifies the type of columnnode writer. */ @InterfaceAudience.Private public enum ColumnNodeType { FAMILY, QUALIFIER, TAGS; -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java index 6c337bb532c..88ea7df0801 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java @@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; -import org.apache.hadoop.io.RawComparator; /** * Common functionality needed by all versions of {@link HFile} readers. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java index 8938362f7ac..328221326b0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java @@ -104,7 +104,7 @@ public class ChecksumUtil { // when the minorVersion is 0, thus this is a defensive check for a // cannot-happen case. 
Since this is a cannot-happen case, it is // better to return false to indicate a checksum validation failure. - if (!block.getHFileContext().shouldUseHBaseChecksum()) { + if (!block.getHFileContext().isUseHBaseChecksum()) { return false; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index 2169bf7abdf..41e1de60556 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -398,7 +398,7 @@ public class HFileBlock implements Cacheable { "uncompressedSizeWithoutHeader"); sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlocKOffset"); - if (this.fileContext.shouldUseHBaseChecksum()) { + if (this.fileContext.isUseHBaseChecksum()) { sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(), "checksumType"); sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(), "bytesPerChecksum"); sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, @@ -1024,13 +1024,13 @@ public class HFileBlock implements Cacheable { .withBlockSize(fileContext.getBlocksize()) .withBytesPerCheckSum(0) .withChecksumType(ChecksumType.NULL) // no checksums in cached data - .withCompressionAlgo(fileContext.getCompression()) + .withCompression(fileContext.getCompression()) .withDataBlockEncodingInCache(fileContext.getEncodingInCache()) .withDataBlockEncodingOnDisk(fileContext.getEncodingOnDisk()) - .withHBaseCheckSum(fileContext.shouldUseHBaseChecksum()) - .withCompressTags(fileContext.shouldCompressTags()) - .withIncludesMvcc(fileContext.shouldIncludeMvcc()) - .withIncludesTags(fileContext.shouldIncludeTags()) + .withHBaseCheckSum(fileContext.isUseHBaseChecksum()) + .withCompressTags(fileContext.isCompressTags()) + .withIncludesMvcc(fileContext.isIncludesMvcc()) + .withIncludesTags(fileContext.isIncludesTags()) .build(); return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(), getUncompressedSizeWithoutHeader(), prevOffset, getUncompressedBufferWithHeader(), @@ -1135,7 +1135,7 @@ public class HFileBlock implements Cacheable { this.hfs = hfs; this.path = path; this.fileContext = fileContext; - this.hdrSize = headerSize(fileContext.shouldUseHBaseChecksum()); + this.hdrSize = headerSize(fileContext.isUseHBaseChecksum()); } @Override @@ -1277,7 +1277,7 @@ public class HFileBlock implements Cacheable { super(fileSize, hfs, path, fileContext); this.streamWrapper = stream; // Older versions of HBase didn't support checksum. - this.streamWrapper.prepareForBlockReader(!fileContext.shouldUseHBaseChecksum()); + this.streamWrapper.prepareForBlockReader(!fileContext.isUseHBaseChecksum()); defaultDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext); encodedBlockDecodingCtx = @@ -1449,7 +1449,7 @@ public class HFileBlock implements Cacheable { // from memory if using compression. Here we have already read the // block's header try { - b = new HFileBlock(headerBuf, this.fileContext.shouldUseHBaseChecksum()); + b = new HFileBlock(headerBuf, this.fileContext.isUseHBaseChecksum()); } catch (IOException ex) { // Seen in load testing. Provide comprehensive debug info. 
throw new IOException("Failed to read compressed block at " @@ -1487,7 +1487,7 @@ public class HFileBlock implements Cacheable { readAtOffset(is, headerBuf.array(), headerBuf.arrayOffset(), hdrSize, false, offset, pread); } - b = new HFileBlock(headerBuf, this.fileContext.shouldUseHBaseChecksum()); + b = new HFileBlock(headerBuf, this.fileContext.isUseHBaseChecksum()); onDiskBlock = new byte[b.getOnDiskSizeWithHeader() + hdrSize]; System.arraycopy(headerBuf.array(), headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize); @@ -1534,7 +1534,7 @@ public class HFileBlock implements Cacheable { // contains the header of next block, so no need to set next // block's header in it. b = new HFileBlock(ByteBuffer.wrap(onDiskBlock, 0, - onDiskSizeWithHeader), this.fileContext.shouldUseHBaseChecksum()); + onDiskSizeWithHeader), this.fileContext.isUseHBaseChecksum()); } b.nextBlockOnDiskSizeWithHeader = nextBlockOnDiskSize; @@ -1547,8 +1547,8 @@ public class HFileBlock implements Cacheable { } b.offset = offset; - b.fileContext.setIncludesTags(this.fileContext.shouldIncludeTags()); - b.fileContext.setIncludesMvcc(this.fileContext.shouldIncludeMvcc()); + b.fileContext.setIncludesTags(this.fileContext.isIncludesTags()); + b.fileContext.setIncludesMvcc(this.fileContext.isIncludesMvcc()); return b; } @@ -1596,7 +1596,7 @@ public class HFileBlock implements Cacheable { } public void serializeExtraInfo(ByteBuffer destination) { - destination.put(this.fileContext.shouldUseHBaseChecksum() ? (byte) 1 : (byte) 0); + destination.put(this.fileContext.isUseHBaseChecksum() ? (byte) 1 : (byte) 0); destination.putLong(this.offset); destination.putInt(this.nextBlockOnDiskSizeWithHeader); destination.rewind(); @@ -1679,7 +1679,7 @@ public class HFileBlock implements Cacheable { // data to validate. Similarly, a zero value in this.bytesPerChecksum // indicates that cached blocks do not have checksum data because // checksums were already validated when the block was read from disk. - if (!fileContext.shouldUseHBaseChecksum() || this.fileContext.getBytesPerChecksum() == 0) { + if (!fileContext.isUseHBaseChecksum() || this.fileContext.getBytesPerChecksum() == 0) { return 0; } return (int)ChecksumUtil.numBytes(onDiskDataSizeWithHeader, this.fileContext.getBytesPerChecksum()); @@ -1689,7 +1689,7 @@ public class HFileBlock implements Cacheable { * Returns the size of this block header. 
*/ public int headerSize() { - return headerSize(this.fileContext.shouldUseHBaseChecksum()); + return headerSize(this.fileContext.isUseHBaseChecksum()); } /** @@ -1706,7 +1706,7 @@ public class HFileBlock implements Cacheable { * Return the appropriate DUMMY_HEADER for the minor version */ public byte[] getDummyHeaderForVersion() { - return getDummyHeaderForVersion(this.fileContext.shouldUseHBaseChecksum()); + return getDummyHeaderForVersion(this.fileContext.isUseHBaseChecksum()); } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java index 949a2176fce..b1bdcd6f657 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java @@ -182,7 +182,7 @@ public class HFileReaderV2 extends AbstractHFileReader { protected HFileContext createHFileContext(FixedFileTrailer trailer) { HFileContext hFileContext = new HFileContextBuilder() .withIncludesMvcc(this.includesMemstoreTS) - .withCompressionAlgo(this.compressAlgo) + .withCompression(this.compressAlgo) .withHBaseCheckSum(trailer.getMinorVersion() >= MINOR_VERSION_WITH_CHECKSUM) .build(); return hFileContext; @@ -651,14 +651,14 @@ public class HFileReaderV2 extends AbstractHFileReader { return null; KeyValue ret = new KeyValue(blockBuffer.array(), blockBuffer.arrayOffset() - + blockBuffer.position(), getKvBufSize(), currKeyLen); + + blockBuffer.position(), getCellBufSize(), currKeyLen); if (this.reader.shouldIncludeMemstoreTS()) { ret.setMvccVersion(currMemstoreTS); } return ret; } - protected int getKvBufSize() { + protected int getCellBufSize() { return KEY_VALUE_LEN_SIZE + currKeyLen + currValueLen; } @@ -707,7 +707,7 @@ public class HFileReaderV2 extends AbstractHFileReader { assertSeeked(); try { - blockBuffer.position(getNextKVStartPosition()); + blockBuffer.position(getNextCellStartPosition()); } catch (IllegalArgumentException e) { LOG.error("Current pos = " + blockBuffer.position() + "; currKeyLen = " + currKeyLen + "; currValLen = " @@ -742,7 +742,7 @@ public class HFileReaderV2 extends AbstractHFileReader { return true; } - protected int getNextKVStartPosition() { + protected int getNextCellStartPosition() { return blockBuffer.position() + KEY_VALUE_LEN_SIZE + currKeyLen + currValueLen + currMemstoreTSLen; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java index ca63a578bf7..5ed68d2ce7a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java @@ -76,7 +76,7 @@ public class HFileReaderV3 extends HFileReaderV2 { HFileContext hfileContext = new HFileContextBuilder() .withIncludesMvcc(this.includesMemstoreTS) .withHBaseCheckSum(true) - .withCompressionAlgo(this.compressAlgo) + .withCompression(this.compressAlgo) .build(); return hfileContext; } @@ -120,9 +120,9 @@ public class HFileReaderV3 extends HFileReaderV2 { } @Override - protected int getKvBufSize() { - int kvBufSize = super.getKvBufSize(); - if (reader.hfileContext.shouldIncludeTags()) { + protected int getCellBufSize() { + int kvBufSize = super.getCellBufSize(); + if (reader.hfileContext.isIncludesTags()) { kvBufSize += Bytes.SIZEOF_SHORT + currTagsLen; } return kvBufSize; @@ -134,9 +134,9 @@ public class 
HFileReaderV3 extends HFileReaderV2 { } @Override - protected int getNextKVStartPosition() { - int nextKvPos = super.getNextKVStartPosition(); - if (reader.hfileContext.shouldIncludeTags()) { + protected int getNextCellStartPosition() { + int nextKvPos = super.getNextCellStartPosition(); + if (reader.hfileContext.isIncludesTags()) { nextKvPos += Bytes.SIZEOF_SHORT + currTagsLen; } return nextKvPos; @@ -147,7 +147,7 @@ public class HFileReaderV3 extends HFileReaderV2 { currKeyLen = blockBuffer.getInt(); currValueLen = blockBuffer.getInt(); ByteBufferUtils.skip(blockBuffer, currKeyLen + currValueLen); - if (reader.hfileContext.shouldIncludeTags()) { + if (reader.hfileContext.isIncludesTags()) { currTagsLen = blockBuffer.getShort(); ByteBufferUtils.skip(blockBuffer, currTagsLen); } @@ -191,7 +191,7 @@ public class HFileReaderV3 extends HFileReaderV2 { klen = blockBuffer.getInt(); vlen = blockBuffer.getInt(); ByteBufferUtils.skip(blockBuffer, klen + vlen); - if (reader.hfileContext.shouldIncludeTags()) { + if (reader.hfileContext.isIncludesTags()) { tlen = blockBuffer.getShort(); ByteBufferUtils.skip(blockBuffer, tlen); } @@ -247,7 +247,7 @@ public class HFileReaderV3 extends HFileReaderV2 { // The size of this key/value tuple, including key/value length fields. lastKeyValueSize = klen + vlen + memstoreTSLen + KEY_VALUE_LEN_SIZE; // include tag length also if tags included with KV - if (reader.hfileContext.shouldIncludeTags()) { + if (reader.hfileContext.isIncludesTags()) { lastKeyValueSize += tlen + Bytes.SIZEOF_SHORT; } blockBuffer.position(blockBuffer.position() + lastKeyValueSize); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java index 5a37db31e4b..67d69ca0b3f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java @@ -310,7 +310,7 @@ public class HFileWriterV2 extends AbstractHFileWriter { totalValueLength += vlength; out.write(key, koffset, klength); out.write(value, voffset, vlength); - if (this.hFileContext.shouldIncludeMvcc()) { + if (this.hFileContext.isIncludesMvcc()) { WritableUtils.writeVLong(out, memstoreTS); } } @@ -379,7 +379,7 @@ public class HFileWriterV2 extends AbstractHFileWriter { fsBlockWriter.writeHeaderAndData(outputStream); totalUncompressedBytes += fsBlockWriter.getUncompressedSizeWithHeader(); - if (this.hFileContext.shouldIncludeMvcc()) { + if (this.hFileContext.isIncludesMvcc()) { appendFileInfo(MAX_MEMSTORE_TS_KEY, Bytes.toBytes(maxMemstoreTS)); appendFileInfo(KEY_VALUE_VERSION, Bytes.toBytes(KEY_VALUE_VER_WITH_MEMSTORE)); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java index f5177f3d1f6..28845d1604b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java @@ -146,7 +146,7 @@ public class HFileWriterV3 extends HFileWriterV2 { out.write(key, koffset, klength); out.write(value, voffset, vlength); // Write the additional tag into the stream - if (hFileContext.shouldIncludeTags()) { + if (hFileContext.isIncludesTags()) { out.writeShort((short) tagsLength); if (tagsLength > 0) { out.write(tag, tagsOffset, tagsLength); @@ -155,7 +155,7 @@ public class HFileWriterV3 extends HFileWriterV2 
{ } } } - if (this.hFileContext.shouldIncludeMvcc()) { + if (this.hFileContext.isIncludesMvcc()) { WritableUtils.writeVLong(out, memstoreTS); } } @@ -175,12 +175,12 @@ public class HFileWriterV3 extends HFileWriterV2 { protected void finishFileInfo() throws IOException { super.finishFileInfo(); - if (hFileContext.shouldIncludeTags()) { + if (hFileContext.isIncludesTags()) { // When tags are not being written in this file, MAX_TAGS_LEN is excluded // from the FileInfo fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false); boolean tagsCompressed = (hFileContext.getEncodingOnDisk() != DataBlockEncoding.NONE) - && hFileContext.shouldCompressTags(); + && hFileContext.isCompressTags(); fileInfo.append(FileInfo.TAGS_COMPRESSED, Bytes.toBytes(tagsCompressed), false); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java index 1875ee434b9..ce5e191acde 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java @@ -193,7 +193,7 @@ public class HFileOutputFormat extends FileOutputFormat
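Two usage notes on the API changes in this patch. First, tag iteration moved from the removed KeyValue.tagsIterator() to the static CellUtil.tagsIterator(byte[], int, short), fed by the Cell accessors, as in the TestKeyValue hunk above. A sketch of the new call pattern follows; the KeyValue constructor taking a Tag[] comes from the earlier HBASE-8496 tag work and is assumed here, and the class sits in the org.apache.hadoop.hbase package because getTagLength() became package-private in this patch.

package org.apache.hadoop.hbase;

import java.util.Iterator;

import org.apache.hadoop.hbase.util.Bytes;

// Illustrative only -- not part of this patch.
public class TagsIteratorSketch {
  public static void main(String[] args) {
    // A KV carrying a single tag of type 1 (constructor assumed from HBASE-8496).
    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), Bytes.toBytes("q"),
        1L, Bytes.toBytes("value"), new Tag[] { new Tag((byte) 1, Bytes.toBytes("metadata")) });
    // Replacement for the removed kv.tagsIterator():
    Iterator<Tag> tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(),
        kv.getTagsLength());
    while (tagItr.hasNext()) {
      Tag tag = tagItr.next();
      System.out.println(tag.getTagLength() + " => " + Bytes.toString(tag.getValue()));
    }
  }
}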
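Second, the bulk of the churn is a mechanical rename: HFileContext's boolean getters move to JavaBeans style (shouldIncludeMvcc() to isIncludesMvcc(), and so on) and HFileContextBuilder.withCompressionAlgo() becomes withCompression(). A sketch of the resulting builder/getter pairing, assuming the post-patch hbase-common on the classpath:

import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

// Illustrative only -- not part of this patch.
public class HFileContextSketch {
  public static void main(String[] args) {
    HFileContext ctx = new HFileContextBuilder()
        .withHBaseCheckSum(true)
        .withIncludesMvcc(true)
        .withIncludesTags(true)
        .withCompressTags(false)
        .withCompression(Algorithm.NONE) // formerly withCompressionAlgo(...)
        .build();
    // Renamed JavaBeans-style getters:
    System.out.println(ctx.isUseHBaseChecksum()); // was shouldUseHBaseChecksum()
    System.out.println(ctx.isIncludesMvcc());     // was shouldIncludeMvcc()
    System.out.println(ctx.isIncludesTags());     // was shouldIncludeTags()
    System.out.println(ctx.isCompressTags());     // was shouldCompressTags()
  }
}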