HBASE-16530 Reduce DBE code duplication

Signed-off-by: Yu Li <liyu@apache.org>
binlijin authored 2016-09-08 13:44:41 +08:00; committed by Yu Li
parent e65817ef15
commit fc224ed0ed
8 changed files with 222 additions and 175 deletions
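In short: the plain-KeyValue serialization loop was hand-rolled in three places (CopyKeyDataBlockEncoder.internalEncode, RowIndexEncoderV1.write, NoOpDataBlockEncoder.encode), and the encoding/decoding-context factories plus seeker boilerplate were repeated across encoders. This commit extracts the former into a new NoneEncoder class and the latter into a new AbstractDataBlockEncoder base class. The per-call-site effect, in sketch form (names taken from the diff below):

// Before: ~20 lines of writeInt/writeFlatKey/writeValue plus optional
// tags and mvcc bookkeeping, copied per encoder.
// After: each call site holds a NoneEncoder and delegates in one line:
return encoder.write(cell);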

org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java (new file)

@@ -0,0 +1,83 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.HFileContext;

@InterfaceAudience.Private
public abstract class AbstractDataBlockEncoder implements DataBlockEncoder {

  @Override
  public HFileBlockEncodingContext newDataBlockEncodingContext(
      DataBlockEncoding encoding, byte[] header, HFileContext meta) {
    return new HFileBlockDefaultEncodingContext(encoding, header, meta);
  }

  @Override
  public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
    return new HFileBlockDefaultDecodingContext(meta);
  }

  protected void postEncoding(HFileBlockEncodingContext encodingCtx)
      throws IOException {
    if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
      encodingCtx.postEncoding(BlockType.ENCODED_DATA);
    } else {
      encodingCtx.postEncoding(BlockType.DATA);
    }
  }

  protected Cell createFirstKeyCell(ByteBuffer key, int keyLength) {
    if (key.hasArray()) {
      return new KeyValue.KeyOnlyKeyValue(key.array(), key.arrayOffset()
          + key.position(), keyLength);
    } else {
      return new ByteBufferedKeyOnlyKeyValue(key, key.position(), keyLength);
    }
  }

  protected abstract static class AbstractEncodedSeeker implements
      EncodedSeeker {
    protected HFileBlockDecodingContext decodingCtx;
    protected final CellComparator comparator;

    public AbstractEncodedSeeker(CellComparator comparator,
        HFileBlockDecodingContext decodingCtx) {
      this.comparator = comparator;
      this.decodingCtx = decodingCtx;
    }

    protected boolean includesMvcc() {
      return this.decodingCtx.getHFileContext().isIncludesMvcc();
    }

    protected boolean includesTags() {
      return this.decodingCtx.getHFileContext().isIncludesTags();
    }
  }
}
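For orientation, a minimal sketch of what a concrete codec inherits from this class; ToyCodec and its two method names are hypothetical, the helpers and types are the ones defined above:

package org.apache.hadoop.hbase.io.encoding;

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.Cell;

// Hypothetical subclass, kept abstract because the remaining DataBlockEncoder
// methods (encoding, decoding, seeker creation) are codec-specific.
public abstract class ToyCodec extends AbstractDataBlockEncoder {

  protected void finishBlock(HFileBlockEncodingContext encodingCtx)
      throws IOException {
    // Inherited helper: marks the block ENCODED_DATA for a real encoding,
    // plain DATA when the encoding is NONE.
    postEncoding(encodingCtx);
  }

  protected Cell firstKey(ByteBuffer key, int keyLength) {
    // Inherited helper: on-heap KeyValue.KeyOnlyKeyValue when the buffer is
    // array-backed, ByteBufferedKeyOnlyKeyValue otherwise.
    return createFirstKeyCell(key, keyLength);
  }
}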

org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java

@@ -23,7 +23,6 @@ import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import org.apache.hadoop.hbase.ByteBufferedCell;
-import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -36,8 +35,6 @@ import org.apache.hadoop.hbase.Streamable;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.TagCompressionContext;
-import org.apache.hadoop.hbase.io.hfile.BlockType;
-import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.util.LRUDictionary;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.hbase.nio.ByteBuff;
@@ -51,7 +48,7 @@ import org.apache.hadoop.io.WritableUtils;
  * Base class for all data block encoders that use a buffer.
  */
 @InterfaceAudience.Private
-abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
+abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
 
   /**
    * TODO: This datablockencoder is dealing in internals of hfileblocks. Purge reference to HFBs
    */
@@ -682,11 +679,8 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
     }
   }
 
-  protected abstract static class
-      BufferedEncodedSeeker<STATE extends SeekerState>
-      implements EncodedSeeker {
-    protected HFileBlockDecodingContext decodingCtx;
-    protected final CellComparator comparator;
+  protected abstract static class BufferedEncodedSeeker<STATE extends SeekerState>
+      extends AbstractEncodedSeeker {
     protected ByteBuff currentBuffer;
     protected TagCompressionContext tagCompressionContext = null;
     protected KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();
@@ -697,8 +691,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
     public BufferedEncodedSeeker(CellComparator comparator,
         HFileBlockDecodingContext decodingCtx) {
-      this.comparator = comparator;
-      this.decodingCtx = decodingCtx;
+      super(comparator, decodingCtx);
       if (decodingCtx.getHFileContext().isCompressTags()) {
         try {
           tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
@@ -710,14 +703,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
       previous = createSeekerState(); // may not be valid
     }
 
-    protected boolean includesMvcc() {
-      return this.decodingCtx.getHFileContext().isIncludesMvcc();
-    }
-
-    protected boolean includesTags() {
-      return this.decodingCtx.getHFileContext().isIncludesTags();
-    }
-
     @Override
     public int compareKey(CellComparator comparator, Cell key) {
       keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength);
@@ -1049,17 +1034,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
     }
   }
 
-  @Override
-  public HFileBlockEncodingContext newDataBlockEncodingContext(DataBlockEncoding encoding,
-      byte[] header, HFileContext meta) {
-    return new HFileBlockDefaultEncodingContext(encoding, header, meta);
-  }
-
-  @Override
-  public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
-    return new HFileBlockDefaultDecodingContext(meta);
-  }
-
   protected abstract ByteBuffer internalDecodeKeyValues(DataInputStream source,
       int allocateHeaderLength, int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx)
      throws IOException;
@@ -1139,19 +1113,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
     Bytes.putInt(uncompressedBytesWithHeader,
         HConstants.HFILEBLOCK_HEADER_SIZE + DataBlockEncoding.ID_SIZE, state.unencodedDataSizeWritten
         );
-    if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
-      encodingCtx.postEncoding(BlockType.ENCODED_DATA);
-    } else {
-      encodingCtx.postEncoding(BlockType.DATA);
-    }
+    postEncoding(encodingCtx);
   }
 
-  protected Cell createFirstKeyCell(ByteBuffer key, int keyLength) {
-    if (key.hasArray()) {
-      return new KeyValue.KeyOnlyKeyValue(key.array(), key.arrayOffset() + key.position(),
-          keyLength);
-    } else {
-      return new ByteBufferedKeyOnlyKeyValue(key, key.position(), keyLength);
-    }
-  }
 }

org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java

@@ -23,14 +23,10 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.WritableUtils;
 
 /**
  * Just copy data, do not do any kind of compression. Use for comparison and
@@ -39,31 +35,36 @@ import org.apache.hadoop.io.WritableUtils;
 @InterfaceAudience.Private
 public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder {
 
-  @Override
-  public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
-      DataOutputStream out) throws IOException {
-    int klength = KeyValueUtil.keyLength(cell);
-    int vlength = cell.getValueLength();
-    out.writeInt(klength);
-    out.writeInt(vlength);
-    CellUtil.writeFlatKey(cell, out);
-    CellUtil.writeValue(out, cell, vlength);
-    int size = klength + vlength + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
-    // Write the additional tag into the stream
-    if (encodingContext.getHFileContext().isIncludesTags()) {
-      int tagsLength = cell.getTagsLength();
-      out.writeShort(tagsLength);
-      if (tagsLength > 0) {
-        CellUtil.writeTags(out, cell, tagsLength);
-      }
-      size += tagsLength + KeyValue.TAGS_LENGTH_SIZE;
-    }
-    if (encodingContext.getHFileContext().isIncludesMvcc()) {
-      WritableUtils.writeVLong(out, cell.getSequenceId());
-      size += WritableUtils.getVIntSize(cell.getSequenceId());
-    }
-    return size;
+  private static class CopyKeyEncodingState extends EncodingState {
+    NoneEncoder encoder = null;
+  }
+
+  @Override
+  public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx,
+      DataOutputStream out) throws IOException {
+    if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
+      throw new IOException(this.getClass().getName() + " only accepts "
+          + HFileBlockDefaultEncodingContext.class.getName() + " as the "
+          + "encoding context.");
+    }
+
+    HFileBlockDefaultEncodingContext encodingCtx = (HFileBlockDefaultEncodingContext) blkEncodingCtx;
+    encodingCtx.prepareEncoding(out);
+
+    NoneEncoder encoder = new NoneEncoder(out, encodingCtx);
+    CopyKeyEncodingState state = new CopyKeyEncodingState();
+    state.encoder = encoder;
+    blkEncodingCtx.setEncodingState(state);
+  }
+
+  @Override
+  public int internalEncode(Cell cell,
+      HFileBlockDefaultEncodingContext encodingContext, DataOutputStream out)
+      throws IOException {
+    CopyKeyEncodingState state = (CopyKeyEncodingState) encodingContext
+        .getEncodingState();
+    NoneEncoder encoder = state.encoder;
+    return encoder.write(cell);
   }
 
   @Override
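Note the ordering contract this introduces (mirrored in NoOpDataBlockEncoder at the end of the diff): startBlockEncoding must run once per block before any encode call, because that is where the NoneEncoder is created and parked in the EncodingState. A sketch of the expected sequence; ctx, out and cells are stand-ins for state the HFile block writer normally owns:

// Sketch only: the HFile block writer normally drives this sequence.
CopyKeyDataBlockEncoder codec = new CopyKeyDataBlockEncoder();
codec.startBlockEncoding(ctx, out);  // builds CopyKeyEncodingState + NoneEncoder
for (Cell cell : cells) {
  codec.encode(cell, ctx, out);      // lands in internalEncode -> encoder.write(cell)
}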

org/apache/hadoop/hbase/io/encoding/NoneEncoder.java (new file)

@@ -0,0 +1,68 @@
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.io.WritableUtils;

@InterfaceAudience.Private
public class NoneEncoder {

  private DataOutputStream out;
  private HFileBlockDefaultEncodingContext encodingCtx;

  public NoneEncoder(DataOutputStream out,
      HFileBlockDefaultEncodingContext encodingCtx) {
    this.out = out;
    this.encodingCtx = encodingCtx;
  }

  public int write(Cell cell) throws IOException {
    int klength = KeyValueUtil.keyLength(cell);
    int vlength = cell.getValueLength();

    out.writeInt(klength);
    out.writeInt(vlength);
    CellUtil.writeFlatKey(cell, out);
    CellUtil.writeValue(out, cell, vlength);
    int size = klength + vlength + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
    // Write the additional tag into the stream
    if (encodingCtx.getHFileContext().isIncludesTags()) {
      int tagsLength = cell.getTagsLength();
      out.writeShort(tagsLength);
      if (tagsLength > 0) {
        CellUtil.writeTags(out, cell, tagsLength);
      }
      size += tagsLength + KeyValue.TAGS_LENGTH_SIZE;
    }
    if (encodingCtx.getHFileContext().isIncludesMvcc()) {
      WritableUtils.writeVLong(out, cell.getSequenceId());
      size += WritableUtils.getVIntSize(cell.getSequenceId());
    }
    return size;
  }

}
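write(cell) returns the number of bytes appended for the cell: key and value plus KEYVALUE_INFRASTRUCTURE_SIZE, then tags and mvcc only when the HFileContext enables them. A usage sketch; writeCells is a hypothetical helper, the real callers are RowIndexEncoderV1.write, CopyKeyDataBlockEncoder.internalEncode and NoOpDataBlockEncoder.encode:

static int writeCells(DataOutputStream out,
    HFileBlockDefaultEncodingContext encodingCtx,
    Iterable<Cell> cells) throws IOException {
  NoneEncoder encoder = new NoneEncoder(out, encodingCtx);
  int total = 0;
  for (Cell cell : cells) {
    total += encoder.write(cell);  // klen, vlen, key, value [, tags][, mvcc]
  }
  return total;
}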

org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java

@@ -23,15 +23,12 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
-import org.apache.hadoop.hbase.io.hfile.BlockType;
-import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -51,7 +48,7 @@ import org.apache.hadoop.io.WritableUtils;
  *
  */
 @InterfaceAudience.Private
-public class RowIndexCodecV1 implements DataBlockEncoder {
+public class RowIndexCodecV1 extends AbstractDataBlockEncoder {
 
   private static class RowIndexEncodingState extends EncodingState {
     RowIndexEncoderV1 encoder = null;
@@ -92,11 +89,7 @@ public class RowIndexCodecV1 implements DataBlockEncoder {
         .getEncodingState();
     RowIndexEncoderV1 encoder = state.encoder;
     encoder.flush();
-    if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
-      encodingCtx.postEncoding(BlockType.ENCODED_DATA);
-    } else {
-      encodingCtx.postEncoding(BlockType.DATA);
-    }
+    postEncoding(encodingCtx);
   }
 
   @Override
@@ -138,17 +131,6 @@ public class RowIndexCodecV1 implements DataBlockEncoder {
     }
   }
 
-  @Override
-  public HFileBlockEncodingContext newDataBlockEncodingContext(
-      DataBlockEncoding encoding, byte[] header, HFileContext meta) {
-    return new HFileBlockDefaultEncodingContext(encoding, header, meta);
-  }
-
-  @Override
-  public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
-    return new HFileBlockDefaultDecodingContext(meta);
-  }
-
   @Override
   public Cell getFirstKeyCellInBlock(ByteBuff block) {
     block.mark();
@@ -165,13 +147,4 @@ public class RowIndexCodecV1 implements DataBlockEncoder {
     return new RowIndexSeekerV1(comparator, decodingCtx);
   }
 
-  protected Cell createFirstKeyCell(ByteBuffer key, int keyLength) {
-    if (key.hasArray()) {
-      return new KeyValue.KeyOnlyKeyValue(key.array(), key.arrayOffset()
-          + key.position(), keyLength);
-    } else {
-      return new ByteBufferedKeyOnlyKeyValue(key, key.position(), keyLength);
-    }
-  }
 }

org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java

@@ -17,12 +17,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
-import org.apache.hadoop.io.WritableUtils;
 
 @InterfaceAudience.Private
 public class RowIndexEncoderV1 {
@@ -32,14 +28,15 @@ public class RowIndexEncoderV1 {
   private Cell lastCell = null;
 
   private DataOutputStream out;
-  private HFileBlockDefaultEncodingContext encodingCtx;
+  private NoneEncoder encoder;
   private int startOffset = -1;
   private ByteArrayOutputStream rowsOffsetBAOS = new ByteArrayOutputStream(
       64 * 4);
 
-  public RowIndexEncoderV1(DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) {
+  public RowIndexEncoderV1(DataOutputStream out,
+      HFileBlockDefaultEncodingContext encodingCtx) {
     this.out = out;
-    this.encodingCtx = encodingCtx;
+    this.encoder = new NoneEncoder(out, encodingCtx);
   }
 
   public int write(Cell cell) throws IOException {
@@ -50,30 +47,8 @@ public class RowIndexEncoderV1 {
       }
       rowsOffsetBAOS.writeInt(out.size() - startOffset);
     }
-    int klength = KeyValueUtil.keyLength(cell);
-    int vlength = cell.getValueLength();
-    out.writeInt(klength);
-    out.writeInt(vlength);
-    CellUtil.writeFlatKey(cell, out);
-    // Write the value part
-    CellUtil.writeValue(out, cell, vlength);
-    int encodedKvSize = klength + vlength
-        + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
-    // Write the additional tag into the stream
-    if (encodingCtx.getHFileContext().isIncludesTags()) {
-      int tagsLength = cell.getTagsLength();
-      out.writeShort(tagsLength);
-      if (tagsLength > 0) {
-        CellUtil.writeTags(out, cell, tagsLength);
-      }
-      encodedKvSize += tagsLength + KeyValue.TAGS_LENGTH_SIZE;
-    }
-    if (encodingCtx.getHFileContext().isIncludesMvcc()) {
-      WritableUtils.writeVLong(out, cell.getSequenceId());
-      encodedKvSize += WritableUtils.getVIntSize(cell.getSequenceId());
-    }
     lastCell = cell;
-    return encodedKvSize;
+    return encoder.write(cell);
   }
 
   protected boolean checkRow(final Cell cell) throws IOException {

org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java

@@ -29,17 +29,14 @@ import org.apache.hadoop.hbase.OffheapKeyValue;
 import org.apache.hadoop.hbase.SizeCachedKeyValue;
 import org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
+import org.apache.hadoop.hbase.io.encoding.AbstractDataBlockEncoder.AbstractEncodedSeeker;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ObjectIntPair;
 
 @InterfaceAudience.Private
-public class RowIndexSeekerV1 implements EncodedSeeker {
-
-  private HFileBlockDecodingContext decodingCtx;
-  private final CellComparator comparator;
+public class RowIndexSeekerV1 extends AbstractEncodedSeeker {
 
   // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too
   // many object creations.
@@ -54,8 +51,7 @@ public class RowIndexSeekerV1 implements EncodedSeeker {
   public RowIndexSeekerV1(CellComparator comparator,
       HFileBlockDecodingContext decodingCtx) {
-    this.comparator = comparator;
-    this.decodingCtx = decodingCtx;
+    super(comparator, decodingCtx);
   }
 
   @Override
@@ -293,14 +289,6 @@ public class RowIndexSeekerV1 implements EncodedSeeker {
         current.keyLength);
   }
 
-  protected boolean includesMvcc() {
-    return this.decodingCtx.getHFileContext().isIncludesMvcc();
-  }
-
-  protected boolean includesTags() {
-    return this.decodingCtx.getHFileContext().isIncludesTags();
-  }
-
   protected void decodeTags() {
     current.tagsLength = currentBuffer.getShortAfterPosition(0);
     currentBuffer.skip(Bytes.SIZEOF_SHORT);
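The seeker side mirrors the encoder side: both BufferedEncodedSeeker and RowIndexSeekerV1 now inherit the comparator/decodingCtx fields and the includesMvcc()/includesTags() checks from AbstractEncodedSeeker. A minimal sketch; ToySeeker is hypothetical and assumed to live in the same org.apache.hadoop.hbase.io.encoding package:

// Hypothetical seeker, kept abstract since EncodedSeeker's navigation
// methods are codec-specific; only the inherited pieces are shown.
public abstract class ToySeeker
    extends AbstractDataBlockEncoder.AbstractEncodedSeeker {

  public ToySeeker(CellComparator comparator,
      HFileBlockDecodingContext decodingCtx) {
    super(comparator, decodingCtx);  // fields now live in the base class
  }

  protected boolean shouldDecodeMemstoreTS() {
    // Inherited: consults the HFileContext hanging off the decoding context.
    return includesMvcc();
  }
}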

org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java

@@ -19,17 +19,15 @@ package org.apache.hadoop.hbase.io.hfile;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.encoding.EncodingState;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
-import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.hbase.io.encoding.NoneEncoder;
 
 /**
  * Does not perform any kind of encoding/decoding.
@@ -40,35 +38,21 @@ public class NoOpDataBlockEncoder implements HFileDataBlockEncoder {
   public static final NoOpDataBlockEncoder INSTANCE =
       new NoOpDataBlockEncoder();
 
+  private static class NoneEncodingState extends EncodingState {
+    NoneEncoder encoder = null;
+  }
+
   /** Cannot be instantiated. Use {@link #INSTANCE} instead. */
   private NoOpDataBlockEncoder() {
   }
 
   @Override
-  public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
-      throws IOException {
-    int klength = KeyValueUtil.keyLength(cell);
-    int vlength = cell.getValueLength();
-
-    out.writeInt(klength);
-    out.writeInt(vlength);
-    CellUtil.writeFlatKey(cell, out);
-    CellUtil.writeValue(out, cell, vlength);
-    int encodedKvSize = klength + vlength + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
-    // Write the additional tag into the stream
-    if (encodingCtx.getHFileContext().isIncludesTags()) {
-      int tagsLength = cell.getTagsLength();
-      out.writeShort(tagsLength);
-      if (tagsLength > 0) {
-        CellUtil.writeTags(out, cell, tagsLength);
-      }
-      encodedKvSize += tagsLength + KeyValue.TAGS_LENGTH_SIZE;
-    }
-    if (encodingCtx.getHFileContext().isIncludesMvcc()) {
-      WritableUtils.writeVLong(out, cell.getSequenceId());
-      encodedKvSize += WritableUtils.getVIntSize(cell.getSequenceId());
-    }
-    return encodedKvSize;
+  public int encode(Cell cell, HFileBlockEncodingContext encodingCtx,
+      DataOutputStream out) throws IOException {
+    NoneEncodingState state = (NoneEncodingState) encodingCtx
+        .getEncodingState();
+    NoneEncoder encoder = state.encoder;
+    return encoder.write(cell);
   }
 
   @Override
@@ -107,8 +91,21 @@ public class NoOpDataBlockEncoder implements HFileDataBlockEncoder {
   }
 
   @Override
-  public void startBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out)
-      throws IOException {
+  public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx,
+      DataOutputStream out) throws IOException {
+    if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
+      throw new IOException(this.getClass().getName() + " only accepts "
+          + HFileBlockDefaultEncodingContext.class.getName() + " as the "
+          + "encoding context.");
+    }
+
+    HFileBlockDefaultEncodingContext encodingCtx =
+        (HFileBlockDefaultEncodingContext) blkEncodingCtx;
+    encodingCtx.prepareEncoding(out);
+
+    NoneEncoder encoder = new NoneEncoder(out, encodingCtx);
+    NoneEncodingState state = new NoneEncodingState();
+    state.encoder = encoder;
+    blkEncodingCtx.setEncodingState(state);
   }
 
   @Override
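Putting the NoOp pieces together, the write-path sequence after this change, as a sketch only; the block writer in io.hfile normally drives these calls, and ctx, out and cells stand in for its state:

HFileDataBlockEncoder enc = NoOpDataBlockEncoder.INSTANCE;
enc.startBlockEncoding(ctx, out);  // validates the context type, runs
                                   // prepareEncoding(out), installs the
                                   // NoneEncodingState + NoneEncoder
for (Cell cell : cells) {
  enc.encode(cell, ctx, out);      // NoneEncodingState -> NoneEncoder.write(cell)
}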