HBASE-12298 Support BB usage in PrefixTree (Ram)

commit d81fba59cf (parent a47ff1d998)
ByteBuff.java
@@ -207,6 +207,16 @@ public abstract class ByteBuff {
    */
   public abstract void get(byte[] dst, int offset, int length);
 
+  /**
+   * Copies the specified number of bytes from this ByteBuff's given position into
+   * the byte[] at the given offset. The position of this ByteBuff is not changed.
+   * @param sourceOffset the offset in this ByteBuff from where the copy should happen
+   * @param dst the byte[] to which the ByteBuff's content is to be copied
+   * @param offset the offset in the byte[] at which to begin writing
+   * @param length the number of bytes to copy
+   */
+  public abstract void get(int sourceOffset, byte[] dst, int offset, int length);
+
   /**
    * Copies the content from this ByteBuff's current position to the byte array and fills it. Also
    * advances the position of the ByteBuff by the length of the byte[].
@@ -454,4 +464,54 @@ public abstract class ByteBuff {
     }
     return (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
   }
+
+  /**
+   * Search sorted array "a" for byte "key".
+   *
+   * @param a Array to search. Entries must be sorted and unique.
+   * @param fromIndex First index inclusive of "a" to include in the search.
+   * @param toIndex Last index exclusive of "a" to include in the search.
+   * @param key The byte to search for.
+   * @return The index of key if found. If not found, return -(index + 1), where
+   *         negative indicates "not found" and the "index + 1" handles the "-0"
+   *         case.
+   */
+  public static int unsignedBinarySearch(ByteBuff a, int fromIndex, int toIndex, byte key) {
+    int unsignedKey = key & 0xff;
+    int low = fromIndex;
+    int high = toIndex - 1;
+
+    while (low <= high) {
+      int mid = (low + high) >>> 1;
+      int midVal = a.get(mid) & 0xff;
+
+      if (midVal < unsignedKey) {
+        low = mid + 1;
+      } else if (midVal > unsignedKey) {
+        high = mid - 1;
+      } else {
+        return mid; // key found
+      }
+    }
+    return -(low + 1); // key not found.
+  }
+
+  public static String toStringBinary(final ByteBuff b, int off, int len) {
+    StringBuilder result = new StringBuilder();
+    // Just in case we are passed a 'len' that is > buffer length...
+    if (off >= b.capacity())
+      return result.toString();
+    if (off + len > b.capacity())
+      len = b.capacity() - off;
+    for (int i = off; i < off + len; ++i) {
+      int ch = b.get(i) & 0xFF;
+      if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z')
+          || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) {
+        result.append((char) ch);
+      } else {
+        result.append(String.format("\\x%02X", ch));
+      }
+    }
+    return result.toString();
+  }
 }
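A minimal sketch of the two ByteBuff additions above, assuming a heap-backed SingleByteBuff (the wrapper used throughout this commit). The positional get leaves the buffer's position untouched, and unsignedBinarySearch returns -(insertionPoint + 1) on a miss:

    import java.nio.ByteBuffer;
    import org.apache.hadoop.hbase.nio.ByteBuff;
    import org.apache.hadoop.hbase.nio.SingleByteBuff;

    public class ByteBuffSketch {
      public static void main(String[] args) {
        ByteBuff buf = new SingleByteBuff(ByteBuffer.wrap(new byte[] { 1, 3, 5, 7 }));
        byte[] dst = new byte[2];
        buf.get(1, dst, 0, 2);   // positional read: dst == {3, 5}; buf.position() is still 0
        int hit = ByteBuff.unsignedBinarySearch(buf, 0, 4, (byte) 5);   // 2
        int miss = ByteBuff.unsignedBinarySearch(buf, 0, 4, (byte) 4);  // -(2 + 1) == -3
        System.out.println(hit + " " + miss + " " + dst[0] + " " + dst[1]);
      }
    }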
MultiByteBuff.java
@@ -613,6 +613,25 @@ public class MultiByteBuff extends ByteBuff {
     }
   }
 
+  @Override
+  public void get(int sourceOffset, byte[] dst, int offset, int length) {
+    int itemIndex = getItemIndex(sourceOffset);
+    ByteBuffer item = this.items[itemIndex];
+    sourceOffset = sourceOffset - this.itemBeginPos[itemIndex];
+    while (length > 0) {
+      int toRead = Math.min((item.limit() - sourceOffset), length);
+      ByteBufferUtils.copyFromBufferToArray(dst, item, sourceOffset, offset, toRead);
+      length -= toRead;
+      if (length == 0)
+        break;
+      itemIndex++;
+      item = this.items[itemIndex];
+      offset += toRead;
+      sourceOffset = 0;
+    }
+  }
+
   /**
    * Marks the limit of this MBB.
    * @param limit
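The loop above walks the copy fragment by fragment: getItemIndex locates the item containing sourceOffset, each iteration copies up to that item's limit, and the next iteration restarts at offset 0 of the following item. A small hypothetical layout (not from the patch) where a positional read straddles two fragments:

    ByteBuffer b1 = ByteBuffer.wrap(new byte[] { 0, 1, 2, 3 });   // positions 0-3
    ByteBuffer b2 = ByteBuffer.wrap(new byte[] { 4, 5, 6, 7 });   // positions 4-7
    MultiByteBuff mbb = new MultiByteBuff(b1, b2);
    byte[] dst = new byte[4];
    mbb.get(2, dst, 0, 4);   // two bytes from b1, then two from b2: dst == {2, 3, 4, 5}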
SingleByteBuff.java
@@ -160,6 +160,11 @@ public class SingleByteBuff extends ByteBuff {
     buf.position(buf.position() + length);
   }
 
+  @Override
+  public void get(int sourceOffset, byte[] dst, int offset, int length) {
+    ByteBufferUtils.copyFromBufferToArray(dst, buf, sourceOffset, offset, length);
+  }
+
   @Override
   public void get(byte[] dst) {
     get(dst, 0, dst.length);
TestMultiByteBuff.java
@@ -314,4 +314,32 @@ public class TestMultiByteBuff {
     int intRes = mbb1.getIntAfterPosition(1);
     assertEquals(3, intRes);
   }
+
+  @Test
+  public void testPositionalCopyToByteArray() throws Exception {
+    byte[] b = new byte[4];
+    byte[] b1 = new byte[8];
+    ByteBuffer bb1 = ByteBuffer.wrap(b);
+    ByteBuffer bb2 = ByteBuffer.wrap(b1);
+    MultiByteBuff mbb1 = new MultiByteBuff(bb1, bb2);
+    mbb1.position(2);
+    mbb1.putInt(4);
+    mbb1.position(7);
+    mbb1.put((byte) 2);
+    mbb1.putInt(3);
+    byte[] dst = new byte[4];
+    mbb1.get(2, dst, 0, 4);
+    assertEquals(4, Bytes.toInt(dst));
+    assertEquals(12, mbb1.position());
+    mbb1.position(1);
+    dst = new byte[4];
+    mbb1.get(8, dst, 0, 4);
+    assertEquals(3, Bytes.toInt(dst));
+    assertEquals(1, mbb1.position());
+    mbb1.position(12);
+    dst = new byte[1];
+    mbb1.get(7, dst, 0, 1);
+    assertEquals(2, dst[0]);
+    assertEquals(12, mbb1.position());
+  }
 }
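For reference, the byte layout the test above builds across the two fragments (java.nio ints are big-endian), worked out position by position:

    // position:  0  1  2  3 | 4  5  6  7  8  9 10 11    (bb1: 0-3, bb2: 4-11)
    // value:     0  0  0  0 | 0  4  0  2  0  0  0  3
    // putInt(4) at position 2 writes {0, 0, 0, 4} to positions 2-5, straddling
    // the fragment boundary at position 4; the positional get(2, dst, 0, 4)
    // then has to reassemble exactly those bytes from both fragments.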
PrefixTreeBlockMeta.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.codec.prefixtree;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.vint.UVIntTool;
 import org.apache.hadoop.hbase.util.vint.UVLongTool;
@@ -56,8 +56,6 @@ public class PrefixTreeBlockMeta {
 
 
   /**************** transient fields *********************/
 
-  protected int arrayOffset;
   protected int bufferOffset;
 
 
@@ -116,7 +114,6 @@ public class PrefixTreeBlockMeta {
 
   public PrefixTreeBlockMeta(InputStream is) throws IOException{
     this.version = VERSION;
-    this.arrayOffset = 0;
     this.bufferOffset = 0;
     readVariableBytesFromInputStream(is);
   }
@@ -124,14 +121,13 @@ public class PrefixTreeBlockMeta {
   /**
    * @param buffer positioned at start of PtBlockMeta
    */
-  public PrefixTreeBlockMeta(ByteBuffer buffer) {
+  public PrefixTreeBlockMeta(ByteBuff buffer) {
     initOnBlock(buffer);
   }
 
-  public void initOnBlock(ByteBuffer buffer) {
-    arrayOffset = buffer.arrayOffset();
+  public void initOnBlock(ByteBuff buffer) {
     bufferOffset = buffer.position();
-    readVariableBytesFromArray(buffer.array(), arrayOffset + bufferOffset);
+    readVariableBytesFromBuffer(buffer, bufferOffset);
   }
 
 
@@ -263,79 +259,79 @@ public class PrefixTreeBlockMeta {
     numUniqueTags = UVIntTool.getInt(is);
   }
 
-  public void readVariableBytesFromArray(byte[] bytes, int offset) {
+  public void readVariableBytesFromBuffer(ByteBuff buf, int offset) {
     int position = offset;
 
-    version = UVIntTool.getInt(bytes, position);
+    version = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(version);
-    numMetaBytes = UVIntTool.getInt(bytes, position);
+    numMetaBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numMetaBytes);
-    numKeyValueBytes = UVIntTool.getInt(bytes, position);
+    numKeyValueBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numKeyValueBytes);
-    setIncludesMvccVersion(bytes[position]);
+    setIncludesMvccVersion(buf.get(position));
     ++position;
 
-    numRowBytes = UVIntTool.getInt(bytes, position);
+    numRowBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numRowBytes);
-    numFamilyBytes = UVIntTool.getInt(bytes, position);
+    numFamilyBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numFamilyBytes);
-    numQualifierBytes = UVIntTool.getInt(bytes, position);
+    numQualifierBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numQualifierBytes);
-    numTagsBytes = UVIntTool.getInt(bytes, position);
+    numTagsBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numTagsBytes);
-    numTimestampBytes = UVIntTool.getInt(bytes, position);
+    numTimestampBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numTimestampBytes);
-    numMvccVersionBytes = UVIntTool.getInt(bytes, position);
+    numMvccVersionBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numMvccVersionBytes);
-    numValueBytes = UVIntTool.getInt(bytes, position);
+    numValueBytes = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numValueBytes);
 
-    nextNodeOffsetWidth = UVIntTool.getInt(bytes, position);
+    nextNodeOffsetWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(nextNodeOffsetWidth);
-    familyOffsetWidth = UVIntTool.getInt(bytes, position);
+    familyOffsetWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(familyOffsetWidth);
-    qualifierOffsetWidth = UVIntTool.getInt(bytes, position);
+    qualifierOffsetWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(qualifierOffsetWidth);
-    tagsOffsetWidth = UVIntTool.getInt(bytes, position);
+    tagsOffsetWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(tagsOffsetWidth);
-    timestampIndexWidth = UVIntTool.getInt(bytes, position);
+    timestampIndexWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(timestampIndexWidth);
-    mvccVersionIndexWidth = UVIntTool.getInt(bytes, position);
+    mvccVersionIndexWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(mvccVersionIndexWidth);
-    valueOffsetWidth = UVIntTool.getInt(bytes, position);
+    valueOffsetWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(valueOffsetWidth);
-    valueLengthWidth = UVIntTool.getInt(bytes, position);
+    valueLengthWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(valueLengthWidth);
 
-    rowTreeDepth = UVIntTool.getInt(bytes, position);
+    rowTreeDepth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(rowTreeDepth);
-    maxRowLength = UVIntTool.getInt(bytes, position);
+    maxRowLength = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(maxRowLength);
-    maxQualifierLength = UVIntTool.getInt(bytes, position);
+    maxQualifierLength = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(maxQualifierLength);
-    maxTagsLength = UVIntTool.getInt(bytes, position);
+    maxTagsLength = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(maxTagsLength);
-    minTimestamp = UVLongTool.getLong(bytes, position);
+    minTimestamp = UVLongTool.getLong(buf, position);
     position += UVLongTool.numBytes(minTimestamp);
-    timestampDeltaWidth = UVIntTool.getInt(bytes, position);
+    timestampDeltaWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(timestampDeltaWidth);
-    minMvccVersion = UVLongTool.getLong(bytes, position);
+    minMvccVersion = UVLongTool.getLong(buf, position);
     position += UVLongTool.numBytes(minMvccVersion);
-    mvccVersionDeltaWidth = UVIntTool.getInt(bytes, position);
+    mvccVersionDeltaWidth = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(mvccVersionDeltaWidth);
 
-    setAllSameType(bytes[position]);
+    setAllSameType(buf.get(position));
     ++position;
-    allTypes = bytes[position];
+    allTypes = buf.get(position);
     ++position;
 
-    numUniqueRows = UVIntTool.getInt(bytes, position);
+    numUniqueRows = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numUniqueRows);
-    numUniqueFamilies = UVIntTool.getInt(bytes, position);
+    numUniqueFamilies = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numUniqueFamilies);
-    numUniqueQualifiers = UVIntTool.getInt(bytes, position);
+    numUniqueQualifiers = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numUniqueQualifiers);
-    numUniqueTags = UVIntTool.getInt(bytes, position);
+    numUniqueTags = UVIntTool.getInt(buf, position);
     position += UVIntTool.numBytes(numUniqueTags);
   }
 
 
@@ -405,8 +401,6 @@ public class PrefixTreeBlockMeta {
       return false;
     if (allTypes != other.allTypes)
       return false;
-    if (arrayOffset != other.arrayOffset)
-      return false;
     if (bufferOffset != other.bufferOffset)
       return false;
     if (valueLengthWidth != other.valueLengthWidth)
@@ -483,7 +477,6 @@ public class PrefixTreeBlockMeta {
     int result = 1;
     result = prime * result + (allSameType ? 1231 : 1237);
     result = prime * result + allTypes;
-    result = prime * result + arrayOffset;
     result = prime * result + bufferOffset;
     result = prime * result + valueLengthWidth;
     result = prime * result + valueOffsetWidth;
@@ -525,9 +518,7 @@ public class PrefixTreeBlockMeta {
   @Override
   public String toString() {
     StringBuilder builder = new StringBuilder();
-    builder.append("PtBlockMeta [arrayOffset=");
-    builder.append(arrayOffset);
-    builder.append(", bufferOffset=");
+    builder.append("PtBlockMeta [bufferOffset=");
     builder.append(bufferOffset);
     builder.append(", version=");
     builder.append(version);
@@ -602,12 +593,8 @@ public class PrefixTreeBlockMeta {
 
   /************** absolute getters *******************/
 
-  public int getAbsoluteMetaOffset() {
-    return arrayOffset + bufferOffset;
-  }
-
   public int getAbsoluteRowOffset() {
-    return getAbsoluteMetaOffset() + numMetaBytes;
+    return getBufferOffset() + numMetaBytes;
   }
 
   public int getAbsoluteFamilyOffset() {
@@ -749,14 +736,6 @@ public class PrefixTreeBlockMeta {
     this.numMetaBytes = numMetaBytes;
   }
 
-  public int getArrayOffset() {
-    return arrayOffset;
-  }
-
-  public void setArrayOffset(int arrayOffset) {
-    this.arrayOffset = arrayOffset;
-  }
-
   public int getBufferOffset() {
     return bufferOffset;
   }
 
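With arrayOffset gone, every offset in the block meta is relative to the ByteBuff itself rather than to a backing array. A minimal round-trip sketch (mirroring the TestBlockMeta hunk later in this commit; the originalMeta variable is assumed):

    static PrefixTreeBlockMeta roundTrip(PrefixTreeBlockMeta originalMeta) throws IOException {
      ByteArrayOutputStream os = new ByteArrayOutputStream();
      originalMeta.writeVariableBytesToOutputStream(os);      // serialize as before
      ByteBuff buf = new SingleByteBuff(ByteBuffer.wrap(os.toByteArray()));
      return new PrefixTreeBlockMeta(buf);                    // parses via initOnBlock(buf)
    }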
PrefixTreeCodec.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.io.WritableUtils;
 
@@ -83,7 +84,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
       int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException {
     ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source);// waste
     sourceAsBuffer.mark();
-    PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(sourceAsBuffer);
+    PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(new SingleByteBuff(sourceAsBuffer));
     sourceAsBuffer.rewind();
     int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
     byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
@@ -92,7 +93,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
     CellSearcher searcher = null;
     try {
       boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
-      searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvcc);
+      searcher = DecoderFactory.checkOut(new SingleByteBuff(sourceAsBuffer), includesMvcc);
       while (searcher.advance()) {
         KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current());
         // needs to be modified for DirectByteBuffers. no existing methods to
@@ -121,9 +122,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
     PrefixTreeArraySearcher searcher = null;
     try {
       // should i includeMemstoreTS (second argument)? i think PrefixKeyDeltaEncoder is, so i will
-      // TODO : Change to work with BBs
-      searcher = DecoderFactory.checkOut(block.asSubByteBuffer(block.limit() - block.position()),
-          true);
+      searcher = DecoderFactory.checkOut(block, true);
       if (!searcher.positionAtFirstCell()) {
         return null;
       }
PrefixTreeSeeker.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree;
 
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.hbase.ByteBufferedCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -33,6 +34,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
 import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 
@@ -57,9 +59,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
 
   @Override
   public void setCurrentBuffer(ByteBuff fullBlockBuffer) {
-    block = fullBlockBuffer.asSubByteBuffer(fullBlockBuffer.limit());
-    // TODO : change to Bytebuff
-    ptSearcher = DecoderFactory.checkOut(block, includeMvccVersion);
+    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);
     rewind();
   }
 
@@ -99,16 +99,29 @@ public class PrefixTreeSeeker implements EncodedSeeker {
    */
   @Override
   public Cell getCell() {
-    Cell cell = ptSearcher.current();
+    // The PrefixTreeCell is of type ByteBufferedCell, and the value part of the cell
+    // determines whether it is an offheap or an onheap cell. All other parts of the
+    // cell (row, fam and col) are represented as onheap byte[]s.
+    ByteBufferedCell cell = (ByteBufferedCell) ptSearcher.current();
     if (cell == null) {
       return null;
     }
-    return new ClonedPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
-        cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
-        cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
-        cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),
-        cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),
-        cell.getSequenceId());
+    // Use the ByteBufferedCell to see if the Cell is onheap or offheap
+    if (cell.getValueByteBuffer().hasArray()) {
+      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
+          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
+          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
+          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),
+          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),
+          cell.getSequenceId());
+    } else {
+      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
+          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
+          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
+          cell.getValueByteBuffer(), cell.getValuePositionInByteBuffer(), cell.getValueLength(),
+          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),
+          cell.getTypeByte(), cell.getSequenceId());
+    }
   }
 
   /**
@@ -208,12 +221,13 @@ public class PrefixTreeSeeker implements EncodedSeeker {
     return comparator.compare(key,
         ptSearcher.current());
   }
 
   /**
    * Cloned version of the PrefixTreeCell where, except for the value part, the rest
    * of the key is deep copied
    *
    */
-  private static class ClonedPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {
+  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {
     private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
         + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)
         + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));
@@ -232,7 +246,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
     private long seqId;
     private byte type;
 
-    public ClonedPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,
+    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,
         int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,
         int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,
         long seqId) {
@@ -367,4 +381,215 @@ public class PrefixTreeSeeker implements EncodedSeeker {
       return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;
     }
   }
+
+  private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell,
+      SettableSequenceId, HeapSize {
+    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
+        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)
+        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));
+    private ByteBuffer rowBuff;
+    private short rowLength;
+    private ByteBuffer famBuff;
+    private byte famLength;
+    private ByteBuffer qualBuff;
+    private int qualLength;
+    private ByteBuffer val;
+    private int valOffset;
+    private int valLength;
+    private ByteBuffer tagBuff;
+    private int tagsLength;
+    private long ts;
+    private long seqId;
+    private byte type;
+
+    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,
+        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,
+        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,
+        long seqId) {
+      byte[] tmpRow = new byte[rowLength];
+      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);
+      this.rowBuff = ByteBuffer.wrap(tmpRow);
+      this.rowLength = rowLength;
+      byte[] tmpFam = new byte[famLength];
+      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);
+      this.famBuff = ByteBuffer.wrap(tmpFam);
+      this.famLength = famLength;
+      byte[] tmpQual = new byte[qualLength];
+      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);
+      this.qualBuff = ByteBuffer.wrap(tmpQual);
+      this.qualLength = qualLength;
+      byte[] tmpTag = new byte[tagLength];
+      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);
+      this.tagBuff = ByteBuffer.wrap(tmpTag);
+      this.tagsLength = tagLength;
+      this.val = val;
+      this.valLength = valLength;
+      this.valOffset = valOffset;
+      this.ts = ts;
+      this.seqId = seqId;
+      this.type = type;
+    }
+
+    @Override
+    public void setSequenceId(long seqId) {
+      this.seqId = seqId;
+    }
+
+    @Override
+    public byte[] getRowArray() {
+      return this.rowBuff.array();
+    }
+
+    @Override
+    public int getRowOffset() {
+      return getRowPositionInByteBuffer();
+    }
+
+    @Override
+    public short getRowLength() {
+      return this.rowLength;
+    }
+
+    @Override
+    public byte[] getFamilyArray() {
+      return this.famBuff.array();
+    }
+
+    @Override
+    public int getFamilyOffset() {
+      return getFamilyPositionInByteBuffer();
+    }
+
+    @Override
+    public byte getFamilyLength() {
+      return this.famLength;
+    }
+
+    @Override
+    public byte[] getQualifierArray() {
+      return this.qualBuff.array();
+    }
+
+    @Override
+    public int getQualifierOffset() {
+      return getQualifierPositionInByteBuffer();
+    }
+
+    @Override
+    public int getQualifierLength() {
+      return this.qualLength;
+    }
+
+    @Override
+    public long getTimestamp() {
+      return ts;
+    }
+
+    @Override
+    public byte getTypeByte() {
+      return type;
+    }
+
+    @Override
+    public long getSequenceId() {
+      return seqId;
+    }
+
+    @Override
+    public byte[] getValueArray() {
+      byte[] tmpVal = new byte[valLength];
+      ByteBufferUtils.copyFromBufferToArray(tmpVal, val, valOffset, 0, valLength);
+      return tmpVal;
+    }
+
+    @Override
+    public int getValueOffset() {
+      return 0;
+    }
+
+    @Override
+    public int getValueLength() {
+      return this.valLength;
+    }
+
+    @Override
+    public byte[] getTagsArray() {
+      return this.tagBuff.array();
+    }
+
+    @Override
+    public int getTagsOffset() {
+      return getTagsPositionInByteBuffer();
+    }
+
+    @Override
+    public int getTagsLength() {
+      return this.tagsLength;
+    }
+
+    @Override
+    public ByteBuffer getRowByteBuffer() {
+      return this.rowBuff;
+    }
+
+    @Override
+    public int getRowPositionInByteBuffer() {
+      return 0;
+    }
+
+    @Override
+    public ByteBuffer getFamilyByteBuffer() {
+      return this.famBuff;
+    }
+
+    @Override
+    public int getFamilyPositionInByteBuffer() {
+      return 0;
+    }
+
+    @Override
+    public ByteBuffer getQualifierByteBuffer() {
+      return this.qualBuff;
+    }
+
+    @Override
+    public int getQualifierPositionInByteBuffer() {
+      return 0;
+    }
+
+    @Override
+    public ByteBuffer getTagsByteBuffer() {
+      return this.tagBuff;
+    }
+
+    @Override
+    public int getTagsPositionInByteBuffer() {
+      return 0;
+    }
+
+    @Override
+    public ByteBuffer getValueByteBuffer() {
+      return this.val;
+    }
+
+    @Override
+    public int getValuePositionInByteBuffer() {
+      return this.valOffset;
+    }
+
+    @Override
+    public long heapSize() {
+      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;
+    }
+
+    @Override
+    public String toString() {
+      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());
+      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());
+      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),
+          getQualifierLength());
+      String timestamp = String.valueOf((getTimestamp()));
+      return row + "/" + family + (family != null && family.length() > 0 ? ":" : "") + qualifier
+          + "/" + timestamp + "/" + Type.codeToType(type);
+    }
+  }
 }
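A sketch of how a consumer can take advantage of the split (the seeker setup is assumed): OnheapPrefixTreeCell is a plain Cell, while OffheapPrefixTreeCell also extends ByteBufferedCell, so its value can be read in place without copying it onto the heap:

    Cell c = seeker.getCell();
    if (c instanceof ByteBufferedCell) {
      // offheap clone: read the value directly from the (possibly direct) buffer
      ByteBufferedCell bbc = (ByteBufferedCell) c;
      ByteBuffer val = bbc.getValueByteBuffer();
      int pos = bbc.getValuePositionInByteBuffer();
      int len = bbc.getValueLength();
      // ... consume val in the range [pos, pos + len) ...
    } else {
      byte[] val = c.getValueArray();   // onheap clone already holds a byte[]
    }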
ArraySearcherPool.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.hbase.codec.prefixtree.decode;
 
-import java.nio.ByteBuffer;
 import java.util.Queue;
 import java.util.concurrent.LinkedBlockingQueue;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 
 /**
  * <p>
@@ -45,7 +45,7 @@ public class ArraySearcherPool {
   protected Queue<PrefixTreeArraySearcher> pool
       = new LinkedBlockingQueue<PrefixTreeArraySearcher>(MAX_POOL_SIZE);
 
-  public PrefixTreeArraySearcher checkOut(ByteBuffer buffer, boolean includesMvccVersion) {
+  public PrefixTreeArraySearcher checkOut(ByteBuff buffer, boolean includesMvccVersion) {
     PrefixTreeArraySearcher searcher = pool.poll();//will return null if pool is empty
     searcher = DecoderFactory.ensureArraySearcherValid(buffer, searcher, includesMvccVersion);
     return searcher;
DecoderFactory.java
@@ -18,28 +18,21 @@
 
 package org.apache.hadoop.hbase.codec.prefixtree.decode;
 
-import java.nio.ByteBuffer;
-
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
-
+import org.apache.hadoop.hbase.nio.ByteBuff;
 /**
  * Static wrapper class for the ArraySearcherPool.
  */
 @InterfaceAudience.Private
 public class DecoderFactory {
 
   private static final ArraySearcherPool POOL = new ArraySearcherPool();
 
   //TODO will need a PrefixTreeSearcher on top of CellSearcher
-  public static PrefixTreeArraySearcher checkOut(final ByteBuffer buffer,
+  public static PrefixTreeArraySearcher checkOut(final ByteBuff buffer,
       boolean includeMvccVersion) {
-    if (buffer.isDirect()) {
-      throw new IllegalArgumentException("DirectByteBuffers not supported yet");
-      // TODO implement PtByteBufferBlockScanner
-    }
-
     PrefixTreeArraySearcher searcher = POOL.checkOut(buffer,
         includeMvccVersion);
     return searcher;
@@ -59,14 +52,14 @@ public class DecoderFactory {
 
 
   /**************************** helper ******************************/
-  public static PrefixTreeArraySearcher ensureArraySearcherValid(ByteBuffer buffer,
+  public static PrefixTreeArraySearcher ensureArraySearcherValid(ByteBuff buffer,
       PrefixTreeArraySearcher searcher, boolean includeMvccVersion) {
     if (searcher == null) {
       PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(buffer);
       searcher = new PrefixTreeArraySearcher(blockMeta, blockMeta.getRowTreeDepth(),
           blockMeta.getMaxRowLength(), blockMeta.getMaxQualifierLength(),
           blockMeta.getMaxTagsLength());
-      searcher.initOnBlock(blockMeta, buffer.array(), includeMvccVersion);
+      searcher.initOnBlock(blockMeta, buffer, includeMvccVersion);
       return searcher;
     }
 
@@ -83,7 +76,7 @@ public class DecoderFactory {
           qualifierBufferLength, tagBufferLength);
     }
     //this is where we parse the BlockMeta
-    searcher.initOnBlock(blockMeta, buffer.array(), includeMvccVersion);
+    searcher.initOnBlock(blockMeta, buffer, includeMvccVersion);
     return searcher;
   }
 
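A sketch of the pooled-searcher lifecycle with the new ByteBuff signature. The checkIn call is an assumption here: this commit only retypes checkOut and ensureArraySearcherValid, and the return-to-pool side is presumed unchanged:

    ByteBuff block = new SingleByteBuff(ByteBuffer.wrap(encodedBlockBytes)); // or a direct buffer
    PrefixTreeArraySearcher searcher = DecoderFactory.checkOut(block, true);
    try {
      while (searcher.advance()) {
        Cell cell = searcher.current();
        // ... consume cell ...
      }
    } finally {
      DecoderFactory.checkIn(searcher);   // hand the searcher back to the pool
    }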
PrefixTreeArrayScanner.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.decode.row.RowNodeReader;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp.MvccVersionDecoder;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp.TimestampDecoder;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 
 /**
  * Extends PtCell and manipulates its protected fields. Could alternatively contain a PtCell and
@@ -103,7 +104,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
     return true;
   }
 
-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block,
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block,
       boolean includeMvccVersion) {
     this.block = block;
     this.blockMeta = blockMeta;
@@ -358,7 +359,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
   /***************** helper methods **************************/
 
   protected void appendCurrentTokenToRowBuffer() {
-    System.arraycopy(block, currentRowNode.getTokenArrayOffset(), rowBuffer, rowLength,
-        currentRowNode.getTokenLength());
+    block.get(currentRowNode.getTokenArrayOffset(), rowBuffer, rowLength,
+        currentRowNode.getTokenLength());
     rowLength += currentRowNode.getTokenLength();
   }
@@ -498,14 +499,11 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
     int offsetIntoValueSection = currentRowNode.getValueOffset(currentCellIndex, blockMeta);
     absoluteValueOffset = blockMeta.getAbsoluteValueOffset() + offsetIntoValueSection;
     valueLength = currentRowNode.getValueLength(currentCellIndex, blockMeta);
+    this.block.asSubByteBuffer(this.absoluteValueOffset, valueLength, pair);
   }
 
   /**************** getters ***************************/
 
-  public byte[] getTreeBytes() {
-    return block;
-  }
-
   public PrefixTreeBlockMeta getBlockMeta() {
     return blockMeta;
   }
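The System.arraycopy call becomes a positional ByteBuff read; the two forms copy the same bytes, but only the ByteBuff form works when the block has no accessible backing array:

    // before: block was a byte[]
    System.arraycopy(block, currentRowNode.getTokenArrayOffset(), rowBuffer, rowLength,
        currentRowNode.getTokenLength());
    // after: block is a ByteBuff; same copy, no backing array required
    block.get(currentRowNode.getTokenArrayOffset(), rowBuffer, rowLength,
        currentRowNode.getTokenLength());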
PrefixTreeCell.java
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hbase.codec.prefixtree.decode;
 
+
+import java.nio.ByteBuffer;
+import org.apache.hadoop.hbase.ByteBufferedCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -25,16 +28,20 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.SettableSequenceId;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ObjectIntPair;
 
 /**
- * As the PrefixTreeArrayScanner moves through the tree bytes, it changes the values in the fields
- * of this class so that Cell logic can be applied, but without allocating new memory for every Cell
- * iterated through.
+ * As the PrefixTreeArrayScanner moves through the tree bytes, it changes the
+ * values in the fields of this class so that Cell logic can be applied, but
+ * without allocating new memory for every Cell iterated through.
  */
 @InterfaceAudience.Private
-public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell> {
-  // Create a reference here? Can be removed too
+public class PrefixTreeCell extends ByteBufferedCell implements SettableSequenceId,
+    Comparable<Cell> {
+  // Create a reference here? Can be removed too
   protected CellComparator comparator = CellComparator.COMPARATOR;
 
   /********************** static **********************/
@@ -46,13 +53,15 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
     }
   }
 
-  //Same as KeyValue constructor. Only used to avoid NPE's when full cell hasn't been initialized.
+  // Same as KeyValue constructor. Only used to avoid NPE's when full cell
+  // hasn't been initialized.
   public static final KeyValue.Type DEFAULT_TYPE = KeyValue.Type.Put;
 
   /******************** fields ************************/
 
-  protected byte[] block;
-  //we could also avoid setting the mvccVersion in the scanner/searcher, but this is simpler
+  protected ByteBuff block;
+  // we could also avoid setting the mvccVersion in the scanner/searcher, but
+  // this is simpler
   protected boolean includeMvccVersion;
 
   protected byte[] rowBuffer;
@@ -77,11 +86,14 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
   protected byte[] tagsBuffer;
   protected int tagsOffset;
   protected int tagsLength;
+  // Pair to set the value ByteBuffer and its offset
+  protected ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<ByteBuffer>();
 
   /********************** Cell methods ******************/
 
   /**
-   * For debugging. Currently creates new KeyValue to utilize its toString() method.
+   * For debugging. Currently creates new KeyValue to utilize its toString()
+   * method.
    */
   @Override
   public String toString() {
@@ -93,10 +105,10 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
     if (!(obj instanceof Cell)) {
       return false;
     }
-    //Temporary hack to maintain backwards compatibility with KeyValue.equals
-    return CellUtil.equalsIgnoreMvccVersion(this, (Cell)obj);
+    // Temporary hack to maintain backwards compatibility with KeyValue.equals
+    return CellUtil.equalsIgnoreMvccVersion(this, (Cell) obj);
 
-    //TODO return CellComparator.equals(this, (Cell)obj);//see HBASE-6907
+    // TODO return CellComparator.equals(this, (Cell)obj);//see HBASE-6907
   }
 
   @Override
@@ -190,12 +202,24 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
 
   @Override
   public byte[] getValueArray() {
-    return block;
+    if (this.pair.getFirst().hasArray()) {
+      return this.pair.getFirst().array();
+    } else {
+      // Just in case getValueArray is called on offheap BB
+      byte[] val = new byte[valueLength];
+      ByteBufferUtils.copyFromBufferToArray(val, this.pair.getFirst(), this.pair.getSecond(), 0,
+          valueLength);
+      return val;
+    }
   }
 
   @Override
   public int getValueOffset() {
-    return absoluteValueOffset;
+    if (this.pair.getFirst().hasArray()) {
+      return this.pair.getSecond() + this.pair.getFirst().arrayOffset();
+    } else {
+      return 0;
+    }
   }
 
   @Override
@@ -206,9 +230,10 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
   /************************* helper methods *************************/
 
   /**
-   * Need this separate method so we can call it from subclasses' toString() methods
+   * Need this separate method so we can call it from subclasses' toString()
+   * methods
    */
-  protected String getKeyValueString(){
+  protected String getKeyValueString() {
     KeyValue kv = KeyValueUtil.copyToNewKeyValue(this);
     return kv.toString();
   }
@@ -232,4 +257,54 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
   public void setSequenceId(long seqId) {
     mvccVersion = seqId;
   }
+
+  @Override
+  public ByteBuffer getRowByteBuffer() {
+    return ByteBuffer.wrap(rowBuffer);
+  }
+
+  @Override
+  public int getRowPositionInByteBuffer() {
+    return 0;
+  }
+
+  @Override
+  public ByteBuffer getFamilyByteBuffer() {
+    return ByteBuffer.wrap(familyBuffer);
+  }
+
+  @Override
+  public int getFamilyPositionInByteBuffer() {
+    return getFamilyOffset();
+  }
+
+  @Override
+  public ByteBuffer getQualifierByteBuffer() {
+    return ByteBuffer.wrap(qualifierBuffer);
+  }
+
+  @Override
+  public int getQualifierPositionInByteBuffer() {
+    return getQualifierOffset();
+  }
+
+  @Override
+  public ByteBuffer getValueByteBuffer() {
+    return pair.getFirst();
+  }
+
+  @Override
+  public int getValuePositionInByteBuffer() {
+    return pair.getSecond();
+  }
+
+  @Override
+  public ByteBuffer getTagsByteBuffer() {
+    return ByteBuffer.wrap(tagsBuffer);
+  }
+
+  @Override
+  public int getTagsPositionInByteBuffer() {
+    return getTagsOffset();
+  }
 }
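The new ObjectIntPair field carries the value's (buffer, offset) coordinates, filled by the scanner through asSubByteBuffer as shown in the PrefixTreeArrayScanner hunk earlier. A condensed sketch of the contract:

    ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<ByteBuffer>();
    block.asSubByteBuffer(absoluteValueOffset, valueLength, pair);
    ByteBuffer frag = pair.getFirst();   // fragment (or duplicate) holding the value
    int pos = pair.getSecond();          // the value's position within that fragment
    // getValueByteBuffer() and getValuePositionInByteBuffer() simply return these two.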
ColumnNodeReader.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode.column;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.vint.UFIntTool;
 import org.apache.hadoop.hbase.util.vint.UVIntTool;
 
@@ -30,7 +31,7 @@ public class ColumnNodeReader {
   /**************** fields ************************/
 
   protected PrefixTreeBlockMeta blockMeta;
-  protected byte[] block;
+  protected ByteBuff block;
   protected ColumnNodeType nodeType;
   protected byte[] columnBuffer;
 
@@ -48,7 +49,7 @@ public class ColumnNodeReader {
     this.nodeType = nodeType;
   }
 
-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block) {
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block) {
     this.blockMeta = blockMeta;
     this.block = block;
   }
@@ -73,7 +74,7 @@ public class ColumnNodeReader {
   }
 
   public void prependTokenToBuffer(int bufferStartIndex) {
-    System.arraycopy(block, tokenOffsetIntoBlock, columnBuffer, bufferStartIndex, tokenLength);
+    block.get(tokenOffsetIntoBlock, columnBuffer, bufferStartIndex, tokenLength);
   }
 
   public boolean isRoot() {
ColumnReader.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode.column;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 
 /**
  * Position one of these appropriately in the data block and you can call its methods to retrieve
@@ -49,7 +50,7 @@ public class ColumnReader {
     this.columnNodeReader = new ColumnNodeReader(columnBuffer, nodeType);
   }
 
-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block) {
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block) {
     this.blockMeta = blockMeta;
     clearColumnBuffer();
     columnNodeReader.initOnBlock(blockMeta, block);
RowNodeReader.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode.row;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.vint.UFIntTool;
 import org.apache.hadoop.hbase.util.vint.UVIntTool;
 
@@ -34,7 +33,7 @@ public class RowNodeReader {
 
   /************* fields ***********************************/
 
-  protected byte[] block;
+  protected ByteBuff block;
   protected int offset;
   protected int fanIndex;
 
@@ -58,7 +57,7 @@ public class RowNodeReader {
 
   /******************* construct **************************/
 
-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block, int offset) {
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block, int offset) {
     this.block = block;
 
     this.offset = offset;
@@ -120,14 +119,14 @@ public class RowNodeReader {
   }
 
   public byte getFanByte(int i) {
-    return block[fanOffset + i];
+    return block.get(fanOffset + i);
   }
 
   /**
    * for debugging
    */
   protected String getFanByteReadable(int i){
-    return Bytes.toStringBinary(block, fanOffset + i, 1);
+    return ByteBuff.toStringBinary(block, fanOffset + i, 1);
   }
 
   public int getFamilyOffset(int index, PrefixTreeBlockMeta blockMeta) {
@@ -164,7 +163,7 @@ public class RowNodeReader {
     if (blockMeta.isAllSameType()) {
       return blockMeta.getAllTypes();
     }
-    return block[operationTypesOffset + index];
+    return block.get(operationTypesOffset + index);
   }
 
   public int getValueOffset(int index, PrefixTreeBlockMeta blockMeta) {
@@ -215,8 +214,9 @@ public class RowNodeReader {
   }
 
   public byte[] getToken() {
-    // TODO pass in reusable ByteRange
-    return new SimpleMutableByteRange(block, tokenOffset, tokenLength).deepCopyToNewArray();
+    byte[] newToken = new byte[tokenLength];
+    block.get(tokenOffset, newToken, 0, tokenLength);
+    return newToken;
   }
 
   public int getOffset() {
@@ -227,7 +227,7 @@ public class RowNodeReader {
     if( ! hasFan()){
       throw new IllegalStateException("This row node has no fan, so can't search it");
     }
-    int fanIndexInBlock = Bytes.unsignedBinarySearch(block, fanOffset, fanOffset + fanOut,
+    int fanIndexInBlock = ByteBuff.unsignedBinarySearch(block, fanOffset, fanOffset + fanOut,
         searchForByte);
     if (fanIndexInBlock >= 0) {// found it, but need to adjust for position of fan in overall block
       return fanIndexInBlock - fanOffset;
@@ -269,8 +269,8 @@ public class RowNodeReader {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("fan:" + Bytes.toStringBinary(block, fanOffset, fanOut));
-    sb.append(",token:" + Bytes.toStringBinary(block, tokenOffset, tokenLength));
+    sb.append("fan:" + ByteBuff.toStringBinary(block, fanOffset, fanOut));
+    sb.append(",token:" + ByteBuff.toStringBinary(block, tokenOffset, tokenLength));
     sb.append(",numCells:" + numCells);
     sb.append(",fanIndex:"+fanIndex);
     if(fanIndex>=0){
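In the whichFanNode hunk above, the binary search runs over block-absolute offsets, so a hit must be translated back into a fan-local index. A worked example with assumed numbers:

    // fanOffset == 100, fanOut == 5: search positions [100, 105) of the block
    // a match at block offset 103 yields fan index 103 - fanOffset == 3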
MvccVersionDecoder.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.vint.UFIntTool;
 
 /**
@@ -29,7 +30,7 @@ import org.apache.hadoop.hbase.util.vint.UFIntTool;
 public class MvccVersionDecoder {
 
   protected PrefixTreeBlockMeta blockMeta;
-  protected byte[] block;
+  protected ByteBuff block;
 
 
   /************** construct ***********************/
@@ -37,7 +38,7 @@ public class MvccVersionDecoder {
   public MvccVersionDecoder() {
   }
 
-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block) {
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block) {
     this.block = block;
     this.blockMeta = blockMeta;
   }
TimestampDecoder.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.vint.UFIntTool;
 
 /**
@@ -29,7 +30,7 @@ import org.apache.hadoop.hbase.util.vint.UFIntTool;
 public class TimestampDecoder {
 
   protected PrefixTreeBlockMeta blockMeta;
-  protected byte[] block;
+  protected ByteBuff block;
 
 
   /************** construct ***********************/
@@ -37,7 +38,7 @@ public class TimestampDecoder {
   public TimestampDecoder() {
   }
 
-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block) {
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block) {
     this.block = block;
     this.blockMeta = blockMeta;
   }
RowNodeWriter.java
@@ -114,7 +114,7 @@ public class RowNodeWriter{
       width += numCells * fixedBytesPerCell;
     }
 
-    if( ! tokenizerNode.isLeaf()){
+    if (!tokenizerNode.isLeaf()) {
       width += fanOut * offsetWidth;
     }
 
UFIntTool.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 
 /**
  * UFInt is an abbreviation for Unsigned Fixed-width Integer.
@@ -103,12 +104,12 @@ public class UFIntTool {
     return value;
   }
 
-  public static long fromBytes(final byte[] bytes, final int offset, final int width) {
+  public static long fromBytes(final ByteBuff buf, final int offset, final int width) {
     long value = 0;
-    value |= bytes[0 + offset] & 0xff;// these seem to do ok without casting the byte to int
+    value |= buf.get(offset + 0) & 0xff;// these seem to do ok without casting the byte to int
     for (int i = 1; i < width; ++i) {
       value <<= 8;
-      value |= bytes[i + offset] & 0xff;
+      value |= buf.get(i + offset) & 0xff;
     }
     return value;
   }
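fromBytes assembles the value big-endian, one byte per iteration. A worked example decoding a 2-byte UFInt (wrapped in a SingleByteBuff, as elsewhere in this commit):

    ByteBuff buf = new SingleByteBuff(ByteBuffer.wrap(new byte[] { 0x01, 0x02 }));
    long v = UFIntTool.fromBytes(buf, 0, 2);   // (0x01 << 8) | 0x02 == 258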
UVIntTool.java
@@ -23,6 +23,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 
 /**
  * Simple Variable Length Integer encoding. Left bit of 0 means we are on the last byte. If left
@@ -81,14 +82,10 @@ public class UVIntTool {
 
   /******************** bytes -> int **************************/
 
-  public static int getInt(byte[] bytes) {
-    return getInt(bytes, 0);
-  }
-
-  public static int getInt(byte[] bytes, int offset) {
+  public static int getInt(ByteBuff buffer, int offset) {
     int value = 0;
     for (int i = 0;; ++i) {
-      byte b = bytes[offset + i];
+      byte b = buffer.get(offset + i);
       int shifted = BYTE_7_RIGHT_BITS_SET & b;// kill leftmost bit
       shifted <<= 7 * i;
       value |= shifted;
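getInt consumes 7 payload bits per byte, least-significant group first, stopping at the first byte whose high bit is clear. A worked example:

    // 0x8E: high bit set, low 7 bits == 14; 0x02: stop byte, contributes 2 << 7
    ByteBuff buf = new SingleByteBuff(ByteBuffer.wrap(new byte[] { (byte) 0x8E, 0x02 }));
    int v = UVIntTool.getInt(buf, 0);   // 14 + (2 << 7) == 270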
UVLongTool.java
@@ -21,8 +21,11 @@ package org.apache.hadoop.hbase.util.vint;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 
 /**
  * Simple Variable Length Integer encoding. Left bit of 0 means we are on the last byte. If left
@@ -80,13 +83,13 @@ public class UVLongTool{
   /******************** bytes -> long **************************/
 
   public static long getLong(byte[] bytes) {
-    return getLong(bytes, 0);
+    return getLong(new SingleByteBuff(ByteBuffer.wrap(bytes)), 0);
   }
 
-  public static long getLong(byte[] bytes, int offset) {
+  public static long getLong(ByteBuff buf, int offset) {
     long value = 0;
     for (int i = 0;; ++i) {
-      byte b = bytes[offset + i];
+      byte b = buf.get(offset + i);
       long shifted = BYTE_7_RIGHT_BITS_SET & b;// kill leftmost bit
      shifted <<= 7 * i;
       value |= shifted;
TestBlockMeta.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
@@ -83,7 +84,7 @@ public class TestBlockMeta {
     ByteArrayOutputStream os = new ByteArrayOutputStream(10000);
     original.writeVariableBytesToOutputStream(os);
     ByteBuffer buffer = ByteBuffer.wrap(os.toByteArray());
-    PrefixTreeBlockMeta roundTripped = new PrefixTreeBlockMeta(buffer);
+    PrefixTreeBlockMeta roundTripped = new PrefixTreeBlockMeta(new SingleByteBuff(buffer));
     Assert.assertTrue(original.equals(roundTripped));
   }
 
TestColumnBuilder.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.hbase.codec.prefixtree.column;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.Collection;
 import java.util.List;
 
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
@@ -103,7 +105,7 @@ public class TestColumnBuilder {
     bytes = baos.toByteArray();
     buffer = new byte[blockMeta.getMaxQualifierLength()];
     reader = new ColumnReader(buffer, ColumnNodeType.QUALIFIER);
-    reader.initOnBlock(blockMeta, bytes);
+    reader.initOnBlock(blockMeta, new SingleByteBuff(ByteBuffer.wrap(bytes)));
 
     List<TokenizerNode> builderNodes = Lists.newArrayList();
     builder.appendNodes(builderNodes, true, true);
TestPrefixTreeSearcher.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
@@ -36,6 +38,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataSearchWithPrefix;
 import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
 import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.junit.Assert;
 import org.junit.Test;
@@ -56,7 +59,7 @@ public class TestPrefixTreeSearcher {
   }
 
   protected TestRowData rows;
-  protected ByteBuffer block;
+  protected ByteBuff block;
 
   public TestPrefixTreeSearcher(TestRowData testRows) throws IOException {
     this.rows = testRows;
@@ -67,7 +70,10 @@ public class TestPrefixTreeSearcher {
     }
     kvBuilder.flush();
     byte[] outputBytes = os.toByteArray();
-    this.block = ByteBuffer.wrap(outputBytes);
+    ByteBuffer out = ByteBuffer.allocateDirect(outputBytes.length);
+    ByteBufferUtils.copyFromArrayToBuffer(out, outputBytes, 0, outputBytes.length);
+    out.position(0);
+    this.block = new SingleByteBuff(out);
   }
 
   @Test
TestRowEncoder.java
@@ -28,11 +28,14 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
 import org.junit.Before;
@@ -67,7 +70,7 @@ public class TestRowEncoder {
   protected int totalBytes;
   protected PrefixTreeBlockMeta blockMetaWriter;
   protected byte[] outputBytes;
-  protected ByteBuffer buffer;
+  protected ByteBuff buffer;
   protected ByteArrayInputStream is;
   protected PrefixTreeBlockMeta blockMetaReader;
   protected byte[] inputBytes;
@@ -93,13 +96,16 @@ public class TestRowEncoder {
     outputBytes = os.toByteArray();
 
     // start reading, but save the assertions for @Test methods
-    buffer = ByteBuffer.wrap(outputBytes);
+    ByteBuffer out = ByteBuffer.allocateDirect(outputBytes.length);
+    ByteBufferUtils.copyFromArrayToBuffer(out, outputBytes, 0, outputBytes.length);
+    out.position(0);
+    buffer = new SingleByteBuff(out);
     blockMetaReader = new PrefixTreeBlockMeta(buffer);
 
     searcher = new PrefixTreeArraySearcher(blockMetaReader, blockMetaReader.getRowTreeDepth(),
         blockMetaReader.getMaxRowLength(), blockMetaReader.getMaxQualifierLength(),
         blockMetaReader.getMaxTagsLength());
-    searcher.initOnBlock(blockMetaReader, outputBytes, includeMemstoreTS);
+    searcher.initOnBlock(blockMetaReader, buffer, includeMemstoreTS);
   }
 
   @Test
TestTimestampEncoder.java
@@ -19,8 +19,10 @@
 package org.apache.hadoop.hbase.codec.prefixtree.timestamp;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.Collection;
 
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
@@ -62,7 +64,7 @@ public class TestTimestampEncoder {
     blockMeta.setTimestampFields(encoder);
     bytes = encoder.getByteArray();
     decoder = new TimestampDecoder();
-    decoder.initOnBlock(blockMeta, bytes);
+    decoder.initOnBlock(blockMeta, new SingleByteBuff(ByteBuffer.wrap(bytes)));
   }
 
   @Test
TestVIntTool.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.hbase.util.vint;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.Random;
 
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Assert;
@@ -74,7 +76,8 @@ public class TestVIntTool {
 
   @Test
   public void testFromBytes() {
-    Assert.assertEquals(Integer.MAX_VALUE, UVIntTool.getInt(UVIntTool.MAX_VALUE_BYTES));
+    Assert.assertEquals(Integer.MAX_VALUE,
+        UVIntTool.getInt(new SingleByteBuff(ByteBuffer.wrap(UVIntTool.MAX_VALUE_BYTES)), 0));
   }
 
   @Test
@@ -83,7 +86,7 @@ public class TestVIntTool {
     for (int i = 0; i < 10000; ++i) {
       int value = random.nextInt(Integer.MAX_VALUE);
       byte[] bytes = UVIntTool.getBytes(value);
-      int roundTripped = UVIntTool.getInt(bytes);
+      int roundTripped = UVIntTool.getInt(new SingleByteBuff(ByteBuffer.wrap(bytes)), 0);
       Assert.assertEquals(value, roundTripped);
     }
   }
TestVLongTool.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hbase.util.vint;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.Random;
 
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.number.RandomNumberUtils;
@@ -62,13 +64,14 @@ public class TestVLongTool {
 
   @Test
   public void testFromBytesOffset() {
-    Assert.assertEquals(Long.MAX_VALUE, UVLongTool.getLong(UVLongTool.MAX_VALUE_BYTES, 0));
+    Assert.assertEquals(Long.MAX_VALUE,
+        UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(UVLongTool.MAX_VALUE_BYTES)), 0));
 
     long ms = 1318966363481L;
     // System.out.println(ms);
     byte[] bytes = UVLongTool.getBytes(ms);
     // System.out.println(Arrays.toString(bytes));
-    long roundTripped = UVLongTool.getLong(bytes, 0);
+    long roundTripped = UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(bytes)), 0);
     Assert.assertEquals(ms, roundTripped);
 
     int calculatedNumBytes = UVLongTool.numBytes(ms);
@@ -78,7 +81,8 @@ public class TestVLongTool {
     byte[] shiftedBytes = new byte[1000];
     int shift = 33;
     System.arraycopy(bytes, 0, shiftedBytes, shift, bytes.length);
-    long shiftedRoundTrip = UVLongTool.getLong(shiftedBytes, shift);
+    long shiftedRoundTrip =
+        UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(shiftedBytes)), shift);
     Assert.assertEquals(ms, shiftedRoundTrip);
   }
 