From d81fba59cfab5ed368fe888ff811a7f5064b18cc Mon Sep 17 00:00:00 2001
From: ramkrishna
Date: Fri, 18 Sep 2015 10:44:02 +0530
Subject: [PATCH] HBASE-12298 Support BB usage in PrefixTree (Ram)

---
 .../org/apache/hadoop/hbase/nio/ByteBuff.java        |  60 +++++
 .../hadoop/hbase/nio/MultiByteBuff.java              |  19 ++
 .../hadoop/hbase/nio/SingleByteBuff.java             |   5 +
 .../hadoop/hbase/nio/TestMultiByteBuff.java          |  28 ++
 .../codec/prefixtree/PrefixTreeBlockMeta.java        | 101 +++----
 .../codec/prefixtree/PrefixTreeCodec.java            |   9 +-
 .../codec/prefixtree/PrefixTreeSeeker.java           | 249 +++++++++++++++++-
 .../prefixtree/decode/ArraySearcherPool.java         |   4 +-
 .../prefixtree/decode/DecoderFactory.java            |  17 +-
 .../decode/PrefixTreeArrayScanner.java               |  10 +-
 .../prefixtree/decode/PrefixTreeCell.java            | 107 ++++++--
 .../decode/column/ColumnNodeReader.java              |   7 +-
 .../decode/column/ColumnReader.java                  |   3 +-
 .../prefixtree/decode/row/RowNodeReader.java         |  24 +-
 .../decode/timestamp/MvccVersionDecoder.java         |   5 +-
 .../decode/timestamp/TimestampDecoder.java           |   5 +-
 .../prefixtree/encode/row/RowNodeWriter.java         |   2 +-
 .../hadoop/hbase/util/vint/UFIntTool.java            |   7 +-
 .../hadoop/hbase/util/vint/UVIntTool.java            |   9 +-
 .../hadoop/hbase/util/vint/UVLongTool.java           |   9 +-
 .../prefixtree/blockmeta/TestBlockMeta.java          |   3 +-
 .../prefixtree/column/TestColumnBuilder.java         |   4 +-
 .../row/TestPrefixTreeSearcher.java                  |  10 +-
 .../codec/prefixtree/row/TestRowEncoder.java         |  12 +-
 .../timestamp/TestTimestampEncoder.java              |   4 +-
 .../hadoop/hbase/util/vint/TestVIntTool.java         |   7 +-
 .../hadoop/hbase/util/vint/TestVLongTool.java        |  10 +-
 27 files changed, 570 insertions(+), 160 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java
index f2856791725..1e0e957d474 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java
@@ -207,6 +207,16 @@ public abstract class ByteBuff {
    */
  public abstract void get(byte[] dst, int offset, int length);

+  /**
+   * Copies the specified number of bytes from this ByteBuff, starting at sourceOffset, into
+   * the byte[] at the given offset. The position of this ByteBuff is not advanced.
+   * @param sourceOffset the offset in this ByteBuff from where the copy should happen
+   * @param dst the byte[] to which the ByteBuff's content is to be copied
+   * @param offset the offset in the byte[] at which to begin copying
+   * @param length the number of bytes to copy
+   */
+  public abstract void get(int sourceOffset, byte[] dst, int offset, int length);
+
  /**
   * Copies the content from this ByteBuff's current position to the byte array and fills it. Also
   * advances the position of the ByteBuff by the length of the byte[].
@@ -454,4 +464,54 @@ public abstract class ByteBuff {
    }
    return (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
  }
+
+  /**
+   * Search sorted array "a" for byte "key".
+   *
+   * @param a Array to search. Entries must be sorted and unique.
+   * @param fromIndex First index inclusive of "a" to include in the search.
+   * @param toIndex Last index exclusive of "a" to include in the search.
+   * @param key The byte to search for.
+   * @return The index of key if found. If not found, return -(index + 1), where
+   *         negative indicates "not found" and the "index + 1" handles the "-0"
+   *         case.
+   */
+  public static int unsignedBinarySearch(ByteBuff a, int fromIndex, int toIndex, byte key) {
+    int unsignedKey = key & 0xff;
+    int low = fromIndex;
+    int high = toIndex - 1;
+
+    while (low <= high) {
+      int mid = (low + high) >>> 1;
+      int midVal = a.get(mid) & 0xff;
+
+      if (midVal < unsignedKey) {
+        low = mid + 1;
+      } else if (midVal > unsignedKey) {
+        high = mid - 1;
+      } else {
+        return mid; // key found
+      }
+    }
+    return -(low + 1); // key not found.
+  }
+
+  public static String toStringBinary(final ByteBuff b, int off, int len) {
+    StringBuilder result = new StringBuilder();
+    // Just in case we are passed a 'len' that is > buffer length...
+    if (off >= b.capacity())
+      return result.toString();
+    if (off + len > b.capacity())
+      len = b.capacity() - off;
+    for (int i = off; i < off + len; ++i) {
+      int ch = b.get(i) & 0xFF;
+      if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z')
+          || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) {
+        result.append((char) ch);
+      } else {
+        result.append(String.format("\\x%02X", ch));
+      }
+    }
+    return result.toString();
+  }
 }
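Reviewer note on the return convention of unsignedBinarySearch above: a minimal caller-side
sketch of decoding the negative "not found" result (variable names are illustrative, not part
of the patch):

    // 'fan' is any ByteBuff whose bytes in [from, to) are sorted and unique.
    int idx = ByteBuff.unsignedBinarySearch(fan, from, to, key);
    if (idx >= 0) {
      // exact hit: 'key' lives at absolute index 'idx' in the buffer
    } else {
      int insertionPoint = -idx - 1; // undo the -(index + 1) encoding
      // 'key' is absent; it would sort in at 'insertionPoint'
    }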
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java
index 4c5e698fc3c..06652b82a4f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java
@@ -613,6 +613,25 @@ public class MultiByteBuff extends ByteBuff {
    }
  }

+  @Override
+  public void get(int sourceOffset, byte[] dst, int offset, int length) {
+    int itemIndex = getItemIndex(sourceOffset);
+    ByteBuffer item = this.items[itemIndex];
+    sourceOffset = sourceOffset - this.itemBeginPos[itemIndex];
+    while (length > 0) {
+      int toRead = Math.min((item.limit() - sourceOffset), length);
+      ByteBufferUtils.copyFromBufferToArray(dst, item, sourceOffset, offset,
+          toRead);
+      length -= toRead;
+      if (length == 0)
+        break;
+      itemIndex++;
+      item = this.items[itemIndex];
+      offset += toRead;
+      sourceOffset = 0;
+    }
+  }
+
  /**
   * Marks the limit of this MBB.
   * @param limit
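Reviewer note: the new absolute get walks the backing segments without touching position. A
minimal usage sketch (buffer sizes and values invented for illustration):

    // two 4-byte segments; absolute indexes 2..5 straddle the seam between them
    MultiByteBuff mbb = new MultiByteBuff(ByteBuffer.allocate(4), ByteBuffer.allocate(4));
    byte[] dst = new byte[4];
    mbb.get(2, dst, 0, 4); // copies bytes 2..5 into dst; mbb.position() is unchanged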
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
index 05b3f54bcc2..dbda678c564 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
@@ -160,6 +160,11 @@ public class SingleByteBuff extends ByteBuff {
    buf.position(buf.position() + length);
  }

+  @Override
+  public void get(int sourceOffset, byte[] dst, int offset, int length) {
+    ByteBufferUtils.copyFromBufferToArray(dst, buf, sourceOffset, offset, length);
+  }
+
  @Override
  public void get(byte[] dst) {
    get(dst, 0, dst.length);
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
index 4983120dc6b..193fcff4ece 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
@@ -314,4 +314,32 @@ public class TestMultiByteBuff {
    int intRes = mbb1.getIntAfterPosition(1);
    assertEquals(3, intRes);
  }
+
+  @Test
+  public void testPositionalCopyToByteArray() throws Exception {
+    byte[] b = new byte[4];
+    byte[] b1 = new byte[8];
+    ByteBuffer bb1 = ByteBuffer.wrap(b);
+    ByteBuffer bb2 = ByteBuffer.wrap(b1);
+    MultiByteBuff mbb1 = new MultiByteBuff(bb1, bb2);
+    mbb1.position(2);
+    mbb1.putInt(4);
+    mbb1.position(7);
+    mbb1.put((byte) 2);
+    mbb1.putInt(3);
+    byte[] dst = new byte[4];
+    mbb1.get(2, dst, 0, 4);
+    assertEquals(4, Bytes.toInt(dst));
+    assertEquals(12, mbb1.position());
+    mbb1.position(1);
+    dst = new byte[4];
+    mbb1.get(8, dst, 0, 4);
+    assertEquals(3, Bytes.toInt(dst));
+    assertEquals(1, mbb1.position());
+    mbb1.position(12);
+    dst = new byte[1];
+    mbb1.get(7, dst, 0, 1);
+    assertEquals(2, dst[0]);
+    assertEquals(12, mbb1.position());
+  }
 }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
index 8410cf3653d..4705452a35f 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.codec.prefixtree;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.nio.ByteBuffer;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.vint.UVIntTool;
 import org.apache.hadoop.hbase.util.vint.UVLongTool;
@@ -56,8 +56,6 @@ public class PrefixTreeBlockMeta {

  /**************** transient fields *********************/

-
-  protected int arrayOffset;

  protected int bufferOffset;
@@ -116,7 +114,6 @@ public class PrefixTreeBlockMeta {

  public PrefixTreeBlockMeta(InputStream is) throws IOException{
    this.version = VERSION;
-    this.arrayOffset = 0;
    this.bufferOffset = 0;
    readVariableBytesFromInputStream(is);
  }
@@ -124,14 +121,13 @@ public class PrefixTreeBlockMeta {
  /**
   * @param buffer positioned at start of PtBlockMeta
   */
-  public PrefixTreeBlockMeta(ByteBuffer buffer) {
+  public PrefixTreeBlockMeta(ByteBuff buffer) {
    initOnBlock(buffer);
  }

-  public void initOnBlock(ByteBuffer buffer) {
-    arrayOffset = buffer.arrayOffset();
+  public void initOnBlock(ByteBuff buffer) {
    bufferOffset = buffer.position();
-    readVariableBytesFromArray(buffer.array(), arrayOffset + bufferOffset);
+    readVariableBytesFromBuffer(buffer, bufferOffset);
  }

@@ -263,79 +259,79 @@ public class PrefixTreeBlockMeta {
    numUniqueTags = UVIntTool.getInt(is);
  }

-  public void readVariableBytesFromArray(byte[] bytes, int offset) {
+  public void readVariableBytesFromBuffer(ByteBuff buf, int offset) {
    int position = offset;

-    version = UVIntTool.getInt(bytes, position);
+    version = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(version);
-    numMetaBytes = UVIntTool.getInt(bytes, position);
+    numMetaBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numMetaBytes);
-    numKeyValueBytes = UVIntTool.getInt(bytes, position);
+    numKeyValueBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numKeyValueBytes);
-    setIncludesMvccVersion(bytes[position]);
+    setIncludesMvccVersion(buf.get(position));
    ++position;

-    numRowBytes = UVIntTool.getInt(bytes, position);
+    numRowBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numRowBytes);
-    numFamilyBytes = UVIntTool.getInt(bytes, position);
+    numFamilyBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numFamilyBytes);
-    numQualifierBytes = UVIntTool.getInt(bytes, position);
+    numQualifierBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numQualifierBytes);
-    numTagsBytes = UVIntTool.getInt(bytes, position);
+    numTagsBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numTagsBytes);
-    numTimestampBytes = UVIntTool.getInt(bytes, position);
+    numTimestampBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numTimestampBytes);
-    numMvccVersionBytes = UVIntTool.getInt(bytes, position);
+    numMvccVersionBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numMvccVersionBytes);
-    numValueBytes = UVIntTool.getInt(bytes, position);
+    numValueBytes = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numValueBytes);

-    nextNodeOffsetWidth = UVIntTool.getInt(bytes, position);
+    nextNodeOffsetWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(nextNodeOffsetWidth);
-    familyOffsetWidth = UVIntTool.getInt(bytes, position);
+    familyOffsetWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(familyOffsetWidth);
-    qualifierOffsetWidth = UVIntTool.getInt(bytes, position);
+    qualifierOffsetWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(qualifierOffsetWidth);
-    tagsOffsetWidth = UVIntTool.getInt(bytes, position);
+    tagsOffsetWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(tagsOffsetWidth);
-    timestampIndexWidth = UVIntTool.getInt(bytes, position);
+    timestampIndexWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(timestampIndexWidth);
-    mvccVersionIndexWidth = UVIntTool.getInt(bytes, position);
+    mvccVersionIndexWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(mvccVersionIndexWidth);
-    valueOffsetWidth = UVIntTool.getInt(bytes, position);
+    valueOffsetWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(valueOffsetWidth);
-    valueLengthWidth = UVIntTool.getInt(bytes, position);
+    valueLengthWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(valueLengthWidth);

-    rowTreeDepth = UVIntTool.getInt(bytes, position);
+    rowTreeDepth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(rowTreeDepth);
-    maxRowLength = UVIntTool.getInt(bytes, position);
+    maxRowLength = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(maxRowLength);
-    maxQualifierLength = UVIntTool.getInt(bytes, position);
+    maxQualifierLength = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(maxQualifierLength);
-    maxTagsLength = UVIntTool.getInt(bytes, position);
+    maxTagsLength = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(maxTagsLength);

-    minTimestamp = UVLongTool.getLong(bytes, position);
+    minTimestamp = UVLongTool.getLong(buf, position);
    position += UVLongTool.numBytes(minTimestamp);
-    timestampDeltaWidth = UVIntTool.getInt(bytes, position);
+    timestampDeltaWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(timestampDeltaWidth);
-    minMvccVersion = UVLongTool.getLong(bytes, position);
+    minMvccVersion = UVLongTool.getLong(buf, position);
    position += UVLongTool.numBytes(minMvccVersion);
-    mvccVersionDeltaWidth = UVIntTool.getInt(bytes, position);
+    mvccVersionDeltaWidth = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(mvccVersionDeltaWidth);

-    setAllSameType(bytes[position]);
+    setAllSameType(buf.get(position));
    ++position;
-    allTypes = bytes[position];
+    allTypes = buf.get(position);
    ++position;

-    numUniqueRows = UVIntTool.getInt(bytes, position);
+    numUniqueRows = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numUniqueRows);
-    numUniqueFamilies = UVIntTool.getInt(bytes, position);
+    numUniqueFamilies = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numUniqueFamilies);
-    numUniqueQualifiers = UVIntTool.getInt(bytes, position);
+    numUniqueQualifiers = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numUniqueQualifiers);
-    numUniqueTags = UVIntTool.getInt(bytes, position);
+    numUniqueTags = UVIntTool.getInt(buf, position);
    position += UVIntTool.numBytes(numUniqueTags);
  }
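Reviewer aside: every field above follows the same read-then-advance idiom over the serialized
block meta. A tiny helper of this shape (purely illustrative; the patch itself keeps the calls
inline, and the int[1] cursor is a hypothetical device) may make the layout easier to audit:

    // Reads one unsigned vint field from the block meta and advances a shared cursor.
    static int readUVInt(ByteBuff buf, int[] cursor) {
      int value = UVIntTool.getInt(buf, cursor[0]);
      cursor[0] += UVIntTool.numBytes(value);
      return value;
    }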
@@ -405,8 +401,6 @@ public class PrefixTreeBlockMeta {
      return false;
    if (allTypes != other.allTypes)
      return false;
-    if (arrayOffset != other.arrayOffset)
-      return false;
    if (bufferOffset != other.bufferOffset)
      return false;
    if (valueLengthWidth != other.valueLengthWidth)
@@ -483,7 +477,6 @@ public class PrefixTreeBlockMeta {
    int result = 1;
    result = prime * result + (allSameType ? 1231 : 1237);
    result = prime * result + allTypes;
-    result = prime * result + arrayOffset;
    result = prime * result + bufferOffset;
    result = prime * result + valueLengthWidth;
    result = prime * result + valueOffsetWidth;
@@ -525,9 +518,7 @@ public class PrefixTreeBlockMeta {
  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
-    builder.append("PtBlockMeta [arrayOffset=");
-    builder.append(arrayOffset);
-    builder.append(", bufferOffset=");
+    builder.append("PtBlockMeta [bufferOffset=");
    builder.append(bufferOffset);
    builder.append(", version=");
    builder.append(version);
@@ -602,12 +593,8 @@ public class PrefixTreeBlockMeta {

  /************** absolute getters *******************/

-  public int getAbsoluteMetaOffset() {
-    return arrayOffset + bufferOffset;
-  }
-
  public int getAbsoluteRowOffset() {
-    return getAbsoluteMetaOffset() + numMetaBytes;
+    return getBufferOffset() + numMetaBytes;
  }

  public int getAbsoluteFamilyOffset() {
@@ -749,14 +736,6 @@ public class PrefixTreeBlockMeta {
    this.numMetaBytes = numMetaBytes;
  }

-  public int getArrayOffset() {
-    return arrayOffset;
-  }
-
-  public void setArrayOffset(int arrayOffset) {
-    this.arrayOffset = arrayOffset;
-  }
-
  public int getBufferOffset() {
    return bufferOffset;
  }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
index 29f481145d9..1efee969de9 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.io.WritableUtils;

@@ -83,7 +84,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
      int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException {
    ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source);// waste
    sourceAsBuffer.mark();
-    PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(sourceAsBuffer);
+    PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(new SingleByteBuff(sourceAsBuffer));
    sourceAsBuffer.rewind();
    int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
    byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
@@ -92,7 +93,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
    CellSearcher searcher = null;
    try {
      boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
-      searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvcc);
+      searcher = DecoderFactory.checkOut(new SingleByteBuff(sourceAsBuffer), includesMvcc);
      while (searcher.advance()) {
        KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current());
        // needs to be modified for DirectByteBuffers. no existing methods to
@@ -121,9 +122,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
    PrefixTreeArraySearcher searcher = null;
    try {
      // should i includeMemstoreTS (second argument)?  i think PrefixKeyDeltaEncoder is, so i will
-      // TODO : Change to work with BBs
-      searcher = DecoderFactory.checkOut(block.asSubByteBuffer(block.limit() - block.position()),
-          true);
+      searcher = DecoderFactory.checkOut(block, true);
      if (!searcher.positionAtFirstCell()) {
        return null;
      }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
index d77bb24851e..b24918b5058 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree;

 import java.nio.ByteBuffer;

+import org.apache.hadoop.hbase.ByteBufferedCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -33,6 +34,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
 import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;

@@ -57,9 +59,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {

  @Override
  public void setCurrentBuffer(ByteBuff fullBlockBuffer) {
-    block = fullBlockBuffer.asSubByteBuffer(fullBlockBuffer.limit());
-    // TODO : change to Bytebuff
-    ptSearcher = DecoderFactory.checkOut(block, includeMvccVersion);
+    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);
    rewind();
  }

@@ -99,16 +99,29 @@ public class PrefixTreeSeeker implements EncodedSeeker {
   */
  @Override
  public Cell getCell() {
-    Cell cell = ptSearcher.current();
+    // The PrefixTreeCell is a ByteBufferedCell; the value part of the cell determines
+    // whether it is backed by an off-heap or an on-heap buffer. All other parts of the
+    // cell (row, family and qualifier) are represented as on-heap byte[]s.
+    ByteBufferedCell cell = (ByteBufferedCell)ptSearcher.current();
    if (cell == null) {
      return null;
    }
-    return new ClonedPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
-        cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
-        cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
-        cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),
-        cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),
-        cell.getSequenceId());
+    // Use the ByteBufferedCell to see if the Cell is on-heap or off-heap
+    if (cell.getValueByteBuffer().hasArray()) {
+      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
+          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
+          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
+          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),
+          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),
+          cell.getSequenceId());
+    } else {
+      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
+          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
+          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
+          cell.getValueByteBuffer(), cell.getValuePositionInByteBuffer(), cell.getValueLength(),
+          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),
+          cell.getTypeByte(), cell.getSequenceId());
+    }
  }
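Reviewer aside: callers that may receive the off-heap flavor should read the value through the
ByteBuffer accessors rather than getValueArray(), which copies for the off-heap cell below. A
minimal sketch, using the Cell/ByteBufferedCell API as it appears in this patch:

    Cell c = seeker.getCell();
    if (c instanceof ByteBufferedCell) {
      ByteBufferedCell bbCell = (ByteBufferedCell) c;
      ByteBuffer val = bbCell.getValueByteBuffer();     // may be a direct (off-heap) buffer
      int pos = bbCell.getValuePositionInByteBuffer();  // read val.get(pos + i); no copy made
    } else {
      byte[] val = c.getValueArray();                   // on-heap; honor offset/length
    }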
  }

  /**
@@ -208,12 +221,13 @@ public class PrefixTreeSeeker implements EncodedSeeker {
    return comparator.compare(key, ptSearcher.current());
  }

+
  /**
   * Cloned version of the PrefixTreeCell where except the value part, the rest
   * of the key part is deep copied
   *
   */
-  private static class ClonedPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {
+  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {
    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)
        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));
@@ -232,7 +246,7 @@ public class PrefixTreeSeeker implements EncodedSeeker {
    private long seqId;
    private byte type;

-    public ClonedPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,
+    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,
        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,
        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,
        long seqId) {
@@ -367,4 +381,215 @@ public class PrefixTreeSeeker implements EncodedSeeker {
      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;
    }
  }
+
+  private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell,
+      SettableSequenceId, HeapSize {
+    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
+        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)
+        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));
+    private ByteBuffer rowBuff;
+    private short rowLength;
+    private ByteBuffer famBuff;
+    private byte famLength;
+    private ByteBuffer qualBuff;
+    private int qualLength;
+    private ByteBuffer val;
+    private int valOffset;
+    private int valLength;
+    private ByteBuffer tagBuff;
+    private int tagsLength;
+    private long ts;
+    private long seqId;
+    private byte type;
+
+    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,
+        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,
+        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,
+        long seqId) {
+      byte[] tmpRow = new byte[rowLength];
+      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);
+      this.rowBuff = ByteBuffer.wrap(tmpRow);
+      this.rowLength = rowLength;
+      byte[] tmpFam = new byte[famLength];
+      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);
+      this.famBuff = ByteBuffer.wrap(tmpFam);
+      this.famLength = famLength;
+      byte[] tmpQual = new byte[qualLength];
+      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);
+      this.qualBuff = ByteBuffer.wrap(tmpQual);
+      this.qualLength = qualLength;
+      byte[] tmpTag = new byte[tagLength];
+      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);
+      this.tagBuff = ByteBuffer.wrap(tmpTag);
+      this.tagsLength = tagLength;
+      this.val = val;
+      this.valLength = valLength;
+      this.valOffset = valOffset;
+      this.ts = ts;
+      this.seqId = seqId;
+      this.type = type;
+    }
+
+    @Override
+    public void setSequenceId(long seqId) {
+      this.seqId = seqId;
+    }
+
+    @Override
+    public byte[] getRowArray() {
+      return this.rowBuff.array();
+    }
+
+    @Override
+    public int getRowOffset() {
+      return getRowPositionInByteBuffer();
+    }
+
+    @Override
+    public short getRowLength() {
+      return this.rowLength;
+    }
+
+    @Override
+    public byte[] getFamilyArray() {
+      return this.famBuff.array();
+    }
+
+    @Override
+    public int getFamilyOffset() {
+      return getFamilyPositionInByteBuffer();
+    }
+
+    @Override
+    public byte getFamilyLength() {
+      return this.famLength;
+    }
+
+    @Override
+    public byte[] getQualifierArray() {
+      return this.qualBuff.array();
+    }
+
+    @Override
+    public int getQualifierOffset() {
+      return getQualifierPositionInByteBuffer();
+    }
+
+    @Override
+    public int getQualifierLength() {
+      return this.qualLength;
+    }
+
+    @Override
+    public long getTimestamp() {
+      return ts;
+    }
+
+    @Override
+    public byte getTypeByte() {
+      return type;
+    }
+
+    @Override
+    public long getSequenceId() {
+      return seqId;
+    }
+
+    @Override
+    public byte[] getValueArray() {
+      byte[] tmpVal = new byte[valLength];
+      ByteBufferUtils.copyFromBufferToArray(tmpVal, val, valOffset, 0, valLength);
+      return tmpVal;
+    }
+
+    @Override
+    public int getValueOffset() {
+      return 0;
+    }
+
+    @Override
+    public int getValueLength() {
+      return this.valLength;
+    }
+
+    @Override
+    public byte[] getTagsArray() {
+      return this.tagBuff.array();
+    }
+
+    @Override
+    public int getTagsOffset() {
+      return getTagsPositionInByteBuffer();
+    }
+
+    @Override
+    public int getTagsLength() {
+      return this.tagsLength;
+    }
+
+    @Override
+    public ByteBuffer getRowByteBuffer() {
+      return this.rowBuff;
+    }
+
+    @Override
+    public int getRowPositionInByteBuffer() {
+      return 0;
+    }
+
+    @Override
+    public ByteBuffer getFamilyByteBuffer() {
+      return this.famBuff;
+    }
+
+    @Override
+    public int getFamilyPositionInByteBuffer() {
+      return 0;
+    }
+
+    @Override
+    public ByteBuffer getQualifierByteBuffer() {
+      return this.qualBuff;
+    }
+
+    @Override
+    public int getQualifierPositionInByteBuffer() {
+      return 0;
+    }
+
+    @Override
+    public ByteBuffer getTagsByteBuffer() {
+      return this.tagBuff;
+    }
+
+    @Override
+    public int getTagsPositionInByteBuffer() {
+      return 0;
+    }
+
+    @Override
+    public ByteBuffer getValueByteBuffer() {
+      return this.val;
+    }
+
+    @Override
+    public int getValuePositionInByteBuffer() {
+      return this.valOffset;
+    }
+
+    @Override
+    public long heapSize() {
+      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;
+    }
+
+    @Override
+    public String toString() {
+      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());
+      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());
+      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),
+          getQualifierLength());
+      String timestamp = String.valueOf((getTimestamp()));
+      return row + "/" + family + (family != null && family.length() > 0 ? ":" : "") + qualifier
+          + "/" + timestamp + "/" + Type.codeToType(type);
+    }
+  }
 }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
index f0b249f531d..e6df88a9390 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
@@ -18,11 +18,11 @@

 package org.apache.hadoop.hbase.codec.prefixtree.decode;

-import java.nio.ByteBuffer;
 import java.util.Queue;
 import java.util.concurrent.LinkedBlockingQueue;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;

 /**
 *

@@ -45,7 +45,7 @@ public class ArraySearcherPool {
  protected Queue<PrefixTreeArraySearcher> pool
      = new LinkedBlockingQueue<PrefixTreeArraySearcher>(MAX_POOL_SIZE);

-  public PrefixTreeArraySearcher checkOut(ByteBuffer buffer, boolean includesMvccVersion) {
+  public PrefixTreeArraySearcher checkOut(ByteBuff buffer, boolean includesMvccVersion) {
    PrefixTreeArraySearcher searcher = pool.poll();//will return null if pool is empty
    searcher = DecoderFactory.ensureArraySearcherValid(buffer, searcher, includesMvccVersion);
    return searcher;
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java
index f8f7c99ceb8..6c28143bf03 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java
@@ -18,28 +18,21 @@

 package org.apache.hadoop.hbase.codec.prefixtree.decode;

-import java.nio.ByteBuffer;
-
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
-
+import org.apache.hadoop.hbase.nio.ByteBuff;

 /**
  * Static wrapper class for the ArraySearcherPool.
  */
 @InterfaceAudience.Private
 public class DecoderFactory {
-
  private static final ArraySearcherPool POOL = new ArraySearcherPool();

  //TODO will need a PrefixTreeSearcher on top of CellSearcher
-  public static PrefixTreeArraySearcher checkOut(final ByteBuffer buffer,
+  public static PrefixTreeArraySearcher checkOut(final ByteBuff buffer,
      boolean includeMvccVersion) {
-    if (buffer.isDirect()) {
-      throw new IllegalArgumentException("DirectByteBuffers not supported yet");
-      // TODO implement PtByteBufferBlockScanner
-    }
-
    PrefixTreeArraySearcher searcher = POOL.checkOut(buffer,
      includeMvccVersion);
    return searcher;
@@ -59,14 +52,14 @@ public class DecoderFactory {

  /**************************** helper ******************************/

-  public static PrefixTreeArraySearcher ensureArraySearcherValid(ByteBuffer buffer,
+  public static PrefixTreeArraySearcher ensureArraySearcherValid(ByteBuff buffer,
      PrefixTreeArraySearcher searcher, boolean includeMvccVersion) {
    if (searcher == null) {
      PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(buffer);
      searcher = new PrefixTreeArraySearcher(blockMeta, blockMeta.getRowTreeDepth(),
          blockMeta.getMaxRowLength(), blockMeta.getMaxQualifierLength(),
          blockMeta.getMaxTagsLength());
-      searcher.initOnBlock(blockMeta, buffer.array(), includeMvccVersion);
+      searcher.initOnBlock(blockMeta, buffer, includeMvccVersion);
      return searcher;
    }
@@ -83,7 +76,7 @@ public class DecoderFactory {
          qualifierBufferLength, tagBufferLength);
    }
    //this is where we parse the BlockMeta
-    searcher.initOnBlock(blockMeta, buffer.array(), includeMvccVersion);
+    searcher.initOnBlock(blockMeta, buffer, includeMvccVersion);
    return searcher;
  }
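Reviewer aside: the factory/pool pair is a check-out/check-in lifecycle. A minimal sketch of the
intended usage, mirroring how PrefixTreeCodec drives it elsewhere in this patch (the error
handling shape here is illustrative):

    CellSearcher searcher = null;
    try {
      searcher = DecoderFactory.checkOut(blockBuffer, includesMvcc); // reuse a pooled searcher
      while (searcher.advance()) {
        // consume searcher.current()
      }
    } finally {
      DecoderFactory.checkIn(searcher); // return it to the pool for the next Get
    }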
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
index 1e91eb2149b..3f271fe8261 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.decode.row.RowNodeReader;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp.MvccVersionDecoder;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp.TimestampDecoder;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
+import org.apache.hadoop.hbase.nio.ByteBuff;

 /**
  * Extends PtCell and manipulates its protected fields. Could alternatively contain a PtCell and
@@ -103,7 +104,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
    return true;
  }

-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block,
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block,
      boolean includeMvccVersion) {
    this.block = block;
    this.blockMeta = blockMeta;
@@ -358,7 +359,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
  /***************** helper methods **************************/

  protected void appendCurrentTokenToRowBuffer() {
-    System.arraycopy(block, currentRowNode.getTokenArrayOffset(), rowBuffer, rowLength,
+    block.get(currentRowNode.getTokenArrayOffset(), rowBuffer, rowLength,
        currentRowNode.getTokenLength());
    rowLength += currentRowNode.getTokenLength();
  }
@@ -498,14 +499,11 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
    int offsetIntoValueSection = currentRowNode.getValueOffset(currentCellIndex, blockMeta);
    absoluteValueOffset = blockMeta.getAbsoluteValueOffset() + offsetIntoValueSection;
    valueLength = currentRowNode.getValueLength(currentCellIndex, blockMeta);
+    this.block.asSubByteBuffer(this.absoluteValueOffset, valueLength, pair);
  }

  /**************** getters ***************************/

-  public byte[] getTreeBytes() {
-    return block;
-  }
-
  public PrefixTreeBlockMeta getBlockMeta() {
    return blockMeta;
  }
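Reviewer aside: asSubByteBuffer(offset, length, pair) is the zero-copy bridge used above. It
fills the shared ObjectIntPair with a backing ByteBuffer plus the position of the value inside
it (for a MultiByteBuff this may be one internal segment). A minimal sketch, assuming the
ByteBuff/ObjectIntPair API as used in this patch:

    ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<ByteBuffer>();
    block.asSubByteBuffer(valueOffset, valueLength, pair);
    ByteBuffer valBuf = pair.getFirst(); // buffer holding the value bytes
    int valPos = pair.getSecond();       // where the value starts inside valBuf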
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
index 82d1d7eef6c..7f3418f37ef 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
@@ -18,6 +18,9 @@

 package org.apache.hadoop.hbase.codec.prefixtree.decode;

+
+import java.nio.ByteBuffer;
+import org.apache.hadoop.hbase.ByteBufferedCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -25,16 +28,20 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.SettableSequenceId;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ObjectIntPair;

 /**
- * As the PrefixTreeArrayScanner moves through the tree bytes, it changes the values in the fields
- * of this class so that Cell logic can be applied, but without allocating new memory for every Cell
- * iterated through.
+ * As the PrefixTreeArrayScanner moves through the tree bytes, it changes the
+ * values in the fields of this class so that Cell logic can be applied, but
+ * without allocating new memory for every Cell iterated through.
  */
 @InterfaceAudience.Private
-public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell> {
-  // Create a reference here? Can be removed too
+public class PrefixTreeCell extends ByteBufferedCell implements SettableSequenceId,
+    Comparable<Cell> {
+  // Create a reference here? Can be removed too
  protected CellComparator comparator = CellComparator.COMPARATOR;

  /********************** static **********************/
@@ -46,13 +53,15 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell>
-  protected byte[] block;
+  protected ByteBuff block;
+  protected ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<ByteBuffer>();

  /********************** Cell methods ******************/

  /**
-   * For debugging. Currently creates new KeyValue to utilize its toString() method.
+   * For debugging. Currently creates new KeyValue to utilize its toString()
+   * method.
   */
  @Override
  public String toString() {
@@ -93,10 +105,10 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell>
    if (fanIndexInBlock >= 0) {// found it, but need to adjust for position of fan in overall block
      return fanIndexInBlock - fanOffset;
@@ -269,8 +269,8 @@ public class RowNodeReader {
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
-    sb.append("fan:" + Bytes.toStringBinary(block, fanOffset, fanOut));
-    sb.append(",token:" + Bytes.toStringBinary(block, tokenOffset, tokenLength));
+    sb.append("fan:" + ByteBuff.toStringBinary(block, fanOffset, fanOut));
+    sb.append(",token:" + ByteBuff.toStringBinary(block, tokenOffset, tokenLength));
    sb.append(",numCells:" + numCells);
    sb.append(",fanIndex:"+fanIndex);
    if(fanIndex>=0){
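Reviewer aside: the PrefixTreeCell javadoc above describes a flyweight; one mutable cell
instance is reused as the scanner advances. The practical consequence for callers (a sketch;
'scanner' and 'use' are illustrative names):

    // Cell references from a flyweight scanner are only valid until the next advance().
    while (scanner.advance()) {
      Cell c = scanner.current(); // same object each iteration; fields rewritten in place
      use(c);                     // read it now...
      // ...or deep-copy it first, e.g. KeyValueUtil.copyToNewKeyValue(c)
    }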
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/timestamp/MvccVersionDecoder.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/timestamp/MvccVersionDecoder.java
index 8d51e921035..a7ff88e8155 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/timestamp/MvccVersionDecoder.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/timestamp/MvccVersionDecoder.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.vint.UFIntTool;

 /**
@@ -29,7 +30,7 @@ import org.apache.hadoop.hbase.util.vint.UFIntTool;
 public class MvccVersionDecoder {

  protected PrefixTreeBlockMeta blockMeta;
-  protected byte[] block;
+  protected ByteBuff block;

  /************** construct ***********************/

@@ -37,7 +38,7 @@ public class MvccVersionDecoder {
  public MvccVersionDecoder() {
  }

-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block) {
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block) {
    this.block = block;
    this.blockMeta = blockMeta;
  }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/timestamp/TimestampDecoder.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/timestamp/TimestampDecoder.java
index 47a975a633c..d1c018b1102 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/timestamp/TimestampDecoder.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/timestamp/TimestampDecoder.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode.timestamp;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.vint.UFIntTool;

 /**
@@ -29,7 +30,7 @@ import org.apache.hadoop.hbase.util.vint.UFIntTool;
 public class TimestampDecoder {

  protected PrefixTreeBlockMeta blockMeta;
-  protected byte[] block;
+  protected ByteBuff block;

  /************** construct ***********************/

@@ -37,7 +38,7 @@ public class TimestampDecoder {
  public TimestampDecoder() {
  }

-  public void initOnBlock(PrefixTreeBlockMeta blockMeta, byte[] block) {
+  public void initOnBlock(PrefixTreeBlockMeta blockMeta, ByteBuff block) {
    this.block = block;
    this.blockMeta = blockMeta;
  }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java
index 35f264b6d27..ee5db35dc3f 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java
@@ -114,7 +114,7 @@ public class RowNodeWriter{
      width += numCells * fixedBytesPerCell;
    }

-    if( ! tokenizerNode.isLeaf()){
+    if (!tokenizerNode.isLeaf()) {
      width += fanOut * offsetWidth;
    }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java
index a3da9f0129e..23c5c344e47 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.OutputStream;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;

 /**
  * UFInt is an abbreviation for Unsigned Fixed-width Integer.
@@ -103,12 +104,12 @@ public class UFIntTool {
    return value;
  }

-  public static long fromBytes(final byte[] bytes, final int offset, final int width) {
+  public static long fromBytes(final ByteBuff buf, final int offset, final int width) {
    long value = 0;
-    value |= bytes[0 + offset] & 0xff;// these seem to do ok without casting the byte to int
+    value |= buf.get(offset + 0) & 0xff;// these seem to do ok without casting the byte to int
    for (int i = 1; i < width; ++i) {
      value <<= 8;
-      value |= bytes[i + offset] & 0xff;
+      value |= buf.get(i + offset) & 0xff;
    }
    return value;
  }
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UVIntTool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UVIntTool.java
index aeebd2c7fc7..ea1c30043d9 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UVIntTool.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UVIntTool.java
@@ -23,6 +23,7 @@ import java.io.InputStream;
 import java.io.OutputStream;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;

 /**
  * Simple Variable Length Integer encoding. Left bit of 0 means we are on the last byte. If left
@@ -81,14 +82,10 @@ public class UVIntTool {

  /******************** bytes -> int **************************/

-  public static int getInt(byte[] bytes) {
-    return getInt(bytes, 0);
-  }
-
-  public static int getInt(byte[] bytes, int offset) {
+  public static int getInt(ByteBuff buffer, int offset) {
    int value = 0;
    for (int i = 0;; ++i) {
-      byte b = bytes[offset + i];
+      byte b = buffer.get(offset + i);
      int shifted = BYTE_7_RIGHT_BITS_SET & b;// kill leftmost bit
      shifted <<= 7 * i;
      value |= shifted;
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UVLongTool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UVLongTool.java
index b55e0f6f73b..650643d523a 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UVLongTool.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UVLongTool.java
@@ -21,8 +21,11 @@ package org.apache.hadoop.hbase.util.vint;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;

 /**
  * Simple Variable Length Integer encoding. Left bit of 0 means we are on the last byte. If left
@@ -80,13 +83,13 @@ public class UVLongTool{

  /******************** bytes -> long **************************/

  public static long getLong(byte[] bytes) {
-    return getLong(bytes, 0);
+    return getLong(new SingleByteBuff(ByteBuffer.wrap(bytes)), 0);
  }

-  public static long getLong(byte[] bytes, int offset) {
+  public static long getLong(ByteBuff buf, int offset) {
    long value = 0;
    for (int i = 0;; ++i) {
-      byte b = bytes[offset + i];
+      byte b = buf.get(offset + i);
      long shifted = BYTE_7_RIGHT_BITS_SET & b;// kill leftmost bit
      shifted <<= 7 * i;
      value |= shifted;
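Reviewer aside: a worked example of the vint wire format these two tools share may help when
auditing the ByteBuff port (values chosen for illustration):

    // 300 = 0b1_0010_1100. Low 7 bits come first; a set leftmost bit means "more bytes follow".
    //   byte 0: 0xAC = 1_0101100  -> contributes 0x2C (44), continuation bit set
    //   byte 1: 0x02 = 0_0000010  -> contributes 2 << 7 = 256, leftmost bit 0 ends the int
    byte[] encoded = { (byte) 0xAC, 0x02 };
    int decoded = UVIntTool.getInt(new SingleByteBuff(ByteBuffer.wrap(encoded)), 0); // 300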
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/blockmeta/TestBlockMeta.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/blockmeta/TestBlockMeta.java
index 6bf14bf3a3e..24931476a90 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/blockmeta/TestBlockMeta.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/blockmeta/TestBlockMeta.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;

 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
@@ -83,7 +84,7 @@ public class TestBlockMeta {
    ByteArrayOutputStream os = new ByteArrayOutputStream(10000);
    original.writeVariableBytesToOutputStream(os);
    ByteBuffer buffer = ByteBuffer.wrap(os.toByteArray());
-    PrefixTreeBlockMeta roundTripped = new PrefixTreeBlockMeta(buffer);
+    PrefixTreeBlockMeta roundTripped = new PrefixTreeBlockMeta(new SingleByteBuff(buffer));
    Assert.assertTrue(original.equals(roundTripped));
  }
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java
index c33a9535950..49347670d34 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.hbase.codec.prefixtree.column;

 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.Collection;
 import java.util.List;

+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
@@ -103,7 +105,7 @@ public class TestColumnBuilder {
    bytes = baos.toByteArray();
    buffer = new byte[blockMeta.getMaxQualifierLength()];
    reader = new ColumnReader(buffer, ColumnNodeType.QUALIFIER);
-    reader.initOnBlock(blockMeta, bytes);
+    reader.initOnBlock(blockMeta, new SingleByteBuff(ByteBuffer.wrap(bytes)));

    List<TokenizerNode> builderNodes = Lists.newArrayList();
    builder.appendNodes(builderNodes, true, true);
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
index afe30e2ab29..7a9862f6718 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
@@ -36,6 +38,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataSearchWithPrefix;
 import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
 import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.junit.Assert;
 import org.junit.Test;
@@ -56,7 +59,7 @@ public class TestPrefixTreeSearcher {
  }

  protected TestRowData rows;
-  protected ByteBuffer block;
+  protected ByteBuff block;

  public TestPrefixTreeSearcher(TestRowData testRows) throws IOException {
    this.rows = testRows;
@@ -67,7 +70,10 @@ public class TestPrefixTreeSearcher {
    }
    kvBuilder.flush();
    byte[] outputBytes = os.toByteArray();
-    this.block = ByteBuffer.wrap(outputBytes);
+    ByteBuffer out = ByteBuffer.allocateDirect(outputBytes.length);
+    ByteBufferUtils.copyFromArrayToBuffer(out, outputBytes, 0, outputBytes.length);
+    out.position(0);
+    this.block = new SingleByteBuff(out);
  }

  @Test
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
index 0c8caf4dce7..765d0396f62 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
@@ -28,11 +28,14 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.nio.ByteBuff;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
 import org.junit.Before;
@@ -67,7 +70,7 @@ public class TestRowEncoder {
  protected int totalBytes;
  protected PrefixTreeBlockMeta blockMetaWriter;
  protected byte[] outputBytes;
-  protected ByteBuffer buffer;
+  protected ByteBuff buffer;
  protected ByteArrayInputStream is;
  protected PrefixTreeBlockMeta blockMetaReader;
  protected byte[] inputBytes;
@@ -93,13 +96,16 @@ public class TestRowEncoder {
    outputBytes = os.toByteArray();

    // start reading, but save the assertions for @Test methods
-    buffer = ByteBuffer.wrap(outputBytes);
+    ByteBuffer out = ByteBuffer.allocateDirect(outputBytes.length);
+    ByteBufferUtils.copyFromArrayToBuffer(out, outputBytes, 0, outputBytes.length);
+    out.position(0);
+    buffer = new SingleByteBuff(out);
    blockMetaReader = new PrefixTreeBlockMeta(buffer);

    searcher = new PrefixTreeArraySearcher(blockMetaReader, blockMetaReader.getRowTreeDepth(),
        blockMetaReader.getMaxRowLength(), blockMetaReader.getMaxQualifierLength(),
        blockMetaReader.getMaxTagsLength());
-    searcher.initOnBlock(blockMetaReader, outputBytes, includeMemstoreTS);
+    searcher.initOnBlock(blockMetaReader, buffer, includeMemstoreTS);
  }

  @Test
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java
index 65cbcc9e24c..1d0ad1b981e 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java
@@ -19,8 +19,10 @@
 package org.apache.hadoop.hbase.codec.prefixtree.timestamp;

 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.Collection;

+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
@@ -62,7 +64,7 @@ public class TestTimestampEncoder {
    blockMeta.setTimestampFields(encoder);
    bytes = encoder.getByteArray();
    decoder = new TimestampDecoder();
-    decoder.initOnBlock(blockMeta, bytes);
+    decoder.initOnBlock(blockMeta, new SingleByteBuff(ByteBuffer.wrap(bytes)));
  }

  @Test
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVIntTool.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVIntTool.java
index b9cb372d5e7..9171619a1d0 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVIntTool.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVIntTool.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.hbase.util.vint;

 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.Random;

+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Assert;
@@ -74,7 +76,8 @@ public class TestVIntTool {

  @Test
  public void testFromBytes() {
-    Assert.assertEquals(Integer.MAX_VALUE, UVIntTool.getInt(UVIntTool.MAX_VALUE_BYTES));
+    Assert.assertEquals(Integer.MAX_VALUE,
+      UVIntTool.getInt(new SingleByteBuff(ByteBuffer.wrap(UVIntTool.MAX_VALUE_BYTES)), 0));
  }

  @Test
@@ -83,7 +86,7 @@ public class TestVIntTool {
    for (int i = 0; i < 10000; ++i) {
      int value = random.nextInt(Integer.MAX_VALUE);
      byte[] bytes = UVIntTool.getBytes(value);
-      int roundTripped = UVIntTool.getInt(bytes);
+      int roundTripped = UVIntTool.getInt(new SingleByteBuff(ByteBuffer.wrap(bytes)), 0);
      Assert.assertEquals(value, roundTripped);
    }
  }
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVLongTool.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVLongTool.java
index ed637f677e9..247dee086b9 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVLongTool.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/util/vint/TestVLongTool.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hbase.util.vint;

 import java.io.ByteArrayInputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.Random;

+import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.number.RandomNumberUtils;
@@ -62,13 +64,14 @@ public class TestVLongTool {

  @Test
  public void testFromBytesOffset() {
-    Assert.assertEquals(Long.MAX_VALUE, UVLongTool.getLong(UVLongTool.MAX_VALUE_BYTES, 0));
+    Assert.assertEquals(Long.MAX_VALUE,
+      UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(UVLongTool.MAX_VALUE_BYTES)), 0));

    long ms = 1318966363481L;
    // System.out.println(ms);

    byte[] bytes = UVLongTool.getBytes(ms);
    // System.out.println(Arrays.toString(bytes));

-    long roundTripped = UVLongTool.getLong(bytes, 0);
+    long roundTripped = UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(bytes)), 0);
    Assert.assertEquals(ms, roundTripped);

    int calculatedNumBytes = UVLongTool.numBytes(ms);
@@ -78,7 +81,8 @@ public class TestVLongTool {
    byte[] shiftedBytes = new byte[1000];
    int shift = 33;
    System.arraycopy(bytes, 0, shiftedBytes, shift, bytes.length);
-    long shiftedRoundTrip = UVLongTool.getLong(shiftedBytes, shift);
+    long shiftedRoundTrip =
+        UVLongTool.getLong(new SingleByteBuff(ByteBuffer.wrap(shiftedBytes)), shift);
    Assert.assertEquals(ms, shiftedRoundTrip);
  }