HBASE-9518 getFakedKey() improvement
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1524896 13f79535-47bb-0310-9956-ffa450edef68
parent 4e600b8e0e
commit ab5e4b98d0
@@ -1960,34 +1960,25 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
         && leftKey[ROW_LENGTH_SIZE + diffIdx] == rightKey[ROW_LENGTH_SIZE + diffIdx]) {
       diffIdx++;
     }
+    byte[] newRowKey = null;
     if (diffIdx >= minLength) {
-      // leftKey's row is prefix of rightKey's. we can optimize it in future
-      return Arrays.copyOf(rightKey, rightKey.length);
-    }
-    int diffByte = leftKey[ROW_LENGTH_SIZE + diffIdx];
-    if ((0xff & diffByte) < 0xff && (diffByte + 1) <
-        (rightKey[ROW_LENGTH_SIZE + diffIdx] & 0xff)) {
-      byte[] newRowKey = new byte[diffIdx + 1];
-      System.arraycopy(leftKey, ROW_LENGTH_SIZE, newRowKey, 0, diffIdx);
-      newRowKey[diffIdx] = (byte) (diffByte + 1);
-      int rightFamilyLength = rightKey[rightCommonLength - 1];
-      byte[] family = null;
-      if (rightFamilyLength > 0) {
-        family = new byte[rightFamilyLength];
-        System.arraycopy(rightKey, rightCommonLength, family, 0, rightFamilyLength);
+      // leftKey's row is prefix of rightKey's.
+      newRowKey = new byte[diffIdx + 1];
+      System.arraycopy(rightKey, ROW_LENGTH_SIZE, newRowKey, 0, diffIdx + 1);
+    } else {
+      int diffByte = leftKey[ROW_LENGTH_SIZE + diffIdx];
+      if ((0xff & diffByte) < 0xff && (diffByte + 1) <
+          (rightKey[ROW_LENGTH_SIZE + diffIdx] & 0xff)) {
+        newRowKey = new byte[diffIdx + 1];
+        System.arraycopy(leftKey, ROW_LENGTH_SIZE, newRowKey, 0, diffIdx);
+        newRowKey[diffIdx] = (byte) (diffByte + 1);
+      } else {
+        newRowKey = new byte[diffIdx + 1];
+        System.arraycopy(rightKey, ROW_LENGTH_SIZE, newRowKey, 0, diffIdx + 1);
       }
-      int rightQualifierLength = rightColumnLength - rightFamilyLength;
-      byte[] qualifier = null;
-      if (rightQualifierLength > 0) {
-        qualifier = new byte[rightQualifierLength];
-        System.arraycopy(rightKey, rightCommonLength + rightFamilyLength, qualifier, 0,
-          rightQualifierLength);
-      }
-      return new KeyValue(newRowKey, null, null, HConstants.LATEST_TIMESTAMP,
-        Type.Maximum).getKey();
     }
-    // the following is optimizable in future
-    return Arrays.copyOf(rightKey, rightKey.length);
+    return new KeyValue(newRowKey, null, null, HConstants.LATEST_TIMESTAMP,
+      Type.Maximum).getKey();
   }
 
   @Override
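
What the reshaped branches above compute, in isolation, is a row that sorts after the left key's row and no later than the right key's row, kept as short as the first differing byte allows. Below is a minimal standalone sketch of that row-shortening rule, assuming bare row byte arrays with leftRow sorting strictly before rightRow; it leaves out the 2-byte row-length prefix, family, qualifier, timestamp and type that KeyValue adds, and the names FakedRowSketch/fakedRow are illustrative only, not HBase APIs.

import java.util.Arrays;

public class FakedRowSketch {

  /**
   * Returns a row R with leftRow < R <= rightRow in unsigned lexicographic order,
   * usually much shorter than rightRow. Assumes leftRow sorts strictly before rightRow.
   */
  static byte[] fakedRow(byte[] leftRow, byte[] rightRow) {
    int minLength = Math.min(leftRow.length, rightRow.length);
    int diffIdx = 0;
    while (diffIdx < minLength && leftRow[diffIdx] == rightRow[diffIdx]) {
      diffIdx++;
    }
    if (diffIdx >= minLength) {
      // leftRow is a prefix of rightRow: keep one byte past the common prefix.
      return Arrays.copyOf(rightRow, diffIdx + 1);
    }
    int diffByte = leftRow[diffIdx];
    if ((0xff & diffByte) < 0xff && (diffByte + 1) < (rightRow[diffIdx] & 0xff)) {
      // Room between the two differing bytes: bump left's differing byte by one.
      byte[] newRow = Arrays.copyOf(leftRow, diffIdx + 1);
      newRow[diffIdx] = (byte) (diffByte + 1);
      return newRow;
    }
    // No room to squeeze a byte in between: fall back to rightRow's short prefix.
    return Arrays.copyOf(rightRow, diffIdx + 1);
  }

  public static void main(String[] args) {
    byte[] left = "100abcdefg".getBytes();
    byte[] right = "101abcdefg".getBytes();
    // Rows differ at index 2 ('0' vs '1'); '0' + 1 is not strictly smaller than '1',
    // so the sketch falls back to rightRow's 3-byte prefix.
    System.out.println(new String(fakedRow(left, right))); // prints 101
  }
}

Run on the rows from the new "only 1 offset" case in the test hunk below ("100abcdefg" vs "101abcdefg"), the sketch yields "101", the same 3-byte row the test expects.
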
@@ -509,6 +509,21 @@ public class TestKeyValue extends TestCase {
     assertTrue(keyComparator.compareFlatKey(kv1.getKey(), kv2.getKey()) < 0);
     newKey = keyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey());
     assertTrue(keyComparator.compareFlatKey(kv1.getKey(), newKey) < 0);
-    assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) == 0);
+    assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) < 0);
+    newRowLength = Bytes.toShort(newKey, 0);
+    expectedArray = Bytes.toBytes("ilovehbasea");
+    Bytes.equals(newKey, KeyValue.ROW_LENGTH_SIZE, newRowLength, expectedArray, 0,
+      expectedArray.length);
+    //verify only 1 offset scenario
+    kv1 = new KeyValue(Bytes.toBytes("100abcdefg"), family, qualA, ts, Type.Put);
+    kv2 = new KeyValue(Bytes.toBytes("101abcdefg"), family, qualA, ts, Type.Put);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), kv2.getKey()) < 0);
+    newKey = keyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey());
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), newKey) < 0);
+    assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) < 0);
+    newRowLength = Bytes.toShort(newKey, 0);
+    expectedArray = Bytes.toBytes("101");
+    Bytes.equals(newKey, KeyValue.ROW_LENGTH_SIZE, newRowLength, expectedArray, 0,
+      expectedArray.length);
   }
 }
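
For readers following the assertions above: Bytes.toShort(newKey, 0) and the KeyValue.ROW_LENGTH_SIZE offset work because a flat key starts with a 2-byte big-endian row length followed by the row bytes. Below is a small self-contained sketch of that decoding using plain java.nio instead of HBase's Bytes; FlatKeyRowSketch and rowOf are illustrative names, not HBase APIs.

import java.nio.ByteBuffer;
import java.util.Arrays;

public class FlatKeyRowSketch {
  static final int ROW_LENGTH_SIZE = 2; // mirrors KeyValue.ROW_LENGTH_SIZE (two bytes)

  /** Extracts the row portion of a flat key laid out as [2-byte row length][row bytes][...]. */
  static byte[] rowOf(byte[] flatKey) {
    short rowLength = ByteBuffer.wrap(flatKey).getShort(0); // big-endian, like Bytes.toShort
    return Arrays.copyOfRange(flatKey, ROW_LENGTH_SIZE, ROW_LENGTH_SIZE + rowLength);
  }

  public static void main(String[] args) {
    // Build a toy flat key holding only the row "101" for demonstration.
    byte[] row = "101".getBytes();
    ByteBuffer key = ByteBuffer.allocate(ROW_LENGTH_SIZE + row.length);
    key.putShort((short) row.length).put(row);
    System.out.println(new String(rowOf(key.array()))); // prints 101
  }
}
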
@@ -536,7 +536,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
       ByteBuffer firstKey = getFirstKeyInBlock(seekToBlock);

       if (reader.getComparator().compareFlatKey(firstKey.array(),
-          firstKey.arrayOffset(), firstKey.limit(), key, offset, length) == 0)
+          firstKey.arrayOffset(), firstKey.limit(), key, offset, length) >= 0)
       {
         long previousBlockOffset = seekToBlock.getPrevBlockOffset();
         // The key we are interested in
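
The one-character change above (== 0 to >= 0) matters once index entries can be faked keys: the block the index points at may begin with a real first key that is larger than the key being seeked before, in which case every cell in that block is too large and the scan has to fall back to the previous block. Below is a minimal sketch of that decision over plain sorted strings with a toy block layout; none of these names are HBase APIs.

import java.util.Arrays;
import java.util.List;

public class SeekBeforeSketch {
  /** Returns the greatest key strictly smaller than 'key', or null if there is none. */
  static String seekBefore(List<List<String>> blocks, List<String> indexKeys, String key) {
    // Pick the last block whose (possibly faked) index key is <= key.
    int b = -1;
    for (int i = 0; i < indexKeys.size(); i++) {
      if (indexKeys.get(i).compareTo(key) <= 0) b = i;
    }
    if (b < 0) return null;
    String firstKey = blocks.get(b).get(0);
    if (firstKey.compareTo(key) >= 0) { // the >= 0 check from the hunk above
      b--;                              // whole block is >= key: use the previous one
      if (b < 0) return null;
    }
    // Last entry of the chosen block that is still < key.
    String result = null;
    for (String k : blocks.get(b)) {
      if (k.compareTo(key) < 0) result = k;
    }
    return result;
  }

  public static void main(String[] args) {
    List<List<String>> blocks = Arrays.asList(
        Arrays.asList("aaa", "abc"),  // block 0
        Arrays.asList("ace", "adz")); // block 1, real first key "ace"
    List<String> indexKeys = Arrays.asList("aaa", "abd"); // "abd" is a faked separator
    // "abd" <= "abe" selects block 1, but its first key "ace" > "abe",
    // so the answer has to come from block 0.
    System.out.println(seekBefore(blocks, indexKeys, "abe")); // prints abc
  }
}

With the old == 0 test this example would stay in block 1, find no key smaller than "abe", and return null.
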
@@ -42,6 +42,7 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.mortbay.log.Log;

 @Category(SmallTests.class)
 public class TestHalfStoreFileReader {
@@ -172,11 +173,13 @@ public class TestHalfStoreFileReader {
     // Ugly code to get the item before the midkey
     KeyValue beforeMidKey = null;
     for (KeyValue item : items) {
-      if (item.equals(midKV)) {
+      if (KeyValue.COMPARATOR.compare(item, midKV) >= 0) {
         break;
       }
       beforeMidKey = item;
     }
+    System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
+    System.out.println("beforeMidKey: " + beforeMidKey);


     // Seek on the splitKey, should be in top, not in bottom
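
The switch above from item.equals(midKV) to KeyValue.COMPARATOR.compare(item, midKV) >= 0 reflects that a faked midkey need not be equal to any cell actually stored in the file, so the "item before the midkey" has to be located by ordering rather than by equality. A tiny plain-Java illustration of the difference, assuming a sorted list of strings; beforeMidKey here is an illustrative helper, not HBase code.

import java.util.Arrays;
import java.util.List;

public class BeforeMidkeySketch {
  /** Last element of a sorted list that sorts strictly before midKey, or null. */
  static String beforeMidKey(List<String> sortedItems, String midKey) {
    String before = null;
    for (String item : sortedItems) {
      if (item.compareTo(midKey) >= 0) { // ordering check, as in the hunk above
        break;
      }
      before = item;
    }
    return before;
  }

  public static void main(String[] args) {
    List<String> items = Arrays.asList("aaa", "abc", "ace", "adz");
    // A faked midkey such as "abd" is not in the list; an equals() test would never
    // match it and the old loop would run off the end, while the ordering check stops at "ace".
    System.out.println(beforeMidKey(items, "abd")); // prints abc
  }
}
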
@@ -261,7 +261,7 @@ public class TestCacheOnWrite {
     BlockType cachedDataBlockType =
         encoderType.encodeInCache ? BlockType.ENCODED_DATA : BlockType.DATA;
     assertEquals("{" + cachedDataBlockType
-        + "=1379, LEAF_INDEX=173, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=24}",
+        + "=1379, LEAF_INDEX=154, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=18}",
         countByType);

     reader.close();