HBASE-9247 Cleanup Key/KV/Meta/MetaKey Comparators

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1518817 13f79535-47bb-0310-9956-ffa450edef68
Jonathan Hsieh 2013-08-29 20:45:04 +00:00
parent d122e7b349
commit 739f438176
42 changed files with 633 additions and 922 deletions
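At a glance, the cleanup folds KeyValue.KeyComparator and MetaKeyComparator into KVComparator and MetaComparator, and renames flat-key (serialized key byte[]) comparison to compareFlatKey, while KeyValue-to-KeyValue comparison keeps using compare. A minimal before/after sketch of the call-site migration (illustrative only; kv1 and kv2 are assumed KeyValues, not code from the patch):

byte[] left = kv1.getKey();    // flat key: row, family, qualifier, timestamp, type
byte[] right = kv2.getKey();
// before: KeyValue.KEY_COMPARATOR.compare(left, 0, left.length, right, 0, right.length)
int flatCmp = KeyValue.COMPARATOR.compareFlatKey(left, 0, left.length, right, 0, right.length);
int cellCmp = KeyValue.COMPARATOR.compare(kv1, kv2);   // KeyValue-level comparison, unchanged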

View File

@@ -19,16 +19,6 @@
 package org.apache.hadoop.hbase.client;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellScannable;
-import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.SplitKeyValue;
-import org.apache.hadoop.hbase.util.Bytes;
 import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -39,6 +29,15 @@ import java.util.Map;
 import java.util.NavigableMap;
 import java.util.TreeMap;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellScannable;
+import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.util.Bytes;
 /**
  * Single row result of a {@link Get} or {@link Scan} query.<p>
  *
@@ -537,8 +536,7 @@ public class Result implements CellScannable {
     }
     this.familyMap = new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR);
     for(KeyValue kv : this.kvs) {
-      SplitKeyValue splitKV = kv.split();
-      byte [] family = splitKV.getFamily();
+      byte [] family = kv.getFamily();
       NavigableMap<byte[], NavigableMap<Long, byte[]>> columnMap =
         familyMap.get(family);
       if(columnMap == null) {
@@ -546,7 +544,7 @@ public class Result implements CellScannable {
           (Bytes.BYTES_COMPARATOR);
         familyMap.put(family, columnMap);
       }
-      byte [] qualifier = splitKV.getQualifier();
+      byte [] qualifier = kv.getQualifier();
       NavigableMap<Long, byte[]> versionMap = columnMap.get(qualifier);
       if(versionMap == null) {
         versionMap = new TreeMap<Long, byte[]>(new Comparator<Long>() {
@@ -556,8 +554,9 @@ public class Result implements CellScannable {
         });
         columnMap.put(qualifier, versionMap);
       }
-      Long timestamp = Bytes.toLong(splitKV.getTimestamp());
-      byte [] value = splitKV.getValue();
+      Long timestamp = kv.getTimestamp();
+      byte [] value = kv.getValue();
       versionMap.put(timestamp, value);
     }
     return this.familyMap;
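For orientation, the familyMap loop above now reads each component straight off the KeyValue instead of materializing a SplitKeyValue first. A minimal sketch of the accessors it relies on (kv stands for any KeyValue held by this Result):

byte[] family = kv.getFamily();
byte[] qualifier = kv.getQualifier();
long timestamp = kv.getTimestamp();   // already a long; no Bytes.toLong() round trip
byte[] value = kv.getValue();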

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.util.Bytes;
 /**
@@ -335,10 +335,10 @@ public final class TableName implements Comparable<TableName> {
    *
    * @return The comparator.
    */
-  public KeyComparator getRowComparator() {
+  public KVComparator getRowComparator() {
     if(TableName.META_TABLE_NAME.equals(this)) {
-      return KeyValue.META_COMPARATOR.getRawComparator();
+      return KeyValue.META_COMPARATOR;
     }
-    return KeyValue.COMPARATOR.getRawComparator();
+    return KeyValue.COMPARATOR;
   }
 }
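Callers of getRowComparator() therefore receive the KVComparator itself rather than an unwrapped raw comparator. A hedged usage sketch (tn, kv1 and kv2 are assumed locals):

KVComparator rc = tn.getRowComparator();   // META_COMPARATOR for the meta table, COMPARATOR otherwise
int cmp = rc.compareFlatKey(kv1.getKey(), kv2.getKey());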

View File

@@ -24,12 +24,12 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.KeyValue.SamePrefixComparator;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableUtils;
 /**
@@ -113,14 +113,14 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
       BufferedEncodedSeeker<STATE extends SeekerState>
       implements EncodedSeeker {
-    protected final RawComparator<byte[]> comparator;
+    protected final KVComparator comparator;
     protected final SamePrefixComparator<byte[]> samePrefixComparator;
     protected ByteBuffer currentBuffer;
     protected STATE current = createSeekerState(); // always valid
     protected STATE previous = createSeekerState(); // may not be valid
     @SuppressWarnings("unchecked")
-    public BufferedEncodedSeeker(RawComparator<byte[]> comparator) {
+    public BufferedEncodedSeeker(KVComparator comparator) {
       this.comparator = comparator;
       if (comparator instanceof SamePrefixComparator) {
         this.samePrefixComparator = (SamePrefixComparator<byte[]>) comparator;
@@ -207,7 +207,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
           comp = samePrefixComparator.compareIgnoringPrefix(commonPrefix, key,
               offset, length, current.keyBuffer, 0, current.keyLength);
         } else {
-          comp = comparator.compare(key, offset, length,
+          comp = comparator.compareFlatKey(key, offset, length,
               current.keyBuffer, 0, current.keyLength);
         }
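The seeker now carries a KVComparator and falls back to compareFlatKey when it cannot skip a shared prefix. A condensed, self-contained sketch of that decision (parameter names are assumptions, not code from the patch):

static int compareToCurrentKey(KVComparator comparator, SamePrefixComparator<byte[]> prefixComparator,
    int commonPrefix, byte[] key, int offset, int length, byte[] currentKey, int currentKeyLength) {
  if (prefixComparator != null) {
    // skip the bytes both keys are already known to share
    return prefixComparator.compareIgnoringPrefix(commonPrefix, key, offset, length,
        currentKey, 0, currentKeyLength);
  }
  // plain flat-key comparison over the serialized row/family/qualifier/timestamp/type form
  return comparator.compareFlatKey(key, offset, length, currentKey, 0, currentKeyLength);
}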

View File

@@ -22,9 +22,9 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.RawComparator;
 /**
  * Just copy data, do not do any kind of compression. Use for comparison and
@@ -67,7 +67,7 @@ public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder {
   }
   @Override
-  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator,
+  public EncodedSeeker createSeeker(KVComparator comparator,
       final boolean includesMemstoreTS) {
     return new BufferedEncodedSeeker<SeekerState>(comparator) {
       @Override

View File

@@ -22,6 +22,7 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.io.RawComparator;
@@ -106,7 +107,7 @@ public interface DataBlockEncoder {
    * @return A newly created seeker.
    */
   EncodedSeeker createSeeker(
-      RawComparator<byte[]> comparator, boolean includesMemstoreTS
+      KVComparator comparator, boolean includesMemstoreTS
   );
   /**

View File

@@ -23,6 +23,7 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.RawComparator;
@@ -422,7 +423,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder {
   }
   @Override
-  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator,
+  public EncodedSeeker createSeeker(KVComparator comparator,
       final boolean includesMemstoreTS) {
     return new BufferedEncodedSeeker<DiffSeekerState>(comparator) {
       private byte[] familyNameWithSize;

View File

@@ -24,6 +24,7 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.RawComparator;
@@ -417,7 +418,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder {
   }
   @Override
-  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator,
+  public EncodedSeeker createSeeker(KVComparator comparator,
       final boolean includesMemstoreTS) {
     return new BufferedEncodedSeeker<FastDiffSeekerState>(comparator) {
       private void decode(boolean isFirst) {

View File

@@ -23,6 +23,7 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.RawComparator;
@@ -164,7 +165,7 @@ public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder {
   }
   @Override
-  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator,
+  public EncodedSeeker createSeeker(KVComparator comparator,
       final boolean includesMemstoreTS) {
     return new BufferedEncodedSeeker<SeekerState>(comparator) {
       @Override

View File

@@ -1575,7 +1575,7 @@ public class Bytes {
    * ranging from -(N + 1) to N - 1.
    */
   public static int binarySearch(byte [][]arr, byte []key, int offset,
-      int length, RawComparator<byte []> comparator) {
+      int length, RawComparator<?> comparator) {
     int low = 0;
     int high = arr.length - 1;

View File

@@ -28,7 +28,6 @@ import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.KeyValue.MetaComparator;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -94,24 +93,13 @@ public class TestKeyValue extends TestCase {
     final byte [] b = Bytes.toBytes("bbb");
     final byte [] fam = Bytes.toBytes("col");
     final byte [] qf = Bytes.toBytes("umn");
-    // final byte [] column = Bytes.toBytes("col:umn");
     KeyValue aaa = new KeyValue(a, fam, qf, a);
     KeyValue bbb = new KeyValue(b, fam, qf, b);
-    byte [] keyabb = aaa.getKey();
-    byte [] keybbb = bbb.getKey();
     assertTrue(KeyValue.COMPARATOR.compare(aaa, bbb) < 0);
-    assertTrue(KeyValue.KEY_COMPARATOR.compare(keyabb, 0, keyabb.length, keybbb,
-      0, keybbb.length) < 0);
     assertTrue(KeyValue.COMPARATOR.compare(bbb, aaa) > 0);
-    assertTrue(KeyValue.KEY_COMPARATOR.compare(keybbb, 0, keybbb.length, keyabb,
-      0, keyabb.length) > 0);
     // Compare breaks if passed same ByteBuffer as both left and right arguments.
     assertTrue(KeyValue.COMPARATOR.compare(bbb, bbb) == 0);
-    assertTrue(KeyValue.KEY_COMPARATOR.compare(keybbb, 0, keybbb.length, keybbb,
-      0, keybbb.length) == 0);
     assertTrue(KeyValue.COMPARATOR.compare(aaa, aaa) == 0);
-    assertTrue(KeyValue.KEY_COMPARATOR.compare(keyabb, 0, keyabb.length, keyabb,
-      0, keyabb.length) == 0);
     // Do compare with different timestamps.
     aaa = new KeyValue(a, fam, qf, 1, a);
     bbb = new KeyValue(a, fam, qf, 2, a);
@@ -299,7 +287,7 @@ public class TestKeyValue extends TestCase {
     assertTrue(cmp > 0);
   }
-  private void assertKVLessWithoutRow(KeyValue.KeyComparator c, int common, KeyValue less,
+  private void assertKVLessWithoutRow(KeyValue.KVComparator c, int common, KeyValue less,
       KeyValue greater) {
     int cmp = c.compareIgnoringPrefix(common, less.getBuffer(), less.getOffset()
         + KeyValue.ROW_OFFSET, less.getKeyLength(), greater.getBuffer(),
@@ -312,7 +300,7 @@ public class TestKeyValue extends TestCase {
   }
   public void testCompareWithoutRow() {
-    final KeyValue.KeyComparator c = KeyValue.KEY_COMPARATOR;
+    final KeyValue.KVComparator c = KeyValue.COMPARATOR;
     byte[] row = Bytes.toBytes("row");
     byte[] fa = Bytes.toBytes("fa");
@@ -462,14 +450,14 @@ public class TestKeyValue extends TestCase {
    * See HBASE-7845
    */
   public void testGetShortMidpointKey() {
-    final KeyComparator keyComparator = new KeyValue.KeyComparator();
+    final KVComparator keyComparator = KeyValue.COMPARATOR;
     //verify that faked shorter rowkey could be generated
     long ts = 5;
     KeyValue kv1 = new KeyValue(Bytes.toBytes("the quick brown fox"), family, qualA, ts, Type.Put);
     KeyValue kv2 = new KeyValue(Bytes.toBytes("the who test text"), family, qualA, ts, Type.Put);
     byte[] newKey = keyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey());
-    assertTrue(keyComparator.compare(kv1.getKey(), newKey) < 0);
-    assertTrue(keyComparator.compare(newKey, kv2.getKey()) < 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), newKey) < 0);
+    assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) < 0);
     short newRowLength = Bytes.toShort(newKey, 0);
     byte[] expectedArray = Bytes.toBytes("the r");
     Bytes.equals(newKey, KeyValue.ROW_LENGTH_SIZE, newRowLength, expectedArray, 0,
@@ -478,44 +466,44 @@ public class TestKeyValue extends TestCase {
     //verify: same with "row + family + qualifier", return rightKey directly
     kv1 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, 5, Type.Put);
     kv2 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, 0, Type.Put);
-    assertTrue(keyComparator.compare(kv1.getKey(), kv2.getKey()) < 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), kv2.getKey()) < 0);
     newKey = keyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey());
-    assertTrue(keyComparator.compare(kv1.getKey(), newKey) < 0);
-    assertTrue(keyComparator.compare(newKey, kv2.getKey()) == 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), newKey) < 0);
+    assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) == 0);
     kv1 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, -5, Type.Put);
     kv2 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, -10, Type.Put);
-    assertTrue(keyComparator.compare(kv1.getKey(), kv2.getKey()) < 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), kv2.getKey()) < 0);
     newKey = keyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey());
-    assertTrue(keyComparator.compare(kv1.getKey(), newKey) < 0);
-    assertTrue(keyComparator.compare(newKey, kv2.getKey()) == 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), newKey) < 0);
+    assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) == 0);
     // verify: same with row, different with qualifier
     kv1 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, 5, Type.Put);
     kv2 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualB, 5, Type.Put);
-    assertTrue(keyComparator.compare(kv1.getKey(), kv2.getKey()) < 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), kv2.getKey()) < 0);
     newKey = keyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey());
-    assertTrue(keyComparator.compare(kv1.getKey(), newKey) < 0);
-    assertTrue(keyComparator.compare(newKey, kv2.getKey()) < 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), newKey) < 0);
+    assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) < 0);
     KeyValue newKeyValue = KeyValue.createKeyValueFromKey(newKey);
     assertTrue(Arrays.equals(newKeyValue.getFamily(),family));
     assertTrue(Arrays.equals(newKeyValue.getQualifier(),qualB));
     assertTrue(newKeyValue.getTimestamp() == HConstants.LATEST_TIMESTAMP);
-    assertTrue(newKeyValue.getType() == Type.Maximum.getCode());
+    assertTrue(newKeyValue.getTypeByte() == Type.Maximum.getCode());
     //verify metaKeyComparator's getShortMidpointKey output
-    final KeyComparator metaKeyComparator = new KeyValue.MetaKeyComparator();
+    final KVComparator metaKeyComparator = KeyValue.META_COMPARATOR;
     kv1 = new KeyValue(Bytes.toBytes("ilovehbase123"), family, qualA, 5, Type.Put);
     kv2 = new KeyValue(Bytes.toBytes("ilovehbase234"), family, qualA, 0, Type.Put);
     newKey = metaKeyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey());
-    assertTrue(metaKeyComparator.compare(kv1.getKey(), newKey) < 0);
-    assertTrue(metaKeyComparator.compare(newKey, kv2.getKey()) == 0);
+    assertTrue(metaKeyComparator.compareFlatKey(kv1.getKey(), newKey) < 0);
+    assertTrue(metaKeyComparator.compareFlatKey(newKey, kv2.getKey()) == 0);
     //verify common fix scenario
     kv1 = new KeyValue(Bytes.toBytes("ilovehbase"), family, qualA, ts, Type.Put);
     kv2 = new KeyValue(Bytes.toBytes("ilovehbaseandhdfs"), family, qualA, ts, Type.Put);
-    assertTrue(keyComparator.compare(kv1.getKey(), kv2.getKey()) < 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), kv2.getKey()) < 0);
     newKey = keyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey());
-    assertTrue(keyComparator.compare(kv1.getKey(), newKey) < 0);
-    assertTrue(keyComparator.compare(newKey, kv2.getKey()) == 0);
+    assertTrue(keyComparator.compareFlatKey(kv1.getKey(), newKey) < 0);
+    assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) == 0);
   }
 }

View File

@@ -25,8 +25,9 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
-import org.apache.hadoop.hbase.KeyValue.MetaKeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.KeyValue.MetaComparator;
+import org.apache.hadoop.hbase.KeyValue.RawBytesComparator;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
@@ -189,11 +190,10 @@ public class PrefixTreeCodec implements DataBlockEncoder{
    * the way to this point.
    */
   @Override
-  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator, boolean includesMvccVersion) {
-    if(! (comparator instanceof KeyComparator)){
+  public EncodedSeeker createSeeker(KVComparator comparator, boolean includesMvccVersion) {
+    if (comparator instanceof RawBytesComparator){
       throw new IllegalArgumentException("comparator must be KeyValue.KeyComparator");
-    }
-    if(comparator instanceof MetaKeyComparator){
+    } else if (comparator instanceof MetaComparator){
       throw new IllegalArgumentException("DataBlockEncoding.PREFIX_TREE not compatible with META "
         +"table");
     }

View File

@@ -160,7 +160,7 @@ public class HalfStoreFileReader extends StoreFile.Reader {
       // constrain the bottom.
       if (!top) {
         ByteBuffer bb = getKey();
-        if (getComparator().compare(bb.array(), bb.arrayOffset(), bb.limit(),
+        if (getComparator().compareFlatKey(bb.array(), bb.arrayOffset(), bb.limit(),
             splitkey, 0, splitkey.length) >= 0) {
           atEnd = true;
           return false;
@@ -179,13 +179,13 @@ public class HalfStoreFileReader extends StoreFile.Reader {
           byte[] fk = getFirstKey();
           // This will be null when the file is empty in which we can not seekBefore to any key
           if (fk == null) return false;
-          if (getComparator().compare(key, offset, length, fk, 0,
+          if (getComparator().compareFlatKey(key, offset, length, fk, 0,
               fk.length) <= 0) {
             return false;
           }
         } else {
           // The equals sign isn't strictly necessary just here to be consistent with seekTo
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
               splitkey.length) >= 0) {
             return this.delegate.seekBefore(splitkey, 0, splitkey.length);
           }
@@ -216,7 +216,7 @@ public class HalfStoreFileReader extends StoreFile.Reader {
         // Check key.
         ByteBuffer k = this.delegate.getKey();
         return this.delegate.getReader().getComparator().
-            compare(k.array(), k.arrayOffset(), k.limit(),
+            compareFlatKey(k.array(), k.arrayOffset(), k.limit(),
               splitkey, 0, splitkey.length) < 0;
       }
@@ -226,12 +226,12 @@ public class HalfStoreFileReader extends StoreFile.Reader {
       public int seekTo(byte[] key, int offset, int length) throws IOException {
         if (top) {
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
               splitkey.length) < 0) {
             return -1;
           }
         } else {
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
               splitkey.length) >= 0) {
             // we would place the scanner in the second half.
             // it might be an error to return false here ever...
@@ -256,12 +256,12 @@ public class HalfStoreFileReader extends StoreFile.Reader {
         //This function is identical to the corresponding seekTo function except
         //that we call reseekTo (and not seekTo) on the delegate.
         if (top) {
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
              splitkey.length) < 0) {
            return -1;
          }
        } else {
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
              splitkey.length) >= 0) {
            // we would place the scanner in the second half.
            // it might be an error to return false here ever...

View File

@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -71,7 +72,7 @@ public abstract class AbstractHFileReader implements HFile.Reader {
   protected int avgValueLen = -1;
   /** Key comparator */
-  protected RawComparator<byte []> comparator;
+  protected KVComparator comparator;
   /** Size of this file. */
   protected final long fileSize;
@@ -206,7 +207,7 @@ public abstract class AbstractHFileReader implements HFile.Reader {
   /** @return comparator */
   @Override
-  public RawComparator<byte []> getComparator() {
+  public KVComparator getComparator() {
     return comparator;
   }

View File

@@ -33,7 +33,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -77,7 +77,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
   protected long totalUncompressedBytes = 0;
   /** Key comparator. Used to ensure we write in order. */
-  protected final KeyComparator comparator;
+  protected final KVComparator comparator;
   /** Meta block names. */
   protected List<byte[]> metaNames = new ArrayList<byte[]>();
@@ -114,7 +114,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
       FSDataOutputStream outputStream, Path path, int blockSize,
       Compression.Algorithm compressAlgo,
       HFileDataBlockEncoder dataBlockEncoder,
-      KeyComparator comparator) {
+      KVComparator comparator) {
     this.outputStream = outputStream;
     this.path = path;
     this.name = path != null ? path.getName() : outputStream.toString();
@@ -124,7 +124,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
     this.blockEncoder = dataBlockEncoder != null
         ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
     this.comparator = comparator != null ? comparator
-        : KeyValue.KEY_COMPARATOR;
+        : KeyValue.COMPARATOR;
     closeOutputStream = path != null;
     this.cacheConf = cacheConf;
@@ -198,8 +198,9 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
       throw new IOException("Key cannot be null or empty");
     }
     if (lastKeyBuffer != null) {
-      int keyComp = comparator.compare(lastKeyBuffer, lastKeyOffset,
+      int keyComp = comparator.compareFlatKey(lastKeyBuffer, lastKeyOffset,
           lastKeyLength, key, offset, length);
       if (keyComp > 0) {
         throw new IOException("Added a key not lexically larger than"
             + " previous key="

View File

@@ -31,10 +31,10 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.protobuf.generated.HFileProtos;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.RawComparator;
 import com.google.common.io.NullOutputStream;
@@ -114,8 +114,8 @@ public class FixedFileTrailer {
    */
   private long lastDataBlockOffset;
-  /** Raw key comparator class name in version 2 */
-  private String comparatorClassName = KeyValue.KEY_COMPARATOR.getClass().getName();
+  /** Raw key comparator class name in version 3 */
+  private String comparatorClassName = KeyValue.COMPARATOR.getLegacyKeyComparatorName();
   /** The {@link HFile} format major version. */
   private final int majorVersion;
@@ -214,6 +214,8 @@ public class FixedFileTrailer {
         .setNumDataIndexLevels(numDataIndexLevels)
         .setFirstDataBlockOffset(firstDataBlockOffset)
         .setLastDataBlockOffset(lastDataBlockOffset)
+        // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
+        // some compat code here.
         .setComparatorClassName(comparatorClassName)
         .setCompressionCodec(compressionCodec.ordinal())
         .build().writeDelimitedTo(baos);
@@ -324,6 +326,8 @@ public class FixedFileTrailer {
       lastDataBlockOffset = builder.getLastDataBlockOffset();
     }
     if (builder.hasComparatorClassName()) {
+      // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
+      // some compat code here.
       setComparatorClass(getComparatorClass(builder.getComparatorClassName()));
     }
     if (builder.hasCompressionCodec()) {
@@ -351,6 +355,8 @@ public class FixedFileTrailer {
     numDataIndexLevels = input.readInt();
     firstDataBlockOffset = input.readLong();
     lastDataBlockOffset = input.readLong();
+    // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
+    // some compat code here.
     setComparatorClass(getComparatorClass(Bytes.readStringFixedSize(input,
         MAX_COMPARATOR_NAME_LENGTH)));
   }
@@ -555,30 +561,53 @@ public class FixedFileTrailer {
     return minorVersion;
   }
-  @SuppressWarnings("rawtypes")
-  public void setComparatorClass(Class<? extends RawComparator> klass) {
-    // Is the comparator instantiable
+  public void setComparatorClass(Class<? extends KVComparator> klass) {
+    // Is the comparator instantiable?
     try {
-      klass.newInstance();
+      KVComparator comp = klass.newInstance();
+      // HFile V2 legacy comparator class names.
+      if (KeyValue.COMPARATOR.getClass().equals(klass)) {
+        comparatorClassName = KeyValue.COMPARATOR.getLegacyKeyComparatorName();
+      } else if (KeyValue.META_COMPARATOR.getClass().equals(klass)) {
+        comparatorClassName = KeyValue.META_COMPARATOR.getLegacyKeyComparatorName();
+      } else if (KeyValue.RAW_COMPARATOR.getClass().equals(klass)) {
+        comparatorClassName = KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName();
+      } else {
+        // if the name wasn't one of the legacy names, maybe its a legit new kind of comparator.
+        comparatorClassName = klass.getName();
+      }
     } catch (Exception e) {
       throw new RuntimeException("Comparator class " + klass.getName() +
         " is not instantiable", e);
     }
-    comparatorClassName = klass.getName();
   }
   @SuppressWarnings("unchecked")
-  private static Class<? extends RawComparator<byte[]>> getComparatorClass(
+  private static Class<? extends KVComparator> getComparatorClass(
       String comparatorClassName) throws IOException {
     try {
-      return (Class<? extends RawComparator<byte[]>>)
+      // HFile V2 legacy comparator class names.
+      if (comparatorClassName.equals(KeyValue.COMPARATOR.getLegacyKeyComparatorName())) {
+        comparatorClassName = KeyValue.COMPARATOR.getClass().getName();
+      } else if (comparatorClassName.equals(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName())) {
+        comparatorClassName = KeyValue.META_COMPARATOR.getClass().getName();
+      } else if (comparatorClassName.equals(KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName())) {
+        comparatorClassName = KeyValue.RAW_COMPARATOR.getClass().getName();
+      }
+      // if the name wasn't one of the legacy names, maybe its a legit new kind of comparator.
+      return (Class<? extends KVComparator>)
        Class.forName(comparatorClassName);
     } catch (ClassNotFoundException ex) {
       throw new IOException(ex);
     }
   }
-  public static RawComparator<byte[]> createComparator(
+  public static KVComparator createComparator(
       String comparatorClassName) throws IOException {
     try {
       return getComparatorClass(comparatorClassName).newInstance();
@@ -591,7 +620,7 @@ public class FixedFileTrailer {
     }
   }
-  RawComparator<byte[]> createComparator() throws IOException {
+  KVComparator createComparator() throws IOException {
     expectAtLeastMajorVersion(2);
     return createComparator(comparatorClassName);
   }
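The trailer keeps writing the legacy (pre-KVComparator) comparator names and maps them back when reading, so existing HFiles still resolve. A hedged round-trip sketch (the wrapper method is hypothetical; the stored string is whatever getLegacyKeyComparatorName() yields, not hard-coded here):

static KVComparator reloadDefaultComparator() throws IOException {
  // what a writer records in the trailer for the default comparator
  String stored = KeyValue.COMPARATOR.getLegacyKeyComparatorName();
  // reading maps the legacy name (or a plain class name) back to a KVComparator instance
  return FixedFileTrailer.createComparator(stored);
}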

View File

@@ -53,7 +53,7 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -336,7 +336,7 @@ public class HFile {
     protected Compression.Algorithm compression =
         HFile.DEFAULT_COMPRESSION_ALGORITHM;
     protected HFileDataBlockEncoder encoder = NoOpDataBlockEncoder.INSTANCE;
-    protected KeyComparator comparator = KeyValue.KEY_COMPARATOR;
+    protected KVComparator comparator = KeyValue.COMPARATOR;
     protected InetSocketAddress[] favoredNodes;
     protected ChecksumType checksumType = HFile.DEFAULT_CHECKSUM_TYPE;
     protected int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;
@@ -384,7 +384,7 @@ public class HFile {
       return this;
     }
-    public WriterFactory withComparator(KeyComparator comparator) {
+    public WriterFactory withComparator(KVComparator comparator) {
       Preconditions.checkNotNull(comparator);
       this.comparator = comparator;
       return this;
@@ -432,7 +432,7 @@ public class HFile {
         FSDataOutputStream ostream, int blockSize,
         Compression.Algorithm compress,
         HFileDataBlockEncoder dataBlockEncoder,
-        KeyComparator comparator, ChecksumType checksumType,
+        KVComparator comparator, ChecksumType checksumType,
         int bytesPerChecksum, boolean includeMVCCReadpoint) throws IOException;
   }
@@ -489,7 +489,7 @@ public class HFile {
    */
   String getName();
-  RawComparator<byte []> getComparator();
+  KVComparator getComparator();
   HFileScanner getScanner(boolean cacheBlocks,
       final boolean pread, final boolean isCompaction);
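HFile writers are configured the same way: WriterFactory.withComparator now takes a KVComparator, with KeyValue.COMPARATOR as the default when none is set. A rough usage sketch; everything other than withComparator (the factory lookup, withPath, create) is assumed from surrounding HBase code rather than shown in this patch:

HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
    .withPath(fs, path)
    .withComparator(KeyValue.COMPARATOR)   // a KVComparator; previously a KeyValue.KeyComparator
    .create();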

View File

@@ -38,13 +38,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.HFile.CachingBlockReader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.CompoundBloomFilterWriter;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.util.StringUtils;
@@ -106,7 +106,7 @@ public class HFileBlockIndex {
    */
   public static class BlockIndexReader implements HeapSize {
     /** Needed doing lookup on blocks. */
-    private final RawComparator<byte[]> comparator;
+    private final KVComparator comparator;
     // Root-level data.
     private byte[][] blockKeys;
@@ -132,13 +132,13 @@ public class HFileBlockIndex {
     /** A way to read {@link HFile} blocks at a given offset */
     private CachingBlockReader cachingBlockReader;
-    public BlockIndexReader(final RawComparator<byte[]> c, final int treeLevel,
+    public BlockIndexReader(final KVComparator c, final int treeLevel,
         final CachingBlockReader cachingBlockReader) {
       this(c, treeLevel);
       this.cachingBlockReader = cachingBlockReader;
     }
-    public BlockIndexReader(final RawComparator<byte[]> c, final int treeLevel)
+    public BlockIndexReader(final KVComparator c, final int treeLevel)
     {
       comparator = c;
       searchTreeLevel = treeLevel;
@@ -481,7 +481,7 @@ public class HFileBlockIndex {
      */
     static int binarySearchNonRootIndex(byte[] key, int keyOffset,
         int keyLength, ByteBuffer nonRootIndex,
-        RawComparator<byte[]> comparator) {
+        KVComparator comparator) {
       int numEntries = nonRootIndex.getInt(0);
       int low = 0;
@@ -516,7 +516,7 @@ public class HFileBlockIndex {
       // we have to compare in this order, because the comparator order
       // has special logic when the 'left side' is a special key.
-      int cmp = comparator.compare(key, keyOffset, keyLength,
+      int cmp = comparator.compareFlatKey(key, keyOffset, keyLength,
           nonRootIndex.array(), nonRootIndex.arrayOffset() + midKeyOffset,
           midLength);
@@ -568,7 +568,7 @@ public class HFileBlockIndex {
    *
    */
   static int locateNonRootIndexEntry(ByteBuffer nonRootBlock, byte[] key,
-      int keyOffset, int keyLength, RawComparator<byte[]> comparator) {
+      int keyOffset, int keyLength, KVComparator comparator) {
     int entryIndex = binarySearchNonRootIndex(key, keyOffset, keyLength,
         nonRootBlock, comparator);

View File

@@ -120,7 +120,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
     dataBlockIndexReader = new HFileBlockIndex.BlockIndexReader(comparator,
         trailer.getNumDataIndexLevels(), this);
     metaBlockIndexReader = new HFileBlockIndex.BlockIndexReader(
-        Bytes.BYTES_RAWCOMPARATOR, 1);
+        KeyValue.RAW_COMPARATOR, 1);
     // Parse load-on-open data.
@@ -500,7 +500,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
       int compared;
       if (isSeeked()) {
         ByteBuffer bb = getKey();
-        compared = reader.getComparator().compare(key, offset,
+        compared = reader.getComparator().compareFlatKey(key, offset,
             length, bb.array(), bb.arrayOffset(), bb.limit());
         if (compared < 1) {
           // If the required key is less than or equal to current key, then
@@ -509,7 +509,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
         } else {
           if (this.nextIndexedKey != null &&
               (this.nextIndexedKey == HConstants.NO_NEXT_INDEXED_KEY ||
-              reader.getComparator().compare(key, offset, length,
+              reader.getComparator().compareFlatKey(key, offset, length,
                   nextIndexedKey, 0, nextIndexedKey.length) < 0)) {
             // The reader shall continue to scan the current data block instead of querying the
             // block index as long as it knows the target key is strictly smaller than
@@ -535,7 +535,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
     }
     ByteBuffer firstKey = getFirstKeyInBlock(seekToBlock);
-    if (reader.getComparator().compare(firstKey.array(),
+    if (reader.getComparator().compareFlatKey(firstKey.array(),
         firstKey.arrayOffset(), firstKey.limit(), key, offset, length) == 0)
     {
       long previousBlockOffset = seekToBlock.getPrevBlockOffset();
@@ -851,7 +851,7 @@ public class HFileReaderV2 extends AbstractHFileReader {
       int keyOffset = blockBuffer.arrayOffset() + blockBuffer.position()
           + KEY_VALUE_LEN_SIZE;
-      int comp = reader.getComparator().compare(key, offset, length,
+      int comp = reader.getComparator().compareFlatKey(key, offset, length,
           blockBuffer.array(), keyOffset, klen);
       if (comp == 0) {

View File

@@ -33,7 +33,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable;
@@ -100,7 +100,7 @@ public class HFileWriterV2 extends AbstractHFileWriter {
     public Writer createWriter(FileSystem fs, Path path,
         FSDataOutputStream ostream, int blockSize,
         Compression.Algorithm compress, HFileDataBlockEncoder blockEncoder,
-        final KeyComparator comparator, final ChecksumType checksumType,
+        final KVComparator comparator, final ChecksumType checksumType,
         final int bytesPerChecksum, boolean includeMVCCReadpoint) throws IOException {
       return new HFileWriterV2(conf, cacheConf, fs, path, ostream, blockSize, compress,
           blockEncoder, comparator, checksumType, bytesPerChecksum, includeMVCCReadpoint);
@@ -111,7 +111,7 @@ public class HFileWriterV2 extends AbstractHFileWriter {
   public HFileWriterV2(Configuration conf, CacheConfig cacheConf,
       FileSystem fs, Path path, FSDataOutputStream ostream, int blockSize,
       Compression.Algorithm compressAlgo, HFileDataBlockEncoder blockEncoder,
-      final KeyComparator comparator, final ChecksumType checksumType,
+      final KVComparator comparator, final ChecksumType checksumType,
       final int bytesPerChecksum, final boolean includeMVCCReadpoint) throws IOException {
     super(cacheConf,
         ostream == null ? createOutputStream(conf, fs, path, null) : ostream,

View File

@@ -74,7 +74,7 @@ class GetClosestRowBeforeTracker {
     this.tablenamePlusDelimiterLength = metaregion? l + 1: -1;
     this.oldestts = System.currentTimeMillis() - ttl;
     this.kvcomparator = c;
-    KeyValue.RowComparator rc = new KeyValue.RowComparator(this.kvcomparator);
+    KeyValue.RowOnlyComparator rc = new KeyValue.RowOnlyComparator(this.kvcomparator);
     this.deletes = new TreeMap<KeyValue, NavigableSet<KeyValue>>(rc);
   }

View File

@@ -529,7 +529,7 @@ public class HRegionFileSystem {
     byte[] lastKey = f.createReader().getLastKey();
     // If lastKey is null means storefile is empty.
     if (lastKey == null) return null;
-    if (f.getReader().getComparator().compare(splitKey.getBuffer(),
+    if (f.getReader().getComparator().compareFlatKey(splitKey.getBuffer(),
         splitKey.getKeyOffset(), splitKey.getKeyLength(), lastKey, 0, lastKey.length) > 0) {
       return null;
     }
@@ -539,7 +539,7 @@ public class HRegionFileSystem {
     byte[] firstKey = f.createReader().getFirstKey();
     // If firstKey is null means storefile is empty.
     if (firstKey == null) return null;
-    if (f.getReader().getComparator().compare(splitKey.getBuffer(),
+    if (f.getReader().getComparator().compareFlatKey(splitKey.getBuffer(),
         splitKey.getKeyOffset(), splitKey.getKeyLength(), firstKey, 0, firstKey.length) < 0) {
       return null;
     }

View File

@@ -81,7 +81,7 @@ public class ScanQueryMatcher {
   private final KeyValue startKey;
   /** Row comparator for the region this query is for */
-  private final KeyValue.KeyComparator rowComparator;
+  private final KeyValue.KVComparator rowComparator;
   /* row is not private for tests */
   /** Row the query is on */
@@ -145,7 +145,7 @@ public class ScanQueryMatcher {
       NavigableSet<byte[]> columns, ScanType scanType,
       long readPointToUse, long earliestPutTs, long oldestUnexpiredTS) {
     this.tr = scan.getTimeRange();
-    this.rowComparator = scanInfo.getComparator().getRawComparator();
+    this.rowComparator = scanInfo.getComparator();
     this.deletes = new ScanDeleteTracker();
     this.stopRow = scan.getStopRow();
     this.startKey = KeyValue.createFirstDeleteFamilyOnRow(scan.getStartRow(),

View File

@ -798,7 +798,7 @@ public class StoreFile {
.withBlockSize(blocksize) .withBlockSize(blocksize)
.withCompression(compress) .withCompression(compress)
.withDataBlockEncoder(this.dataBlockEncoder) .withDataBlockEncoder(this.dataBlockEncoder)
.withComparator(comparator.getRawComparator()) .withComparator(comparator)
.withChecksumType(checksumType) .withChecksumType(checksumType)
.withBytesPerChecksum(bytesPerChecksum) .withBytesPerChecksum(bytesPerChecksum)
.withFavoredNodes(favoredNodes) .withFavoredNodes(favoredNodes)
@ -877,7 +877,7 @@ public class StoreFile {
* @param kv * @param kv
*/ */
public void trackTimestamps(final KeyValue kv) { public void trackTimestamps(final KeyValue kv) {
if (KeyValue.Type.Put.getCode() == kv.getType()) { if (KeyValue.Type.Put.getCode() == kv.getTypeByte()) {
earliestPutTs = Math.min(earliestPutTs, kv.getTimestamp()); earliestPutTs = Math.min(earliestPutTs, kv.getTimestamp());
} }
if (!isTimeRangeTrackerSet) { if (!isTimeRangeTrackerSet) {
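
Illustrative sketch (not in the patch) of the getType() to getTypeByte() rename used above: getTypeByte() returns the raw type byte of the KeyValue, so a Put is detected by comparing against KeyValue.Type.Put.getCode(), as trackTimestamps does.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class TypeByteExample {
  public static void main(String[] args) {
    KeyValue put = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v"));
    // Same check as the writer's trackTimestamps above.
    if (KeyValue.Type.Put.getCode() == put.getTypeByte()) {
      System.out.println("earliestPutTs candidate: " + put.getTimestamp());
    }
  }
}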
@ -939,7 +939,7 @@ public class StoreFile {
} }
generalBloomFilterWriter.add(bloomKey, bloomKeyOffset, bloomKeyLen); generalBloomFilterWriter.add(bloomKey, bloomKeyOffset, bloomKeyLen);
if (lastBloomKey != null if (lastBloomKey != null
&& generalBloomFilterWriter.getComparator().compare(bloomKey, && generalBloomFilterWriter.getComparator().compareFlatKey(bloomKey,
bloomKeyOffset, bloomKeyLen, lastBloomKey, bloomKeyOffset, bloomKeyLen, lastBloomKey,
lastBloomKeyOffset, lastBloomKeyLen) <= 0) { lastBloomKeyOffset, lastBloomKeyLen) <= 0) {
throw new IOException("Non-increasing Bloom keys: " throw new IOException("Non-increasing Bloom keys: "
@ -1105,7 +1105,7 @@ public class StoreFile {
this.reader = null; this.reader = null;
} }
public RawComparator<byte []> getComparator() { public KVComparator getComparator() {
return reader.getComparator(); return reader.getComparator();
} }
@ -1333,7 +1333,7 @@ public class StoreFile {
// from the file info. For row-column Bloom filters this is not yet // from the file info. For row-column Bloom filters this is not yet
// a sufficient condition to return false. // a sufficient condition to return false.
boolean keyIsAfterLast = lastBloomKey != null boolean keyIsAfterLast = lastBloomKey != null
&& bloomFilter.getComparator().compare(key, lastBloomKey) > 0; && bloomFilter.getComparator().compareFlatKey(key, lastBloomKey) > 0;
if (bloomFilterType == BloomType.ROWCOL) { if (bloomFilterType == BloomType.ROWCOL) {
// Since a Row Delete is essentially a DeleteFamily applied to all // Since a Row Delete is essentially a DeleteFamily applied to all
@ -1344,7 +1344,7 @@ public class StoreFile {
null, 0, 0); null, 0, 0);
if (keyIsAfterLast if (keyIsAfterLast
&& bloomFilter.getComparator().compare(rowBloomKey, && bloomFilter.getComparator().compareFlatKey(rowBloomKey,
lastBloomKey) > 0) { lastBloomKey) > 0) {
exists = false; exists = false;
} else { } else {
@ -1388,9 +1388,9 @@ public class StoreFile {
} }
KeyValue startKeyValue = KeyValue.createFirstOnRow(scan.getStartRow()); KeyValue startKeyValue = KeyValue.createFirstOnRow(scan.getStartRow());
KeyValue stopKeyValue = KeyValue.createLastOnRow(scan.getStopRow()); KeyValue stopKeyValue = KeyValue.createLastOnRow(scan.getStopRow());
boolean nonOverLapping = (getComparator().compare(this.getFirstKey(), boolean nonOverLapping = (getComparator().compareFlatKey(this.getFirstKey(),
stopKeyValue.getKey()) > 0 && !Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW)) stopKeyValue.getKey()) > 0 && !Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW))
|| getComparator().compare(this.getLastKey(), startKeyValue.getKey()) < 0; || getComparator().compareFlatKey(this.getLastKey(), startKeyValue.getKey()) < 0;
return !nonOverLapping; return !nonOverLapping;
} }
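
A hedged sketch, not part of the commit, of the non-overlap test above, using KeyValue.COMPARATOR directly in place of the reader's comparator; the boundary keys stand in for a store file's first and last flat keys.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanOverlapExample {
  public static void main(String[] args) {
    byte[] fileFirstKey = KeyValue.createFirstOnRow(Bytes.toBytes("ccc")).getKey();
    byte[] fileLastKey  = KeyValue.createFirstOnRow(Bytes.toBytes("mmm")).getKey();

    KeyValue scanStart = KeyValue.createFirstOnRow(Bytes.toBytes("nnn"));
    KeyValue scanStop  = KeyValue.createLastOnRow(Bytes.toBytes("zzz"));

    // Same shape as the passesKeyRangeFilter check: the scan misses the file
    // if the file starts after the stop key or ends before the start key.
    boolean nonOverlapping =
        KeyValue.COMPARATOR.compareFlatKey(fileFirstKey, scanStop.getKey()) > 0
        || KeyValue.COMPARATOR.compareFlatKey(fileLastKey, scanStart.getKey()) < 0;
    System.out.println(nonOverlapping);   // true: file [ccc, mmm] ends before nnn
  }
}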

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.hbase.KeyValue.KVComparator;
/** /**
* Common Bloom filter methods required at read and write time. * Common Bloom filter methods required at read and write time.
@ -52,6 +52,6 @@ public interface BloomFilterBase {
/** /**
* @return Bloom key comparator * @return Bloom key comparator
*/ */
RawComparator<byte[]> getComparator(); KVComparator getComparator();
} }

View File

@ -200,7 +200,7 @@ public final class BloomFilterFactory {
// In case of compound Bloom filters we ignore the maxKeys hint. // In case of compound Bloom filters we ignore the maxKeys hint.
CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf), CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(), err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
bloomType == BloomType.ROWCOL ? KeyValue.KEY_COMPARATOR : Bytes.BYTES_RAWCOMPARATOR); bloomType == BloomType.ROWCOL ? KeyValue.COMPARATOR : KeyValue.RAW_COMPARATOR);
writer.addInlineBlockWriter(bloomWriter); writer.addInlineBlockWriter(bloomWriter);
return bloomWriter; return bloomWriter;
} }
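
Hedged illustration, not from the patch: after this change a ROWCOL Bloom filter compares full flat keys with KeyValue.COMPARATOR, while a row-only Bloom filter compares raw row bytes with KeyValue.RAW_COMPARATOR. The rowColBloom flag below is an assumption standing in for the bloomType == BloomType.ROWCOL check.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.util.Bytes;

public class BloomComparatorChoiceExample {
  public static void main(String[] args) {
    boolean rowColBloom = false;   // stands in for bloomType == BloomType.ROWCOL

    KVComparator bloomComparator =
        rowColBloom ? KeyValue.COMPARATOR : KeyValue.RAW_COMPARATOR;

    // Row-only Bloom keys are plain row bytes, so RAW_COMPARATOR orders them
    // lexicographically; ROWCOL keys are flat keys and need the full COMPARATOR.
    byte[] k1 = Bytes.toBytes("row-1");
    byte[] k2 = Bytes.toBytes("row-2");
    System.out.println(
        bloomComparator.compareFlatKey(k1, 0, k1.length, k2, 0, k2.length) < 0);
  }
}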
@ -231,7 +231,7 @@ public final class BloomFilterFactory {
// In case of compound Bloom filters we ignore the maxKeys hint. // In case of compound Bloom filters we ignore the maxKeys hint.
CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf), CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(), err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
Bytes.BYTES_RAWCOMPARATOR); KeyValue.RAW_COMPARATOR);
writer.addInlineBlockWriter(bloomWriter); writer.addInlineBlockWriter(bloomWriter);
return bloomWriter; return bloomWriter;
} }

View File

@ -20,6 +20,8 @@
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
@ -625,8 +627,9 @@ public class ByteBloomFilter implements BloomFilter, BloomFilterWriter {
} }
@Override @Override
public RawComparator<byte[]> getComparator() { public KVComparator getComparator() {
return Bytes.BYTES_RAWCOMPARATOR;
return KeyValue.RAW_COMPARATOR;
} }
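
Not part of the patch, a sketch of why returning KeyValue.RAW_COMPARATOR here is assumed to be behavior-preserving: for opaque byte keys it is expected to order exactly like the old Bytes.BYTES_RAWCOMPARATOR.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class RawComparatorEquivalenceExample {
  public static void main(String[] args) {
    byte[] a = Bytes.toBytes("alpha");
    byte[] b = Bytes.toBytes("beta");

    int oldOrder = Bytes.BYTES_RAWCOMPARATOR.compare(a, 0, a.length, b, 0, b.length);
    int newOrder = KeyValue.RAW_COMPARATOR.compareFlatKey(a, 0, a.length, b, 0, b.length);

    // Both treat the keys as opaque bytes, so the sign of the result matches.
    System.out.println(Integer.signum(oldOrder) == Integer.signum(newOrder));  // true
  }
}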
/** /**

View File

@ -24,12 +24,12 @@ import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.FixedFileTrailer; import org.apache.hadoop.hbase.io.hfile.FixedFileTrailer;
import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileBlock; import org.apache.hadoop.hbase.io.hfile.HFileBlock;
import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex; import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex;
import org.apache.hadoop.io.RawComparator;
/** /**
* A Bloom filter implementation built on top of {@link ByteBloomFilter}, * A Bloom filter implementation built on top of {@link ByteBloomFilter},
@ -131,7 +131,7 @@ public class CompoundBloomFilter extends CompoundBloomFilterBase
} }
@Override @Override
public RawComparator<byte[]> getComparator() { public KVComparator getComparator() {
return comparator; return comparator;
} }

View File

@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.util;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.RawComparator;
@InterfaceAudience.Private @InterfaceAudience.Private
@ -51,7 +52,7 @@ public class CompoundBloomFilterBase implements BloomFilterBase {
protected int hashType; protected int hashType;
/** Comparator used to compare Bloom filter keys */ /** Comparator used to compare Bloom filter keys */
protected RawComparator<byte[]> comparator; protected KVComparator comparator;
@Override @Override
public long getMaxKeys() { public long getMaxKeys() {
@ -89,7 +90,7 @@ public class CompoundBloomFilterBase implements BloomFilterBase {
} }
@Override @Override
public RawComparator<byte[]> getComparator() { public KVComparator getComparator() {
return comparator; return comparator;
} }

View File

@ -28,10 +28,10 @@ import java.util.Queue;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex; import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex;
import org.apache.hadoop.hbase.io.hfile.InlineBlockWriter; import org.apache.hadoop.hbase.io.hfile.InlineBlockWriter;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
/** /**
@ -89,7 +89,7 @@ public class CompoundBloomFilterWriter extends CompoundBloomFilterBase
*/ */
public CompoundBloomFilterWriter(int chunkByteSizeHint, float errorRate, public CompoundBloomFilterWriter(int chunkByteSizeHint, float errorRate,
int hashType, int maxFold, boolean cacheOnWrite, int hashType, int maxFold, boolean cacheOnWrite,
RawComparator<byte[]> comparator) { KVComparator comparator) {
chunkByteSize = ByteBloomFilter.computeFoldableByteSize( chunkByteSize = ByteBloomFilter.computeFoldableByteSize(
chunkByteSizeHint * 8L, maxFold); chunkByteSizeHint * 8L, maxFold);
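
Illustrative only: constructing the writer with its new KVComparator argument, mirroring what BloomFilterFactory does above. The block size, error rate, fold factor, and cache-on-write values are arbitrary, and Hash.MURMUR_HASH is used only as an example hash type.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.CompoundBloomFilterWriter;
import org.apache.hadoop.hbase.util.Hash;

public class CompoundBloomWriterExample {
  public static void main(String[] args) {
    // Row-only Bloom: keys are raw row bytes, so use RAW_COMPARATOR.
    CompoundBloomFilterWriter rowBloom = new CompoundBloomFilterWriter(
        128 * 1024, 0.01f, Hash.MURMUR_HASH, 7, true, KeyValue.RAW_COMPARATOR);

    // ROWCOL Bloom: keys are flat row+column keys, so use the full COMPARATOR.
    CompoundBloomFilterWriter rowColBloom = new CompoundBloomFilterWriter(
        128 * 1024, 0.01f, Hash.MURMUR_HASH, 7, true, KeyValue.COMPARATOR);

    System.out.println(rowBloom.getComparator() + " " + rowColBloom.getComparator());
  }
}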

View File

@ -185,7 +185,7 @@ public class TestDataBlockEncoders {
ByteBuffer.wrap(encodeBytes(encoding, originalBuffer)); ByteBuffer.wrap(encodeBytes(encoding, originalBuffer));
DataBlockEncoder encoder = encoding.getEncoder(); DataBlockEncoder encoder = encoding.getEncoder();
DataBlockEncoder.EncodedSeeker seeker = DataBlockEncoder.EncodedSeeker seeker =
encoder.createSeeker(KeyValue.KEY_COMPARATOR, includesMemstoreTS); encoder.createSeeker(KeyValue.COMPARATOR, includesMemstoreTS);
seeker.setCurrentBuffer(encodedBuffer); seeker.setCurrentBuffer(encodedBuffer);
encodedSeekers.add(seeker); encodedSeekers.add(seeker);
} }
@ -240,7 +240,7 @@ public class TestDataBlockEncoders {
"Bug while encoding using '%s'", encoder.toString()), e); "Bug while encoding using '%s'", encoder.toString()), e);
} }
DataBlockEncoder.EncodedSeeker seeker = DataBlockEncoder.EncodedSeeker seeker =
encoder.createSeeker(KeyValue.KEY_COMPARATOR, includesMemstoreTS); encoder.createSeeker(KeyValue.COMPARATOR, includesMemstoreTS);
seeker.setCurrentBuffer(encodedBuffer); seeker.setCurrentBuffer(encodedBuffer);
int i = 0; int i = 0;
do { do {

View File

@ -77,7 +77,7 @@ public class TestPrefixTreeEncoding {
HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext( HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]); Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx); encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false); EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader(); byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
DataBlockEncoding.ID_SIZE, onDiskBytes.length DataBlockEncoding.ID_SIZE, onDiskBytes.length
@ -117,7 +117,7 @@ public class TestPrefixTreeEncoding {
HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext( HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]); Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx); encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false); EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
byte[] onDiskBytes=blkEncodingCtx.getOnDiskBytesWithHeader(); byte[] onDiskBytes=blkEncodingCtx.getOnDiskBytesWithHeader();
ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
DataBlockEncoding.ID_SIZE, onDiskBytes.length DataBlockEncoding.ID_SIZE, onDiskBytes.length
@ -143,7 +143,7 @@ public class TestPrefixTreeEncoding {
HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext( HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]); Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx); encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false); EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader(); byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
DataBlockEncoding.ID_SIZE, onDiskBytes.length DataBlockEncoding.ID_SIZE, onDiskBytes.length
@ -159,7 +159,7 @@ public class TestPrefixTreeEncoding {
HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext( HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]); Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx); encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR,
false); false);
byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader(); byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,

View File

@ -92,7 +92,7 @@ public class TestFixedFileTrailer {
t.setLastDataBlockOffset(291); t.setLastDataBlockOffset(291);
t.setNumDataIndexLevels(3); t.setNumDataIndexLevels(3);
t.setComparatorClass(KeyValue.KEY_COMPARATOR.getClass()); t.setComparatorClass(KeyValue.COMPARATOR.getClass());
t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic. t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
t.setUncompressedDataIndexSize(827398717L); // Something random. t.setUncompressedDataIndexSize(827398717L); // Something random.
@ -209,7 +209,7 @@ public class TestFixedFileTrailer {
assertEquals(expected.getFirstDataBlockOffset(), assertEquals(expected.getFirstDataBlockOffset(),
loaded.getFirstDataBlockOffset()); loaded.getFirstDataBlockOffset());
assertTrue( assertTrue(
expected.createComparator() instanceof KeyValue.KeyComparator); expected.createComparator() instanceof KeyValue.KVComparator);
assertEquals(expected.getUncompressedDataIndexSize(), assertEquals(expected.getUncompressedDataIndexSize(),
loaded.getUncompressedDataIndexSize()); loaded.getUncompressedDataIndexSize());
} }

View File

@ -35,7 +35,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile.Reader; import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
@ -216,7 +215,7 @@ public class TestHFile extends HBaseTestCase {
.withBlockSize(minBlockSize) .withBlockSize(minBlockSize)
.withCompression(codec) .withCompression(codec)
// NOTE: This test is dependent on this deprecated nonstandard comparator // NOTE: This test is dependent on this deprecated nonstandard comparator
.withComparator(new KeyValue.RawKeyComparator()) .withComparator(new KeyValue.RawBytesComparator())
.create(); .create();
LOG.info(writer); LOG.info(writer);
writeRecords(writer); writeRecords(writer);
@ -350,36 +349,5 @@ public class TestHFile extends HBaseTestCase {
assertTrue(Compression.Algorithm.LZ4.ordinal() == 4); assertTrue(Compression.Algorithm.LZ4.ordinal() == 4);
} }
// This can't be an anonymous class because the compiler will not generate
// a nullary constructor for it.
static class CustomKeyComparator extends KeyComparator {
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,
int l2) {
return -Bytes.compareTo(b1, s1, l1, b2, s2, l2);
}
@Override
public int compare(byte[] o1, byte[] o2) {
return compare(o1, 0, o1.length, o2, 0, o2.length);
}
}
public void testComparator() throws IOException {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
Path mFile = new Path(ROOT_DIR, "meta.tfile");
FSDataOutputStream fout = createFSOutput(mFile);
KeyComparator comparator = new CustomKeyComparator();
Writer writer = HFile.getWriterFactory(conf, cacheConf)
.withOutputStream(fout)
.withBlockSize(minBlockSize)
.withComparator(comparator)
.create();
writer.append("3".getBytes(), "0".getBytes());
writer.append("2".getBytes(), "0".getBytes());
writer.append("1".getBytes(), "0".getBytes());
writer.close();
}
} }
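
A hedged sketch, not from the patch, of writing an HFile whose keys are opaque bytes rather than serialized KeyValues, which is what the deprecated-comparator tests above exercise. It assumes a local filesystem scratch path; note the removed CustomKeyComparator test wrote keys in descending order, an arrangement this cleanup drops, so keys here are appended in the comparator's ascending order.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.Bytes;

public class RawBytesHFileExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path(System.getProperty("java.io.tmpdir"), "raw-keys.hfile");
    FSDataOutputStream out = fs.create(path);

    HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
        .withOutputStream(out)
        .withBlockSize(64 * 1024)
        .withComparator(new KeyValue.RawBytesComparator())
        .create();
    // Keys must arrive in the comparator's ascending order.
    writer.append(Bytes.toBytes("1"), Bytes.toBytes("v1"));
    writer.append(Bytes.toBytes("2"), Bytes.toBytes("v2"));
    writer.append(Bytes.toBytes("3"), Bytes.toBytes("v3"));
    writer.close();
    out.close();
  }
}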

View File

@ -42,11 +42,13 @@ import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader;
import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk; import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk;
import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.Before; import org.junit.Before;
@ -173,7 +175,7 @@ public class TestHFileBlockIndex {
BlockReaderWrapper brw = new BlockReaderWrapper(blockReader); BlockReaderWrapper brw = new BlockReaderWrapper(blockReader);
HFileBlockIndex.BlockIndexReader indexReader = HFileBlockIndex.BlockIndexReader indexReader =
new HFileBlockIndex.BlockIndexReader( new HFileBlockIndex.BlockIndexReader(
Bytes.BYTES_RAWCOMPARATOR, numLevels, brw); KeyValue.RAW_COMPARATOR, numLevels, brw);
indexReader.readRootIndex(blockReader.blockRange(rootIndexOffset, indexReader.readRootIndex(blockReader.blockRange(rootIndexOffset,
fileSize).nextBlockWithBlockType(BlockType.ROOT_INDEX), numRootEntries); fileSize).nextBlockWithBlockType(BlockType.ROOT_INDEX), numRootEntries);
@ -355,7 +357,7 @@ public class TestHFileBlockIndex {
int searchResult = BlockIndexReader.binarySearchNonRootIndex( int searchResult = BlockIndexReader.binarySearchNonRootIndex(
arrayHoldingKey, searchKey.length / 2, searchKey.length, nonRootIndex, arrayHoldingKey, searchKey.length / 2, searchKey.length, nonRootIndex,
Bytes.BYTES_RAWCOMPARATOR); KeyValue.RAW_COMPARATOR);
String lookupFailureMsg = "Failed to look up key #" + i + " (" String lookupFailureMsg = "Failed to look up key #" + i + " ("
+ Bytes.toStringBinary(searchKey) + ")"; + Bytes.toStringBinary(searchKey) + ")";
@ -381,7 +383,7 @@ public class TestHFileBlockIndex {
// higher-level API functions. // higher-level API functions.
boolean locateBlockResult = boolean locateBlockResult =
(BlockIndexReader.locateNonRootIndexEntry(nonRootIndex, arrayHoldingKey, (BlockIndexReader.locateNonRootIndexEntry(nonRootIndex, arrayHoldingKey,
searchKey.length / 2, searchKey.length, Bytes.BYTES_RAWCOMPARATOR) != -1); searchKey.length / 2, searchKey.length, KeyValue.RAW_COMPARATOR) != -1);
if (i == 0) { if (i == 0) {
assertFalse(locateBlockResult); assertFalse(locateBlockResult);
@ -441,7 +443,7 @@ public class TestHFileBlockIndex {
long expected = ClassSize.estimateBase(cl, false); long expected = ClassSize.estimateBase(cl, false);
HFileBlockIndex.BlockIndexReader bi = HFileBlockIndex.BlockIndexReader bi =
new HFileBlockIndex.BlockIndexReader(Bytes.BYTES_RAWCOMPARATOR, 1); new HFileBlockIndex.BlockIndexReader(KeyValue.RAW_COMPARATOR, 1);
long actual = bi.heapSize(); long actual = bi.heapSize();
// Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets, // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
@ -506,7 +508,7 @@ public class TestHFileBlockIndex {
keyStrSet.add(Bytes.toStringBinary(k)); keyStrSet.add(Bytes.toStringBinary(k));
if (i > 0) { if (i > 0) {
assertTrue(KeyValue.KEY_COMPARATOR.compare(keys[i - 1], assertTrue(KeyValue.COMPARATOR.compareFlatKey(keys[i - 1],
keys[i]) < 0); keys[i]) < 0);
} }
} }
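
Not part of the patch: a small sketch of the assertion style used above, checking that flat keys built from increasing row names are also strictly increasing under KeyValue.COMPARATOR.compareFlatKey.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class FlatKeyOrderExample {
  public static void main(String[] args) {
    byte[] prev = null;
    for (int i = 0; i < 5; i++) {
      byte[] key = KeyValue.createFirstOnRow(
          Bytes.toBytes(String.format("row-%04d", i))).getKey();
      if (prev != null
          && KeyValue.COMPARATOR.compareFlatKey(prev, key) >= 0) {
        throw new AssertionError("keys out of order at " + i);
      }
      prev = key;
    }
    System.out.println("flat keys strictly increasing");
  }
}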

View File

@ -166,7 +166,7 @@ public class TestHFilePerformance extends TestCase {
.withOutputStream(fout) .withOutputStream(fout)
.withBlockSize(minBlockSize) .withBlockSize(minBlockSize)
.withCompression(codecName) .withCompression(codecName)
.withComparator(new KeyValue.RawKeyComparator()) .withComparator(new KeyValue.RawBytesComparator())
.create(); .create();
// Writing value in one shot. // Writing value in one shot.

View File

@ -131,7 +131,7 @@ public class TestHFileSeek extends TestCase {
.withOutputStream(fout) .withOutputStream(fout)
.withBlockSize(options.minBlockSize) .withBlockSize(options.minBlockSize)
.withCompression(options.compress) .withCompression(options.compress)
.withComparator(new KeyValue.RawKeyComparator()) .withComparator(new KeyValue.RawBytesComparator())
.create(); .create();
try { try {
BytesWritable key = new BytesWritable(); BytesWritable key = new BytesWritable();

View File

@ -39,13 +39,13 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.WritableUtils;
import org.junit.Before; import org.junit.Before;
@ -137,13 +137,13 @@ public class TestHFileWriterV2 {
HFileBlock.FSReader blockReader = HFileBlock.FSReader blockReader =
new HFileBlock.FSReaderV2(fsdis, compressAlgo, fileSize); new HFileBlock.FSReaderV2(fsdis, compressAlgo, fileSize);
// Comparator class name is stored in the trailer in version 2. // Comparator class name is stored in the trailer in version 2.
RawComparator<byte []> comparator = trailer.createComparator(); KVComparator comparator = trailer.createComparator();
HFileBlockIndex.BlockIndexReader dataBlockIndexReader = HFileBlockIndex.BlockIndexReader dataBlockIndexReader =
new HFileBlockIndex.BlockIndexReader(comparator, new HFileBlockIndex.BlockIndexReader(comparator,
trailer.getNumDataIndexLevels()); trailer.getNumDataIndexLevels());
HFileBlockIndex.BlockIndexReader metaBlockIndexReader = HFileBlockIndex.BlockIndexReader metaBlockIndexReader =
new HFileBlockIndex.BlockIndexReader( new HFileBlockIndex.BlockIndexReader(
Bytes.BYTES_RAWCOMPARATOR, 1); KeyValue.RAW_COMPARATOR, 1);
HFileBlock.BlockIterator blockIter = blockReader.blockRange( HFileBlock.BlockIterator blockIter = blockReader.blockRange(
trailer.getLoadOnOpenDataOffset(), trailer.getLoadOnOpenDataOffset(),

View File

@ -51,7 +51,7 @@ public class TestReseekTo {
.withOutputStream(fout) .withOutputStream(fout)
.withBlockSize(4000) .withBlockSize(4000)
// NOTE: This test is dependent on this deprecated nonstandard comparator // NOTE: This test is dependent on this deprecated nonstandard comparator
.withComparator(new KeyValue.RawKeyComparator()) .withComparator(new KeyValue.RawBytesComparator())
.create(); .create();
int numberOfKeys = 1000; int numberOfKeys = 1000;

View File

@ -23,7 +23,6 @@ import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.RawComparator;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -51,7 +50,7 @@ public class TestSeekTo extends HBaseTestCase {
.withOutputStream(fout) .withOutputStream(fout)
.withBlockSize(blocksize) .withBlockSize(blocksize)
// NOTE: This test is dependent on this deprecated nonstandard comparator // NOTE: This test is dependent on this deprecated nonstandard comparator
.withComparator(new KeyValue.RawKeyComparator()) .withComparator(KeyValue.RAW_COMPARATOR)
.create(); .create();
// 4 bytes * 3 * 2 for each key/value + // 4 bytes * 3 * 2 for each key/value +
// 3 for keys, 15 for values = 42 (woot) // 3 for keys, 15 for values = 42 (woot)

View File

@ -536,9 +536,10 @@ public class TestMemStore extends TestCase {
List<KeyValue> results = new ArrayList<KeyValue>(); List<KeyValue> results = new ArrayList<KeyValue>();
for (int i = 0; scanner.next(results); i++) { for (int i = 0; scanner.next(results); i++) {
int rowId = startRowId + i; int rowId = startRowId + i;
KeyValue left = results.get(0);
byte[] row1 = Bytes.toBytes(rowId);
assertTrue("Row name", assertTrue("Row name",
KeyValue.COMPARATOR.compareRows(results.get(0), KeyValue.COMPARATOR.compareRows(left.getBuffer(), left.getRowOffset(), (int) left.getRowLength(), row1, 0, row1.length) == 0);
Bytes.toBytes(rowId)) == 0);
assertEquals("Count of columns", QUALIFIER_COUNT, results.size()); assertEquals("Count of columns", QUALIFIER_COUNT, results.size());
List<KeyValue> row = new ArrayList<KeyValue>(); List<KeyValue> row = new ArrayList<KeyValue>();
for (KeyValue kv : results) { for (KeyValue kv : results) {
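
Illustrative only: the compareRows overload used in the updated test takes explicit buffer/offset/length arguments for the left-hand KeyValue; this sketch makes the same call against a literal row built for the example.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CompareRowsExample {
  public static void main(String[] args) {
    KeyValue left = new KeyValue(Bytes.toBytes("row-7"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v"));
    byte[] expectedRow = Bytes.toBytes("row-7");

    int cmp = KeyValue.COMPARATOR.compareRows(
        left.getBuffer(), left.getRowOffset(), (int) left.getRowLength(),
        expectedRow, 0, expectedRow.length);
    System.out.println(cmp == 0);   // true: only the row portion is compared
  }
}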

View File

@ -327,7 +327,7 @@ public class TestStoreFile extends HBaseTestCase {
(topScanner.isSeeked() && topScanner.next())) { (topScanner.isSeeked() && topScanner.next())) {
key = topScanner.getKey(); key = topScanner.getKey();
if (topScanner.getReader().getComparator().compare(key.array(), if (topScanner.getReader().getComparator().compareFlatKey(key.array(),
key.arrayOffset(), key.limit(), midkey, 0, midkey.length) < 0) { key.arrayOffset(), key.limit(), midkey, 0, midkey.length) < 0) {
fail("key=" + Bytes.toStringBinary(key) + " < midkey=" + fail("key=" + Bytes.toStringBinary(key) + " < midkey=" +
Bytes.toStringBinary(midkey)); Bytes.toStringBinary(midkey));
@ -377,7 +377,7 @@ public class TestStoreFile extends HBaseTestCase {
while ((!topScanner.isSeeked() && topScanner.seekTo()) || while ((!topScanner.isSeeked() && topScanner.seekTo()) ||
topScanner.next()) { topScanner.next()) {
key = topScanner.getKey(); key = topScanner.getKey();
assertTrue(topScanner.getReader().getComparator().compare(key.array(), assertTrue(topScanner.getReader().getComparator().compareFlatKey(key.array(),
key.arrayOffset(), key.limit(), badmidkey, 0, badmidkey.length) >= 0); key.arrayOffset(), key.limit(), badmidkey, 0, badmidkey.length) >= 0);
if (first) { if (first) {
first = false; first = false;