HBASE-9262 Make KeyValue.KEY_COMPARATOR default for HFileWriterFactory

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1515591 13f79535-47bb-0310-9956-ffa450edef68
Jonathan Hsieh 2013-08-19 20:30:02 +00:00
parent bf80b04b97
commit 7d94fa70f5
20 changed files with 126 additions and 204 deletions
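
The practical effect for callers: HFile writers created through the factory now default to KeyValue.KEY_COMPARATOR instead of Bytes.BYTES_RAWCOMPARATOR, which is why the test hunks below simply drop their explicit .withComparator(KeyValue.KEY_COMPARATOR) calls. A minimal sketch of the resulting calling pattern (the builder calls are taken from the hunks below; the wrapper class and helper name are illustrative, not part of this patch):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;

public class DefaultComparatorExample {
  // Create an HFile writer without naming a comparator. Before this patch
  // the factory fell back to Bytes.BYTES_RAWCOMPARATOR; after it, the
  // default is KeyValue.KEY_COMPARATOR, so the explicit withComparator()
  // calls deleted from the tests below are redundant.
  static HFile.Writer createWriter(Configuration conf, FileSystem fs, Path path)
      throws IOException {
    return HFile.getWriterFactory(conf, new CacheConfig(conf))
        .withPath(fs, path)
        .create();
  }
}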

View File: HConnectionManager.java

@@ -1281,7 +1281,7 @@ public class HConnectionManager {
// checking is actually the last region in the table.
byte[] endKey = possibleRegion.getRegionInfo().getEndKey();
if (Bytes.equals(endKey, HConstants.EMPTY_END_ROW) ||
- KeyValue.getRowComparator(tableName).compareRows(
+ tableName.getRowComparator().compareRows(
endKey, 0, endKey.length, row, 0, row.length) > 0) {
return possibleRegion;
}

View File: ColumnRangeFilter.java

@@ -19,6 +19,8 @@
package org.apache.hadoop.hbase.filter;
+ import static org.apache.hadoop.hbase.util.Bytes.len;
import com.google.common.base.Preconditions;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
@@ -214,8 +216,8 @@ public class ColumnRangeFilter extends FilterBase {
public KeyValue getNextKeyHint(KeyValue kv) {
return KeyValue.createFirstOnRow(kv.getBuffer(), kv.getRowOffset(), kv
.getRowLength(), kv.getBuffer(), kv.getFamilyOffset(), kv
- .getFamilyLength(), this.minColumn, 0, this.minColumn == null ? 0
- : this.minColumn.length);
+ .getFamilyLength(), this.minColumn, 0, len(this.minColumn));
}
@Override

View File: KeyValue.java

@@ -19,6 +19,8 @@
*/
package org.apache.hadoop.hbase;
+ import static org.apache.hadoop.hbase.util.Bytes.len;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
@@ -97,22 +99,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
*/
public static final KVComparator META_COMPARATOR = new MetaComparator();
- /**
- * Get the appropriate row comparator for the specified table.
- *
- * Hopefully we can get rid of this, I added this here because it's replacing
- * something in HSK. We should move completely off of that.
- *
- * @param tableName The table name.
- * @return The comparator.
- */
- public static KeyComparator getRowComparator(TableName tableName) {
- if(TableName.META_TABLE_NAME.equals(tableName)) {
- return META_COMPARATOR.getRawComparator();
- }
- return COMPARATOR.getRawComparator();
- }
/** Size of the key length field in bytes*/
public static final int KEY_LENGTH_SIZE = Bytes.SIZEOF_INT;
@@ -317,7 +303,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
* @param timestamp
*/
public KeyValue(final byte [] row, final long timestamp) {
- this(row, timestamp, Type.Maximum);
+ this(row, null, null, timestamp, Type.Maximum, null);
}
/**
@@ -393,31 +379,8 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
public KeyValue(final byte[] row, final byte[] family,
final byte[] qualifier, final long timestamp, Type type,
final byte[] value) {
- this(row, family, qualifier, 0, qualifier==null ? 0 : qualifier.length,
- timestamp, type, value, 0, value==null ? 0 : value.length);
- }
- /**
- * Constructs KeyValue structure filled with specified values.
- * @param row row key
- * @param family family name
- * @param qualifier column qualifier
- * @param qoffset qualifier offset
- * @param qlength qualifier length
- * @param timestamp version timestamp
- * @param type key type
- * @param value column value
- * @param voffset value offset
- * @param vlength value length
- * @throws IllegalArgumentException
- */
- public KeyValue(byte [] row, byte [] family,
- byte [] qualifier, int qoffset, int qlength, long timestamp, Type type,
- byte [] value, int voffset, int vlength) {
- this(row, 0, row==null ? 0 : row.length,
- family, 0, family==null ? 0 : family.length,
- qualifier, qoffset, qlength, timestamp, type,
- value, voffset, vlength);
+ this(row, 0, len(row), family, 0, len(family), qualifier, 0, len(qualifier),
+ timestamp, type, value, 0, len(value));
}
/**
@@ -529,85 +492,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
return bytes;
}
- /**
- * Constructs KeyValue structure filled with specified values. Uses the provided buffer as its
- * backing data buffer.
- * <p>
- * Column is split into two fields, family and qualifier.
- *
- * @param buffer the bytes buffer to use
- * @param row row key
- * @param roffset row offset
- * @param rlength row length
- * @param family family name
- * @param foffset family offset
- * @param flength family length
- * @param qualifier column qualifier
- * @param qoffset qualifier offset
- * @param qlength qualifier length
- * @param timestamp version timestamp
- * @param type key type
- * @param value column value
- * @param voffset value offset
- * @param vlength value length
- * @throws IllegalArgumentException an illegal value was passed or there is insufficient space
- * remaining in the buffer
- */
- public KeyValue(byte [] buffer,
- final byte [] row, final int roffset, final int rlength,
- final byte [] family, final int foffset, final int flength,
- final byte [] qualifier, final int qoffset, final int qlength,
- final long timestamp, final Type type,
- final byte [] value, final int voffset, final int vlength) {
- this(buffer, 0,
- row, roffset, rlength,
- family, foffset, flength,
- qualifier, qoffset, qlength,
- timestamp, type,
- value, voffset, vlength);
- }
- /**
- * Constructs KeyValue structure filled with specified values. Uses the provided buffer as the
- * data buffer.
- * <p>
- * Column is split into two fields, family and qualifier.
- *
- * @param buffer the bytes buffer to use
- * @param boffset buffer offset
- * @param row row key
- * @param roffset row offset
- * @param rlength row length
- * @param family family name
- * @param foffset family offset
- * @param flength family length
- * @param qualifier column qualifier
- * @param qoffset qualifier offset
- * @param qlength qualifier length
- * @param timestamp version timestamp
- * @param type key type
- * @param value column value
- * @param voffset value offset
- * @param vlength value length
- * @throws IllegalArgumentException an illegal value was passed or there is insufficient space
- * remaining in the buffer
- */
- public KeyValue(byte [] buffer, final int boffset,
- final byte [] row, final int roffset, final int rlength,
- final byte [] family, final int foffset, final int flength,
- final byte [] qualifier, final int qoffset, final int qlength,
- final long timestamp, final Type type,
- final byte [] value, final int voffset, final int vlength) {
- this.bytes = buffer;
- this.length = writeByteArray(buffer, boffset,
- row, roffset, rlength,
- family, foffset, flength, qualifier, qoffset, qlength,
- timestamp, type, value, voffset, vlength);
- this.offset = boffset;
- }
/**
* Checks the parameters passed to a constructor.
*
@@ -683,7 +567,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
* @throws IllegalArgumentException an illegal value was passed or there is insufficient space
* remaining in the buffer
*/
- static int writeByteArray(byte [] buffer, final int boffset,
+ private static int writeByteArray(byte [] buffer, final int boffset,
final byte [] row, final int roffset, final int rlength,
final byte [] family, final int foffset, int flength,
final byte [] qualifier, final int qoffset, int qlength,
@@ -1083,7 +967,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
/**
* @return Family offset
*/
- public int getFamilyOffset(int rlength) {
+ private int getFamilyOffset(int rlength) {
return this.offset + ROW_OFFSET + Bytes.SIZEOF_SHORT + rlength + Bytes.SIZEOF_BYTE;
}
@@ -1121,7 +1005,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
/**
* @return Qualifier offset
*/
- public int getQualifierOffset(int foffset) {
+ private int getQualifierOffset(int foffset) {
return foffset + getFamilyLength(foffset);
}
@@ -1136,7 +1020,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
/**
* @return Qualifier length
*/
- public int getQualifierLength(int rlength, int flength) {
+ private int getQualifierLength(int rlength, int flength) {
return getKeyLength() - (int) getKeyDataStructureSize(rlength, flength, 0);
}
@@ -1152,7 +1036,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
/**
* @return Column (family + qualifier) length
*/
- public int getTotalColumnLength(int rlength, int foffset) {
+ private int getTotalColumnLength(int rlength, int foffset) {
int flength = getFamilyLength(foffset);
int qlength = getQualifierLength(rlength,flength);
return flength + qlength;
@@ -1169,7 +1053,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
* @param keylength Pass if you have it to save on a int creation.
* @return Timestamp offset
*/
- public int getTimestampOffset(final int keylength) {
+ private int getTimestampOffset(final int keylength) {
return getKeyOffset() + keylength - TIMESTAMP_TYPE_SIZE;
}
@@ -1298,8 +1182,9 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
/**
* @return Type of this KeyValue.
*/
+ @Deprecated
public byte getType() {
- return getType(getKeyLength());
+ return getTypeByte();
}
/**
@@ -1307,15 +1192,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
*/
@Override
public byte getTypeByte() {
- return getType(getKeyLength());
- }
- /**
- * @param keylength Pass if you have it to save on a int creation.
- * @return Type of this KeyValue.
- */
- byte getType(final int keylength) {
- return this.bytes[this.offset + keylength - 1 + ROW_OFFSET];
+ return this.bytes[this.offset + getKeyLength() - 1 + ROW_OFFSET];
}
/**
@@ -1332,21 +1209,21 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
*/
public boolean isDeleteType() {
// TODO: Fix this method name vis-a-vis isDelete!
- return getType() == Type.Delete.getCode();
+ return getTypeByte() == Type.Delete.getCode();
}
/**
* @return True if this KV is a delete family type.
*/
public boolean isDeleteFamily() {
- return getType() == Type.DeleteFamily.getCode();
+ return getTypeByte() == Type.DeleteFamily.getCode();
}
/**
* @return True if this KV is a delete family-version type.
*/
public boolean isDeleteFamilyVersion() {
- return getType() == Type.DeleteFamilyVersion.getCode();
+ return getTypeByte() == Type.DeleteFamilyVersion.getCode();
}
/**
@@ -1354,7 +1231,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
* @return True if this KV is a delete family or column type.
*/
public boolean isDeleteColumnOrFamily() {
- int t = getType();
+ int t = getTypeByte();
return t == Type.DeleteColumn.getCode() || t == Type.DeleteFamily.getCode();
}
@@ -1539,8 +1416,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
* @return True if column matches
*/
public boolean matchingColumn(final byte[] family, final byte[] qualifier) {
- return matchingColumn(family, 0, family == null ? 0 : family.length,
- qualifier, 0, qualifier == null ? 0 : qualifier.length);
+ return matchingColumn(family, 0, len(family), qualifier, 0, len(qualifier));
}
/**
@@ -2252,12 +2128,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
throw new IllegalArgumentException("Buffer size " + (buffer.length - boffset) + " < " +
iLength);
}
- return new KeyValue(buffer, boffset,
- row, roffset, rlength,
- family, foffset, flength,
- qualifier, qoffset, qlength,
- HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum,
+ int len = writeByteArray(buffer, boffset, row, roffset, rlength, family, foffset, flength,
+ qualifier, qoffset, qlength, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum,
null, 0, 0);
+ return new KeyValue(buffer, boffset, len);
}
/**
@@ -2533,6 +2408,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
public static class KeyComparator
implements RawComparator<byte []>, SamePrefixComparator<byte[]> {
+ @Override
public int compare(byte[] left, int loffset, int llength, byte[] right,
int roffset, int rlength) {
// Compare row
@@ -2706,7 +2582,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
* @param rightKey the current block's real start key usually
* @return newKey: the newly generated faked key
*/
- public byte[] getShortMidpointKey(final byte[] leftKey, final byte[] rightKey) {
+ protected byte[] getShortMidpointKey(final byte[] leftKey, final byte[] rightKey) {
if (rightKey == null) {
throw new IllegalArgumentException("rightKey can not be null");
}
@@ -2798,6 +2674,50 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
}
return 0;
}
+ /**
+ * Generate a shorter faked key into index block. For example, consider a block boundary
+ * between the keys "the quick brown fox" and "the who test text". We can use "the r" as the
+ * key for the index block entry since it is > all entries in the previous block and <= all
+ * entries in subsequent blocks.
+ *
+ * @param lastKeyOfPreviousBlock
+ * @param firstKeyInBlock
+ * @return a shortened null key, or if there are unexpected results, the firstKeyIn (new) Block
+ */
+ public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) {
+ byte[] fakeKey = getShortMidpointKey(lastKeyOfPreviousBlock, firstKeyInBlock);
+ if (compare(fakeKey, firstKeyInBlock) > 0) {
+ LOG.error("Unexpected getShortMidpointKey result, fakeKey:"
+ + Bytes.toStringBinary(fakeKey) + ", firstKeyInBlock:"
+ + Bytes.toStringBinary(firstKeyInBlock));
+ return firstKeyInBlock;
+ }
+ if (lastKeyOfPreviousBlock != null && compare(lastKeyOfPreviousBlock, fakeKey) >= 0) {
+ LOG.error("Unexpected getShortMidpointKey result, lastKeyOfPreviousBlock:" +
+ Bytes.toStringBinary(lastKeyOfPreviousBlock) + ", fakeKey:" +
+ Bytes.toStringBinary(fakeKey));
+ return firstKeyInBlock;
+ }
+ return fakeKey;
+ }
}
+ /**
+ * This is a TEST only Comparator used in TestSeekTo and TestReseekTo.
+ */
+ @Deprecated
+ public static class RawKeyComparator extends KeyComparator {
+ RawComparator<byte []> getRawComparator() { return Bytes.BYTES_RAWCOMPARATOR; }
+ public int compare(byte[] left, int loffset, int llength, byte[] right,
+ int roffset, int rlength) {
+ return getRawComparator().compare(left, loffset, llength, right, roffset, rlength);
+ }
+ public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) {
+ return firstKeyInBlock;
+ }
+ }
/**
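
Before leaving KeyValue.java, a minimal sketch of driving the new calcIndexKey hook directly, using the block boundary from its javadoc (the two-argument KeyValue constructor and getKey() are existing APIs shown or referenced in this patch; the wrapper class is illustrative):

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CalcIndexKeyExample {
  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    // Serialized keys on either side of a block boundary, per the javadoc
    // example above.
    byte[] lastKeyOfPreviousBlock =
        new KeyValue(Bytes.toBytes("the quick brown fox"), now).getKey();
    byte[] firstKeyInBlock =
        new KeyValue(Bytes.toBytes("the who test text"), now).getKey();
    // The returned index key sorts after every key in the previous block and
    // at or before every key in the new block, yet is typically shorter.
    byte[] indexKey =
        KeyValue.KEY_COMPARATOR.calcIndexKey(lastKeyOfPreviousBlock, firstKeyInBlock);
    System.out.println(Bytes.toStringBinary(indexKey));
  }
}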

View File: TableName.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+ import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -306,4 +307,16 @@ public final class TableName implements Comparable<TableName> {
public int compareTo(TableName tableName) {
return this.nameAsString.compareTo(tableName.getNameAsString());
}
+ /**
+ * Get the appropriate row comparator for this table.
+ *
+ * @return The comparator.
+ */
+ public KeyComparator getRowComparator() {
+ if(TableName.META_TABLE_NAME.equals(this)) {
+ return KeyValue.META_COMPARATOR.getRawComparator();
+ }
+ return KeyValue.COMPARATOR.getRawComparator();
+ }
}
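
Call sites move from the deleted static KeyValue.getRowComparator(tableName) to this instance method, as the HConnectionManager hunk at the top shows. A sketch of the migrated check (compareRows and TableName.valueOf are existing APIs; the helper name and setup are illustrative):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

public class RowComparatorMigration {
  // Hypothetical helper mirroring the HConnectionManager check above: true
  // when the given row sorts before the region end key. The old form was
  // KeyValue.getRowComparator(tableName).compareRows(...).
  static boolean rowIsBeforeEndKey(TableName tableName, byte[] endKey, byte[] row) {
    return tableName.getRowComparator()
        .compareRows(endKey, 0, endKey.length, row, 0, row.length) > 0;
  }

  public static void main(String[] args) {
    TableName table = TableName.valueOf("t1");
    System.out.println(rowIsBeforeEndKey(table, Bytes.toBytes("zzz"), Bytes.toBytes("aaa")));
  }
}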

View File: Bytes.java

@@ -123,6 +123,17 @@ public class Bytes {
// SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
public static final int ESTIMATED_HEAP_TAX = 16;
+ /**
+ * Returns length of the byte array, returning 0 if the array is null.
+ * Useful for calculating sizes.
+ * @param b byte array, which can be null
+ * @return 0 if b is null, otherwise returns length
+ */
+ final public static int len(byte[] b) {
+ return b == null ? 0 : b.length;
+ }
/**
* Byte array comparator class.
*/
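
This helper exists to collapse the null-guard ternaries deleted from the KeyValue constructors and ColumnRangeFilter above. A minimal usage sketch (the wrapper class is illustrative):

import static org.apache.hadoop.hbase.util.Bytes.len;

public class LenExample {
  public static void main(String[] args) {
    byte[] qualifier = null;
    // Old pattern, repeated across the KeyValue constructors above:
    int before = qualifier == null ? 0 : qualifier.length;
    // New pattern, same result for null and non-null arrays:
    int after = len(qualifier);
    System.out.println(before == after); // true
  }
}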

View File: AbstractHFileWriter.java

@@ -32,12 +32,12 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HConstants;
+ import org.apache.hadoop.hbase.KeyValue;
+ import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
- import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable;
/**
@@ -77,7 +77,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
protected long totalUncompressedBytes = 0;
/** Key comparator. Used to ensure we write in order. */
- protected final RawComparator<byte[]> comparator;
+ protected final KeyComparator comparator;
/** Meta block names. */
protected List<byte[]> metaNames = new ArrayList<byte[]>();
@@ -124,7 +124,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
this.blockEncoder = dataBlockEncoder != null
? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
this.comparator = comparator != null ? comparator
- : Bytes.BYTES_RAWCOMPARATOR;
+ : KeyValue.KEY_COMPARATOR;
closeOutputStream = path != null;
this.cacheConf = cacheConf;

View File: HFile.java

@@ -336,7 +336,7 @@ public class HFile {
protected Compression.Algorithm compression =
HFile.DEFAULT_COMPRESSION_ALGORITHM;
protected HFileDataBlockEncoder encoder = NoOpDataBlockEncoder.INSTANCE;
- protected KeyComparator comparator;
+ protected KeyComparator comparator = KeyValue.KEY_COMPARATOR;
protected InetSocketAddress[] favoredNodes;
protected ChecksumType checksumType = HFile.DEFAULT_CHECKSUM_TYPE;
protected int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;

View File: HFileWriterV2.java

@@ -173,28 +173,9 @@ public class HFileWriterV2 extends AbstractHFileWriter {
lastDataBlockOffset = outputStream.getPos();
fsBlockWriter.writeHeaderAndData(outputStream);
int onDiskSize = fsBlockWriter.getOnDiskSizeWithHeader();
- // Generate a shorter faked key into index block. For example, consider a block boundary
- // between the keys "the quick brown fox" and "the who test text". We can use "the r" as the
- // key for the index block entry since it is > all entries in the previous block and <= all
- // entries in subsequent blocks.
- if (comparator instanceof KeyComparator) {
- byte[] fakeKey = ((KeyComparator) comparator).getShortMidpointKey(
- lastKeyOfPreviousBlock, firstKeyInBlock);
- if (comparator.compare(fakeKey, firstKeyInBlock) > 0) {
- throw new IOException("Unexpected getShortMidpointKey result, fakeKey:"
- + Bytes.toStringBinary(fakeKey) + ", firstKeyInBlock:"
- + Bytes.toStringBinary(firstKeyInBlock));
- }
- if (lastKeyOfPreviousBlock != null && comparator.compare(lastKeyOfPreviousBlock,
- fakeKey) >= 0) {
- throw new IOException("Unexpected getShortMidpointKey result, lastKeyOfPreviousBlock:" +
- Bytes.toStringBinary(lastKeyOfPreviousBlock) + ", fakeKey:" +
- Bytes.toStringBinary(fakeKey));
- }
- dataBlockIndexWriter.addEntry(fakeKey, lastDataBlockOffset,onDiskSize);
- } else {
- dataBlockIndexWriter.addEntry(firstKeyInBlock, lastDataBlockOffset,onDiskSize);
- }
+ byte[] indexKey = comparator.calcIndexKey(lastKeyOfPreviousBlock, firstKeyInBlock);
+ dataBlockIndexWriter.addEntry(indexKey, lastDataBlockOffset, onDiskSize);
totalUncompressedBytes += fsBlockWriter.getUncompressedSizeWithHeader();
HFile.offerWriteLatency(System.nanoTime() - startTimeNs);
if (cacheConf.shouldCacheDataOnWrite()) {

View File: TestRegionObserverInterface.java

@@ -519,7 +519,6 @@ public class TestRegionObserverInterface {
byte[] family, byte[] qualifier) throws IOException {
HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
.withPath(fs, path)
- .withComparator(KeyValue.KEY_COMPARATOR)
.create();
long now = System.currentTimeMillis();
try {
@@ -532,14 +531,4 @@ public class TestRegionObserverInterface {
}
}
- private static byte [][] makeN(byte [] base, int n) {
- byte [][] ret = new byte[n][];
- for(int i=0;i<n;i++) {
- ret[i] = Bytes.add(base, Bytes.toBytes(String.format("%02d", i)));
- }
- return ret;
- }
}

View File: TestHalfStoreFileReader.java

@@ -30,9 +30,11 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
- import org.apache.hadoop.hbase.*;
- import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+ import org.apache.hadoop.hbase.HBaseTestingUtility;
+ import org.apache.hadoop.hbase.KeyValue;
+ import org.apache.hadoop.hbase.SmallTests;
+ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.util.Bytes;
@@ -83,7 +85,6 @@ public class TestHalfStoreFileReader {
HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
.withPath(fs, p)
.withBlockSize(1024)
- .withComparator(KeyValue.KEY_COMPARATOR)
.create();
// write some things.
@@ -149,7 +150,6 @@ public class TestHalfStoreFileReader {
HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
.withPath(fs, p)
.withBlockSize(1024)
- .withComparator(KeyValue.KEY_COMPARATOR)
.create();
// write some things.

View File: TestHFile.java

@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.io.compress.Compression;
@@ -214,6 +215,8 @@ public class TestHFile extends HBaseTestCase {
.withOutputStream(fout)
.withBlockSize(minBlockSize)
.withCompression(codec)
+ // NOTE: This test is dependent on this deprecated nonstandard comparator
+ .withComparator(new KeyValue.RawKeyComparator())
.create();
LOG.info(writer);
writeRecords(writer);

View File: TestHFileBlockIndex.java

@@ -489,7 +489,6 @@ public class TestHFileBlockIndex {
.withPath(fs, hfilePath)
.withBlockSize(SMALL_BLOCK_SIZE)
.withCompression(compr)
- .withComparator(KeyValue.KEY_COMPARATOR)
.create();
Random rand = new Random(19231737);

View File: TestHFilePerformance.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
@@ -165,6 +166,7 @@ public class TestHFilePerformance extends TestCase {
.withOutputStream(fout)
.withBlockSize(minBlockSize)
.withCompression(codecName)
+ .withComparator(new KeyValue.RawKeyComparator())
.create();
// Writing value in one shot.

View File: TestHFileSeek.java

@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
@@ -130,6 +131,7 @@ public class TestHFileSeek extends TestCase {
.withOutputStream(fout)
.withBlockSize(options.minBlockSize)
.withCompression(options.compress)
+ .withComparator(new KeyValue.RawKeyComparator())
.create();
try {
BytesWritable key = new BytesWritable();

View File: TestHFileWriterV2.java

@@ -98,7 +98,6 @@ public class TestHFileWriterV2 {
.withPath(fs, hfilePath)
.withBlockSize(4096)
.withCompression(compressAlgo)
- .withComparator(KeyValue.KEY_COMPARATOR)
.create();
Random rand = new Random(9713312); // Just a fixed seed.

View File: TestReseekTo.java

@@ -18,21 +18,20 @@
*/
package org.apache.hadoop.hbase.io.hfile;
+ import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.List;
import junit.framework.Assert;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
- import static org.junit.Assert.*;
/**
* Test {@link HFileScanner#reseekTo(byte[])}
*/
@@ -51,6 +50,8 @@ public class TestReseekTo {
TEST_UTIL.getConfiguration(), cacheConf)
.withOutputStream(fout)
.withBlockSize(4000)
+ // NOTE: This test is dependent on this deprecated nonstandard comparator
+ .withComparator(new KeyValue.RawKeyComparator())
.create();
int numberOfKeys = 1000;

View File: TestSeekTo.java

@@ -23,7 +23,9 @@ import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
+ import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.util.Bytes;
+ import org.apache.hadoop.io.RawComparator;
import org.junit.experimental.categories.Category;
/**
@@ -48,6 +50,8 @@ public class TestSeekTo extends HBaseTestCase {
HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
.withOutputStream(fout)
.withBlockSize(blocksize)
+ // NOTE: This test is dependent on this deprecated nonstandard comparator
+ .withComparator(new KeyValue.RawKeyComparator())
.create();
// 4 bytes * 3 * 2 for each key/value +
// 3 for keys, 15 for values = 42 (woot)

View File: TestLoadIncrementalHFiles.java

@@ -264,7 +264,6 @@ public class TestLoadIncrementalHFiles {
.withPath(fs, path)
.withBlockSize(BLOCKSIZE)
.withCompression(COMPRESSION)
- .withComparator(KeyValue.KEY_COMPARATOR)
.create();
long now = System.currentTimeMillis();
try {

View File: TestHRegionServerBulkLoad.java

@@ -92,7 +92,6 @@ public class TestHRegionServerBulkLoad {
.withPath(fs, path)
.withBlockSize(BLOCKSIZE)
.withCompression(COMPRESSION)
- .withComparator(KeyValue.KEY_COMPARATOR)
.create();
long now = System.currentTimeMillis();
try {

View File: TestAccessController.java

@@ -39,7 +39,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.Coprocessor;
- import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -49,6 +48,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.ServerName;
+ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
@@ -85,7 +85,6 @@ import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.util.TestTableName;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -845,7 +844,6 @@ public class TestAccessController extends SecureTestUtil {
try {
writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
.withPath(fs, path)
- .withComparator(KeyValue.KEY_COMPARATOR)
.create();
// subtract 2 since numRows doesn't include boundary keys
for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, true, numRows-2)) {