HBASE-19160 expose CellComparator as IA.Public

Author: Mike Drob
Date: 2017-11-02 16:16:43 -05:00
parent 47c614c706
commit cfddfcf23c
44 changed files with 133 additions and 151 deletions
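In every file the change follows one pattern: call sites that referenced the IA.Private singleton CellComparatorImpl.COMPARATOR now go through the new IA.Public factory CellComparator.getInstance(). A minimal before/after sketch of that pattern, assuming hbase-common on the classpath (the wrapper class is illustrative, not part of the commit):

import java.util.TreeSet;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;

public class ComparatorMigrationSketch {
  TreeSet<Cell> newSortedCellSet() {
    // Before: new TreeSet<>(CellComparatorImpl.COMPARATOR) -- IA.Private, off limits to client code
    // After: the IA.Public factory method introduced by this commit
    return new TreeSet<>(CellComparator.getInstance());
  }
}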

View File

@ -39,7 +39,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.PrivateCellUtil;
@ -336,7 +336,7 @@ public final class ConnectionUtils {
}
Cell[] rawCells = result.rawCells();
int index =
Arrays.binarySearch(rawCells, keepCellsAfter, CellComparatorImpl.COMPARATOR::compareWithoutRow);
Arrays.binarySearch(rawCells, keepCellsAfter, CellComparator.getInstance()::compareWithoutRow);
if (index < 0) {
index = -index - 1;
} else {
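The ConnectionUtils hunk above also shows that a single comparison method of the now-public interface can act as a standalone Comparator via a method reference. The same binarySearch idiom in isolation, as a sketch assuming hbase-common on the classpath (the class and method names here are hypothetical):

import java.util.Arrays;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;

public class PartialComparatorSketch {
  // Search a sorted run of cells from a single row, comparing family, qualifier,
  // timestamp and type while ignoring the row. Returns the match index, or
  // (-(insertion point) - 1) when the probe is absent, as Arrays.binarySearch does.
  static int search(Cell[] sortedCells, Cell probe) {
    return Arrays.binarySearch(sortedCells, probe,
      CellComparator.getInstance()::compareWithoutRow);
  }
}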

View File

@ -35,7 +35,6 @@ import java.util.TreeMap;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
@ -304,7 +303,7 @@ public class Result implements CellScannable, CellScanner {
qualifierNotNull, 0, qualifierNotNull.length);
// pos === ( -(insertion point) - 1)
int pos = Arrays.binarySearch(kvs, searchTerm, CellComparatorImpl.COMPARATOR);
int pos = Arrays.binarySearch(kvs, searchTerm, CellComparator.getInstance());
// will never be an exact match
if (pos < 0) {
pos = (pos+1) * -1;
@ -349,7 +348,7 @@ public class Result implements CellScannable, CellScanner {
qualifier, qoffset, qlength);
// pos === ( -(insertion point) - 1)
int pos = Arrays.binarySearch(kvs, searchTerm, CellComparatorImpl.COMPARATOR);
int pos = Arrays.binarySearch(kvs, searchTerm, CellComparator.getInstance());
// will never be an exact match
if (pos < 0) {
pos = (pos+1) * -1;

View File

@ -25,7 +25,7 @@ import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.yetus.audience.InterfaceAudience;
@ -86,7 +86,7 @@ public abstract class FilterListBase extends FilterBase {
}
protected int compareCell(Cell a, Cell b) {
int cmp = CellComparatorImpl.COMPARATOR.compare(a, b);
int cmp = CellComparator.getInstance().compare(a, b);
return reversed ? -1 * cmp : cmp;
}

View File

@ -24,8 +24,7 @@ import java.util.List;
import java.util.PriorityQueue;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@ -241,8 +240,7 @@ public class FuzzyRowFilter extends FilterBase {
}
boolean lessThan(Cell currentCell, byte[] nextRowKey) {
int compareResult =
CellComparatorImpl.COMPARATOR.compareRows(currentCell, nextRowKey, 0, nextRowKey.length);
int compareResult = CellComparator.getInstance().compareRows(currentCell, nextRowKey, 0, nextRowKey.length);
return (!isReversed() && compareResult < 0) || (isReversed() && compareResult > 0);
}

View File

@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@ -66,7 +66,7 @@ public class InclusiveStopFilter extends FilterBase {
public boolean filterRowKey(Cell firstRowCell) {
// if stopRowKey is <= buffer, then true, filter row.
if (filterAllRemaining()) return true;
int cmp = CellComparatorImpl.COMPARATOR.compareRows(firstRowCell, stopRowKey, 0, stopRowKey.length);
int cmp = CellComparator.getInstance().compareRows(firstRowCell, stopRowKey, 0, stopRowKey.length);
done = reversed ? cmp < 0 : cmp > 0;
return done;
}

View File

@ -25,9 +25,19 @@ import org.apache.yetus.audience.InterfaceStability;
* Comparator for comparing cells, with specialized methods that allow comparing individual
* cell components like row, family, qualifier and timestamp
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface CellComparator extends Comparator<Cell> {
/**
* A comparator for ordering cells in user-space tables. Useful when writing cells in sorted
* order, as is necessary for bulk import (e.g. via MapReduce)
* <p>
* CAUTION: This comparator may provide inaccurate ordering for cells from system tables,
* and should not be relied upon in that case.
*/
static CellComparator getInstance() {
return CellComparatorImpl.COMPARATOR;
}
/**
* Lexicographically compares two cells. The key part of the cell is taken for comparison which
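With getInstance() as the new IA.Public entry point, client code can obtain the comparator without referencing CellComparatorImpl at all, e.g. to pre-sort cells before a bulk import as the javadoc above suggests. A minimal usage sketch, assuming hbase-common on the classpath (the cell data is illustrative):

import java.util.Arrays;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class BulkImportSortSketch {
  public static void main(String[] args) {
    Cell[] cells = {
      new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v2")),
      new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v1")),
    };
    // Orders cells for user-space tables; per the CAUTION above, do not rely
    // on this ordering for cells from system tables.
    Arrays.sort(cells, CellComparator.getInstance());
    System.out.println(Bytes.toString(cells[0].getRowArray(),
      cells[0].getRowOffset(), cells[0].getRowLength())); // prints row1
  }
}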

View File

@ -1552,7 +1552,7 @@ public final class CellUtil {
}
public static boolean matchingTimestamp(Cell a, Cell b) {
return CellComparatorImpl.COMPARATOR.compareTimestamps(a.getTimestamp(), b.getTimestamp()) == 0;
return CellComparator.getInstance().compareTimestamps(a.getTimestamp(), b.getTimestamp()) == 0;
}
/**

View File

@ -96,7 +96,7 @@ public class KeyValue implements ExtendedCell {
/**
* Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
* of KeyValue only.
* @deprecated Use {@link CellComparatorImpl#COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0.
* @deprecated Use {@link CellComparator#getInstance()} instead. Deprecated for hbase 2.0, remove for hbase 3.0.
*/
@Deprecated
public static final KVComparator COMPARATOR = new KVComparator();
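For code still using the deprecated KVComparator, the replacement that the updated note points at is direct, since KeyValue implements Cell. A hedged sketch (the class name is hypothetical):

import java.util.Arrays;

import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;

public class KvComparatorMigrationSketch {
  static void sortKeyValues(KeyValue[] kvs) {
    // Was: Arrays.sort(kvs, KeyValue.COMPARATOR); // deprecated in 2.0, removed in 3.0
    Arrays.sort(kvs, CellComparator.getInstance());
  }
}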

View File

@ -25,7 +25,6 @@ import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.HConstants;
@ -882,7 +881,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
qualCommonPrefix);
comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix);
if (comp == 0) {
comp = CellComparatorImpl.COMPARATOR.compareTimestamps(seekCell, keyOnlyKV);
comp = CellComparator.getInstance().compareTimestamps(seekCell, keyOnlyKV);
if (comp == 0) {
// Compare types. Let the delete types sort ahead of puts;
// i.e. types

View File

@ -31,7 +31,7 @@ import org.junit.experimental.categories.Category;
@Category({MiscTests.class, SmallTests.class})
public class TestCellComparator {
private CellComparatorImpl comparator = CellComparatorImpl.COMPARATOR;
private CellComparator comparator = CellComparator.getInstance();
byte[] row1 = Bytes.toBytes("row1");
byte[] row2 = Bytes.toBytes("row2");
byte[] row_1_0 = Bytes.toBytes("row10");
@ -53,7 +53,7 @@ public class TestCellComparator {
kv1 = new KeyValue(row1, fam2, qual1, val);
kv2 = new KeyValue(row1, fam1, qual1, val);
assertTrue((CellComparatorImpl.COMPARATOR.compareFamilies(kv1, kv2) > 0));
assertTrue((comparator.compareFamilies(kv1, kv2) > 0));
kv1 = new KeyValue(row1, fam1, qual1, 1l, val);
kv2 = new KeyValue(row1, fam1, qual1, 2l, val);
@ -110,16 +110,17 @@ public class TestCellComparator {
kv = new KeyValue(r2, f1, q1, v);
buffer = ByteBuffer.wrap(kv.getBuffer());
Cell bbCell2 = new ByteBufferKeyValue(buffer, 0, buffer.remaining());
// compareColumns not on CellComparator so use Impl directly
assertEquals(0, CellComparatorImpl.COMPARATOR.compareColumns(bbCell1, bbCell2));
assertEquals(0, CellComparatorImpl.COMPARATOR.compareColumns(bbCell1, kv));
kv = new KeyValue(r2, f1, q2, v);
buffer = ByteBuffer.wrap(kv.getBuffer());
Cell bbCell3 = new ByteBufferKeyValue(buffer, 0, buffer.remaining());
assertEquals(0, CellComparatorImpl.COMPARATOR.compareFamilies(bbCell2, bbCell3));
assertTrue(CellComparatorImpl.COMPARATOR.compareQualifiers(bbCell2, bbCell3) < 0);
assertEquals(0, comparator.compareFamilies(bbCell2, bbCell3));
assertTrue(comparator.compareQualifiers(bbCell2, bbCell3) < 0);
assertTrue(CellComparatorImpl.COMPARATOR.compareColumns(bbCell2, bbCell3) < 0);
assertEquals(0, CellComparatorImpl.COMPARATOR.compareRows(bbCell2, bbCell3));
assertTrue(CellComparatorImpl.COMPARATOR.compareRows(bbCell1, bbCell2) < 0);
assertEquals(0, comparator.compareRows(bbCell2, bbCell3));
assertTrue(comparator.compareRows(bbCell1, bbCell2) < 0);
}
}

View File

@ -26,7 +26,7 @@ import java.util.Random;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.Tag;
@ -287,7 +287,7 @@ public class RedundantKVGenerator {
}
}
Collections.sort(result, CellComparatorImpl.COMPARATOR);
Collections.sort(result, CellComparator.getInstance());
return result;
}
@ -383,7 +383,7 @@ public class RedundantKVGenerator {
}
}
Collections.sort(result, CellComparatorImpl.COMPARATOR);
Collections.sort(result, CellComparator.getInstance());
return result;
}

View File

@ -37,7 +37,7 @@ import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
@ -84,7 +84,7 @@ public class IntegrationTestImportTsv extends Configured implements Tool {
public TestName name = new TestName();
protected static final Set<KeyValue> simple_expected =
new TreeSet<KeyValue>(CellComparatorImpl.COMPARATOR) {
new TreeSet<KeyValue>(CellComparator.getInstance()) {
private static final long serialVersionUID = 1L;
{
byte[] family = Bytes.toBytes("d");
@ -160,10 +160,8 @@ public class IntegrationTestImportTsv extends Configured implements Tool {
"Ran out of expected values prematurely!",
expectedIt.hasNext());
KeyValue expected = expectedIt.next();
assertTrue(
format("Scan produced surprising result. expected: <%s>, actual: %s",
expected, actual),
CellComparatorImpl.COMPARATOR.compare(expected, actual) == 0);
assertEquals("Scan produced surprising result", 0,
CellComparator.getInstance().compare(expected, actual));
}
}
assertFalse("Did not consume all expected values.", expectedIt.hasNext());

View File

@ -22,7 +22,7 @@ import java.io.IOException;
import java.util.TreeSet;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.MapReduceCell;
@ -42,7 +42,7 @@ public class CellSortReducer
protected void reduce(ImmutableBytesWritable row, Iterable<Cell> kvs,
Reducer<ImmutableBytesWritable, Cell, ImmutableBytesWritable, Cell>.Context context)
throws java.io.IOException, InterruptedException {
TreeSet<Cell> map = new TreeSet<>(CellComparatorImpl.COMPARATOR);
TreeSet<Cell> map = new TreeSet<>(CellComparator.getInstance());
for (Cell kv : kvs) {
try {
map.add(PrivateCellUtil.deepClone(kv));

View File

@ -46,7 +46,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
@ -402,12 +402,12 @@ public class HFileOutputFormat2
wl.writer =
new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), fs)
.withOutputDir(familydir).withBloomType(bloomType)
.withComparator(CellComparatorImpl.COMPARATOR).withFileContext(hFileContext).build();
.withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();
} else {
wl.writer =
new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs))
.withOutputDir(familydir).withBloomType(bloomType)
.withComparator(CellComparatorImpl.COMPARATOR).withFileContext(hFileContext)
.withComparator(CellComparator.getInstance()).withFileContext(hFileContext)
.withFavoredNodes(favoredNodes).build();
}

View File

@ -40,7 +40,7 @@ import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.PrivateCellUtil;
@ -218,10 +218,8 @@ public class Import extends Configured implements Tool {
}
@Override
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="EQ_COMPARETO_USE_OBJECT_EQUALS",
justification="This is wrong, yes, but we should be purging Writables, not fixing them")
public int compareTo(CellWritableComparable o) {
return CellComparatorImpl.COMPARATOR.compare(this.kv, ((CellWritableComparable)o).kv);
return CellComparator.getInstance().compare(this.kv, o.kv);
}
public static class CellWritableComparator extends WritableComparator {

View File

@ -27,7 +27,7 @@ import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
@ -77,7 +77,7 @@ public class PutSortReducer extends
"putsortreducer.row.threshold", 1L * (1<<30));
Iterator<Put> iter = puts.iterator();
while (iter.hasNext()) {
TreeSet<KeyValue> map = new TreeSet<>(CellComparatorImpl.COMPARATOR);
TreeSet<KeyValue> map = new TreeSet<>(CellComparator.getInstance());
long curSize = 0;
// stop at the end or the RAM threshold
List<Tag> tags = new ArrayList<>();

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
@ -587,18 +587,18 @@ public class SyncTable extends Configured implements Tool {
return -1; // target missing cell
}
int result = CellComparatorImpl.COMPARATOR.compareFamilies(c1, c2);
int result = CellComparator.getInstance().compareFamilies(c1, c2);
if (result != 0) {
return result;
}
result = CellComparatorImpl.COMPARATOR.compareQualifiers(c1, c2);
result = CellComparator.getInstance().compareQualifiers(c1, c2);
if (result != 0) {
return result;
}
// note timestamp comparison is inverted - more recent cells first
return CellComparatorImpl.COMPARATOR.compareTimestamps(c1, c2);
return CellComparator.getInstance().compareTimestamps(c1, c2);
}
@Override

View File

@ -27,7 +27,7 @@ import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
@ -144,7 +144,7 @@ public class TextSortReducer extends
"reducer.row.threshold", 1L * (1<<30));
Iterator<Text> iter = lines.iterator();
while (iter.hasNext()) {
Set<KeyValue> kvs = new TreeSet<>(CellComparatorImpl.COMPARATOR);
Set<KeyValue> kvs = new TreeSet<>(CellComparator.getInstance());
long curSize = 0;
// stop at the end or the RAM threshold
while (iter.hasNext() && curSize < threshold) {

View File

@ -110,7 +110,7 @@ public class FixedFileTrailer {
/** Raw key comparator class name in version 3 */
// We could write the actual class name from 2.0 onwards and handle BC
private String comparatorClassName = CellComparatorImpl.COMPARATOR.getClass().getName();
private String comparatorClassName = CellComparator.getInstance().getClass().getName();
/** The encryption key */
private byte[] encryptionKey;

View File

@ -49,7 +49,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.fs.HFileSystem;
@ -276,8 +275,7 @@ public class HFile {
protected FileSystem fs;
protected Path path;
protected FSDataOutputStream ostream;
protected CellComparator comparator =
CellComparatorImpl.COMPARATOR;
protected CellComparator comparator = CellComparator.getInstance();
protected InetSocketAddress[] favoredNodes;
private HFileContext fileContext;
protected boolean shouldDropBehind = false;

View File

@ -53,7 +53,7 @@ import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@ -380,7 +380,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
do {
Cell cell = scanner.getCell();
if (row != null && row.length != 0) {
int result = CellComparatorImpl.COMPARATOR.compareRows(cell, row, 0, row.length);
int result = CellComparator.getInstance().compareRows(cell, row, 0, row.length);
if (result > 0) {
break;
} else if (result < 0) {
@ -409,7 +409,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
}
// check if rows are in order
if (checkRow && pCell != null) {
if (CellComparatorImpl.COMPARATOR.compareRows(pCell, cell) > 0) {
if (CellComparator.getInstance().compareRows(pCell, cell) > 0) {
err.println("WARNING, previous row is greater then"
+ " current row\n\tfilename -> " + file + "\n\tprevious -> "
+ CellUtil.getCellKeyAsString(pCell) + "\n\tcurrent -> "
@ -425,7 +425,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
+ "\n\tfilename -> " + file + "\n\tkeyvalue -> "
+ CellUtil.getCellKeyAsString(cell));
}
if (pCell != null && CellComparatorImpl.COMPARATOR.compareFamilies(pCell, cell) != 0) {
if (pCell != null && CellComparator.getInstance().compareFamilies(pCell, cell) != 0) {
err.println("WARNING, previous kv has different family"
+ " compared to current key\n\tfilename -> " + file
+ "\n\tprevious -> " + CellUtil.getCellKeyAsString(pCell)
@ -619,7 +619,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
public void collect(Cell cell) {
valLen.update(cell.getValueLength());
if (prevCell != null &&
CellComparatorImpl.COMPARATOR.compareRows(prevCell, cell) != 0) {
CellComparator.getInstance().compareRows(prevCell, cell) != 0) {
// new row
collectRow();
}

View File

@ -34,7 +34,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.PrivateCellUtil;
@ -106,7 +105,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
private int avgValueLen = -1;
/** Key comparator */
private CellComparator comparator = CellComparatorImpl.COMPARATOR;
private CellComparator comparator = CellComparator.getInstance();
/** Size of this file. */
private final long fileSize;

View File

@ -36,7 +36,6 @@ import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.PrivateCellUtil;
@ -178,7 +177,7 @@ public class HFileWriterImpl implements HFile.Writer {
} else {
this.blockEncoder = NoOpDataBlockEncoder.INSTANCE;
}
this.comparator = comparator != null? comparator: CellComparatorImpl.COMPARATOR;
this.comparator = comparator != null ? comparator : CellComparator.getInstance();
closeOutputStream = path != null;
this.cacheConf = cacheConf;

View File

@ -42,8 +42,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.TableName;
@ -682,7 +681,7 @@ public final class MobUtils {
StoreFileWriter w = new StoreFileWriter.Builder(conf, writerCacheConf, fs)
.withFilePath(path)
.withComparator(CellComparatorImpl.COMPARATOR).withBloomType(bloomType)
.withComparator(CellComparator.getInstance()).withBloomType(bloomType)
.withMaxKeyCount(maxKeyCount).withFileContext(hFileContext).build();
return w;
}

View File

@ -48,6 +48,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@ -812,7 +813,7 @@ public class PartitionedMobCompactor extends MobCompactor {
List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(filesToCompact,
false, true, false, false, HConstants.LATEST_TIMESTAMP);
long ttl = HStore.determineTTLFromFamily(column);
ScanInfo scanInfo = new ScanInfo(conf, column, ttl, 0, CellComparatorImpl.COMPARATOR);
ScanInfo scanInfo = new ScanInfo(conf, column, ttl, 0, CellComparator.getInstance());
return new StoreScanner(scanInfo, scanType, scanners);
}

View File

@ -29,7 +29,6 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@ -64,7 +63,7 @@ public class DefaultMemStore extends AbstractMemStore {
* Default constructor. Used for tests.
*/
public DefaultMemStore() {
this(HBaseConfiguration.create(), CellComparatorImpl.COMPARATOR);
this(HBaseConfiguration.create(), CellComparator.getInstance());
}
/**

View File

@ -56,7 +56,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.HConstants;
@ -778,7 +777,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
+ CellUtil.getCellKeyAsString(prevCell) + " current="
+ CellUtil.getCellKeyAsString(cell));
}
if (CellComparatorImpl.COMPARATOR.compareFamilies(prevCell, cell) != 0) {
if (CellComparator.getInstance().compareFamilies(prevCell, cell) != 0) {
throw new InvalidHFileException("Previous key had different"
+ " family compared to current key: path=" + srcPath
+ " previous="

View File

@ -35,7 +35,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.PrivateCellUtil;
@ -372,7 +371,7 @@ public class StoreFileReader {
if (bloomFilterType == BloomType.ROW) {
keyIsAfterLast = (Bytes.BYTES_RAWCOMPARATOR.compare(key, lastBloomKey) > 0);
} else {
keyIsAfterLast = (CellComparatorImpl.COMPARATOR.compare(kvKey, lastBloomKeyOnlyKV)) > 0;
keyIsAfterLast = (CellComparator.getInstance().compare(kvKey, lastBloomKeyOnlyKV)) > 0;
}
}
@ -385,7 +384,7 @@ public class StoreFileReader {
// hbase:meta does not have blooms. So we need not have special interpretation
// of the hbase:meta cells. We can safely use Bytes.BYTES_RAWCOMPARATOR for ROW Bloom
if (keyIsAfterLast
&& (CellComparatorImpl.COMPARATOR.compare(rowBloomKey, lastBloomKeyOnlyKV)) > 0) {
&& (CellComparator.getInstance().compare(rowBloomKey, lastBloomKeyOnlyKV)) > 0) {
exists = false;
} else {
exists =

View File

@ -37,8 +37,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue;
@ -348,7 +346,7 @@ public class StoreFileWriter implements CellSink, ShipperListener {
private final CacheConfig cacheConf;
private final FileSystem fs;
private CellComparator comparator = CellComparatorImpl.COMPARATOR;
private CellComparator comparator = CellComparator.getInstance();
private BloomType bloomType = BloomType.NONE;
private long maxKeyCount = 0;
private Path dir;
@ -473,7 +471,7 @@ public class StoreFileWriter implements CellSink, ShipperListener {
}
if (comparator == null) {
comparator = CellComparatorImpl.COMPARATOR;
comparator = CellComparator.getInstance();
}
return new StoreFileWriter(fs, filePath,
conf, cacheConf, comparator, bloomType, maxKeyCount, favoredNodes, fileContext,

View File

@ -26,7 +26,7 @@ import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.client.RegionInfo;
@ -78,11 +78,11 @@ class FSWALEntry extends Entry {
@VisibleForTesting
static Set<byte[]> collectFamilies(List<Cell> cells) {
if (CollectionUtils.isEmpty(cells)) {
return Collections.<byte[]> emptySet();
return Collections.emptySet();
} else {
return cells.stream()
.filter(v -> !CellUtil.matchingFamily(v, WALEdit.METAFAMILY))
.collect(toCollection(() -> new TreeSet<>(CellComparatorImpl.COMPARATOR::compareFamilies)))
.collect(toCollection(() -> new TreeSet<>(CellComparator.getInstance()::compareFamilies)))
.stream()
.map(CellUtil::cloneFamily)
.collect(toCollection(() -> new TreeSet<>(Bytes.BYTES_COMPARATOR)));
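The collectFamilies() change above uses compareFamilies as a standalone comparator so the TreeSet keeps only one cell per column family before the family bytes are cloned out. The same deduplication idiom in isolation, as a sketch assuming hbase-common on the classpath (class and method names hypothetical):

import java.util.List;
import java.util.TreeSet;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;

public class FamilyDedupSketch {
  // Cells whose families compare equal collapse to a single entry.
  static TreeSet<Cell> onePerFamily(List<Cell> cells) {
    TreeSet<Cell> set = new TreeSet<>(CellComparator.getInstance()::compareFamilies);
    set.addAll(cells);
    return set;
  }
}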

View File

@ -27,7 +27,6 @@ import java.util.SortedSet;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.regionserver.NonReversedNonLazyKeyValueScanner;
/**
@ -41,7 +40,7 @@ public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
private Cell current;
public CollectionBackedScanner(SortedSet<Cell> set) {
this(set, CellComparatorImpl.COMPARATOR);
this(set, CellComparator.getInstance());
}
public CollectionBackedScanner(SortedSet<Cell> set,
@ -52,7 +51,7 @@ public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
}
public CollectionBackedScanner(List<Cell> list) {
this(list, CellComparatorImpl.COMPARATOR);
this(list, CellComparator.getInstance());
}
public CollectionBackedScanner(List<Cell> list,

View File

@ -24,6 +24,7 @@ import java.util.Locale;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@ -140,7 +141,7 @@ public class CompressionTest {
scanner.seekTo(); // position to the start of file
// Scanner does not do Cells yet. Do below for now till fixed.
cc = scanner.getCell();
if (CellComparatorImpl.COMPARATOR.compareRows(c, cc) != 0) {
if (CellComparator.getInstance().compareRows(c, cc) != 0) {
throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString());
}
} finally {

View File

@ -3538,7 +3538,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
int i;
for (i = 0; i < minLen
&& CellComparatorImpl.COMPARATOR.compare(expected.get(i), actual.get(i)) == 0;
&& CellComparator.getInstance().compare(expected.get(i), actual.get(i)) == 0;
++i) {}
if (additionalMsg == null) {

View File

@ -366,7 +366,7 @@ public class HFilePerformanceEvaluation {
writer = HFile.getWriterFactoryNoCache(conf)
.withPath(fs, mf)
.withFileContext(hFileContext)
.withComparator(CellComparatorImpl.COMPARATOR)
.withComparator(CellComparator.getInstance())
.create();
}

View File

@ -32,7 +32,7 @@ import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
@ -71,7 +71,7 @@ public class TestResult extends TestCase {
*/
public void testResultAsCellScanner() throws IOException {
Cell [] cells = genKVs(row, family, value, 1, 10);
Arrays.sort(cells, CellComparatorImpl.COMPARATOR);
Arrays.sort(cells, CellComparator.getInstance());
Result r = Result.create(cells);
assertSame(r, cells);
// Assert I run over same result multiple times.
@ -93,7 +93,7 @@ public class TestResult extends TestCase {
public void testBasicGetColumn() throws Exception {
KeyValue [] kvs = genKVs(row, family, value, 1, 100);
Arrays.sort(kvs, CellComparatorImpl.COMPARATOR);
Arrays.sort(kvs, CellComparator.getInstance());
Result r = Result.create(kvs);
@ -132,7 +132,7 @@ public class TestResult extends TestCase {
System.arraycopy(kvs1, 0, kvs, 0, kvs1.length);
System.arraycopy(kvs2, 0, kvs, kvs1.length, kvs2.length);
Arrays.sort(kvs, CellComparatorImpl.COMPARATOR);
Arrays.sort(kvs, CellComparator.getInstance());
Result r = Result.create(kvs);
for (int i = 0; i < 100; ++i) {
@ -149,7 +149,7 @@ public class TestResult extends TestCase {
public void testBasicGetValue() throws Exception {
KeyValue [] kvs = genKVs(row, family, value, 1, 100);
Arrays.sort(kvs, CellComparatorImpl.COMPARATOR);
Arrays.sort(kvs, CellComparator.getInstance());
Result r = Result.create(kvs);
@ -169,7 +169,7 @@ public class TestResult extends TestCase {
System.arraycopy(kvs1, 0, kvs, 0, kvs1.length);
System.arraycopy(kvs2, 0, kvs, kvs1.length, kvs2.length);
Arrays.sort(kvs, CellComparatorImpl.COMPARATOR);
Arrays.sort(kvs, CellComparator.getInstance());
Result r = Result.create(kvs);
for (int i = 0; i < 100; ++i) {
@ -183,7 +183,7 @@ public class TestResult extends TestCase {
public void testBasicLoadValue() throws Exception {
KeyValue [] kvs = genKVs(row, family, value, 1, 100);
Arrays.sort(kvs, CellComparatorImpl.COMPARATOR);
Arrays.sort(kvs, CellComparator.getInstance());
Result r = Result.create(kvs);
ByteBuffer loadValueBuffer = ByteBuffer.allocate(1024);
@ -208,7 +208,7 @@ public class TestResult extends TestCase {
System.arraycopy(kvs1, 0, kvs, 0, kvs1.length);
System.arraycopy(kvs2, 0, kvs, kvs1.length, kvs2.length);
Arrays.sort(kvs, CellComparatorImpl.COMPARATOR);
Arrays.sort(kvs, CellComparator.getInstance());
ByteBuffer loadValueBuffer = ByteBuffer.allocate(1024);
@ -291,7 +291,7 @@ public class TestResult extends TestCase {
KeyValue [] kvs = genKVs(Bytes.toBytes(rowSB.toString()), family,
Bytes.toBytes(valueSB.toString()), 1, n);
Arrays.sort(kvs, CellComparatorImpl.COMPARATOR);
Arrays.sort(kvs, CellComparator.getInstance());
ByteBuffer loadValueBuffer = ByteBuffer.allocate(1024);
Result r = Result.create(kvs);

View File

@ -31,7 +31,7 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@ -45,11 +45,9 @@ import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.testclassification.FilterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@ -1666,8 +1664,7 @@ public class TestFilter {
int i = 0;
for (boolean done = true; done; i++) {
done = scanner.next(results);
Arrays.sort(results.toArray(new Cell[results.size()]),
CellComparatorImpl.COMPARATOR);
Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance());
LOG.info("counter=" + i + ", " + results);
if (results.isEmpty()) break;
assertTrue("Scanned too many rows! Only expected " + expectedRows +
@ -1689,7 +1686,7 @@ public class TestFilter {
for (boolean done = true; done; i++) {
done = scanner.next(results);
Arrays.sort(results.toArray(new Cell[results.size()]),
CellComparatorImpl.COMPARATOR);
CellComparator.getInstance());
LOG.info("counter=" + i + ", " + results);
if(results.isEmpty()) break;
assertTrue("Scanned too many rows! Only expected " + expectedRows +
@ -1711,7 +1708,7 @@ public class TestFilter {
for (boolean done = true; done; row++) {
done = scanner.next(results);
Arrays.sort(results.toArray(new Cell[results.size()]),
CellComparatorImpl.COMPARATOR);
CellComparator.getInstance());
if(results.isEmpty()) break;
assertTrue("Scanned too many keys! Only expected " + kvs.length +
" total but already scanned " + (results.size() + idx) +
@ -1742,7 +1739,7 @@ public class TestFilter {
for (boolean more = true; more; row++) {
more = scanner.next(results);
Arrays.sort(results.toArray(new Cell[results.size()]),
CellComparatorImpl.COMPARATOR);
CellComparator.getInstance());
if(results.isEmpty()) break;
assertTrue("Scanned too many keys! Only expected " + kvs.length +
" total but already scanned " + (results.size() + idx) +

View File

@ -32,7 +32,7 @@ import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@ -493,13 +493,13 @@ public class TestFilterList {
public byte [] toByteArray() {return null;}
};
CellComparator comparator = CellComparator.getInstance();
// MUST PASS ONE
// Should take the min if given two hints
FilterList filterList = new FilterList(Operator.MUST_PASS_ONE,
Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } ));
assertEquals(0, CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null),
minKeyValue));
assertEquals(0, comparator.compare(filterList.getNextCellHint(null), minKeyValue));
// Should have no hint if any filter has no hint
filterList = new FilterList(Operator.MUST_PASS_ONE,
@ -513,8 +513,7 @@ public class TestFilterList {
// Should give max hint if its the only one
filterList = new FilterList(Operator.MUST_PASS_ONE,
Arrays.asList(new Filter[] { filterMaxHint, filterMaxHint }));
assertEquals(0,
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue));
assertEquals(0, comparator.compare(filterList.getNextCellHint(null), maxKeyValue));
// MUST PASS ALL
@ -522,31 +521,26 @@ public class TestFilterList {
filterList = new FilterList(Operator.MUST_PASS_ALL,
Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } ));
filterList.filterCell(null);
assertEquals(0,
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue));
assertEquals(0, comparator.compare(filterList.getNextCellHint(null), maxKeyValue));
filterList = new FilterList(Operator.MUST_PASS_ALL,
Arrays.asList(new Filter [] { filterMaxHint, filterMinHint } ));
filterList.filterCell(null);
assertEquals(0,
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue));
assertEquals(0, comparator.compare(filterList.getNextCellHint(null), maxKeyValue));
// Should have first hint even if a filter has no hint
filterList = new FilterList(Operator.MUST_PASS_ALL,
Arrays.asList(new Filter[] { filterNoHint, filterMinHint, filterMaxHint }));
filterList.filterCell(null);
assertEquals(0,
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue));
assertEquals(0, comparator.compare(filterList.getNextCellHint(null), maxKeyValue));
filterList = new FilterList(Operator.MUST_PASS_ALL,
Arrays.asList(new Filter[] { filterNoHint, filterMaxHint }));
filterList.filterCell(null);
assertEquals(0,
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue));
assertEquals(0, comparator.compare(filterList.getNextCellHint(null), maxKeyValue));
filterList = new FilterList(Operator.MUST_PASS_ALL,
Arrays.asList(new Filter[] { filterNoHint, filterMinHint }));
filterList.filterCell(null);
assertEquals(0,
CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), minKeyValue));
assertEquals(0, comparator.compare(filterList.getNextCellHint(null), minKeyValue));
}
/**

View File

@ -23,7 +23,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.CollectionBackedScanner;
@ -34,14 +34,14 @@ import org.apache.hadoop.hbase.util.CollectionBackedScanner;
* to be a store file scanner.
*/
public class KeyValueScanFixture extends CollectionBackedScanner {
public KeyValueScanFixture(CellComparatorImpl comparator, Cell... cells) {
public KeyValueScanFixture(CellComparator comparator, Cell... cells) {
super(comparator, cells);
}
public static List<KeyValueScanner> scanFixture(KeyValue[] ... kvArrays) {
ArrayList<KeyValueScanner> scanners = new ArrayList<>();
for (KeyValue [] kvs : kvArrays) {
scanners.add(new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, kvs));
scanners.add(new KeyValueScanFixture(CellComparator.getInstance(), kvs));
}
return scanners;
}

View File

@ -29,7 +29,7 @@ import java.util.SortedSet;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@ -113,9 +113,9 @@ public class TestCellFlatSet extends TestCase {
lowerOuterCell = new KeyValue(Bytes.toBytes(10), f, q, 10, v);
upperOuterCell = new KeyValue(Bytes.toBytes(50), f, q, 10, v);
ascCells = new Cell[] {kv1,kv2,kv3,kv4};
ascCbOnHeap = new CellArrayMap(CellComparatorImpl.COMPARATOR,ascCells,0,NUM_OF_CELLS,false);
ascCbOnHeap = new CellArrayMap(CellComparator.getInstance(), ascCells,0, NUM_OF_CELLS,false);
descCells = new Cell[] {kv4,kv3,kv2,kv1};
descCbOnHeap = new CellArrayMap(CellComparatorImpl.COMPARATOR,descCells,0,NUM_OF_CELLS,true);
descCbOnHeap = new CellArrayMap(CellComparator.getInstance(), descCells,0, NUM_OF_CELLS,true);
CONF.setBoolean(MemStoreLAB.USEMSLAB_KEY, true);
CONF.setFloat(MemStoreLAB.CHUNK_POOL_MAXSIZE_KEY, 0.2f);
@ -318,7 +318,7 @@ public class TestCellFlatSet extends TestCase {
idxOffset = ByteBufferUtils.putLong(idxBuffer, idxOffset, kv.getSequenceId()); // seqId
}
return new CellChunkMap(CellComparatorImpl.COMPARATOR,chunkArray,0,NUM_OF_CELLS,!asc);
return new CellChunkMap(CellComparator.getInstance(),chunkArray,0,NUM_OF_CELLS,!asc);
}
/* Create CellChunkMap with four cells inside the data jumbo chunk. This test is working only
@ -367,6 +367,6 @@ public class TestCellFlatSet extends TestCase {
dataOffset = ChunkCreator.SIZEOF_CHUNK_HEADER;
}
return new CellChunkMap(CellComparatorImpl.COMPARATOR,chunkArray,0,NUM_OF_CELLS,!asc);
return new CellChunkMap(CellComparator.getInstance(),chunkArray,0,NUM_OF_CELLS,!asc);
}
}

View File

@ -28,7 +28,6 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@ -88,8 +87,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
@Before
public void setUp() throws Exception {
compactingSetUp();
this.memstore = new MyCompactingMemStore(HBaseConfiguration.create(), CellComparatorImpl
.COMPARATOR,
this.memstore = new MyCompactingMemStore(HBaseConfiguration.create(), CellComparator.getInstance(),
store, regionServicesForStores, MemoryCompactionPolicy.EAGER);
}
@ -149,7 +147,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
// use case 3: first in snapshot second in kvset
this.memstore = new CompactingMemStore(HBaseConfiguration.create(),
CellComparatorImpl.COMPARATOR, store, regionServicesForStores,
CellComparator.getInstance(), store, regionServicesForStores,
MemoryCompactionPolicy.EAGER);
this.memstore.add(kv1.clone(), null);
// As compaction is starting in the background the repetition
@ -192,7 +190,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
Thread.sleep(1);
addRows(this.memstore);
Cell closestToEmpty = ((CompactingMemStore)this.memstore).getNextRow(KeyValue.LOWESTKEY);
assertTrue(CellComparatorImpl.COMPARATOR.compareRows(closestToEmpty,
assertTrue(CellComparator.getInstance().compareRows(closestToEmpty,
new KeyValue(Bytes.toBytes(0), System.currentTimeMillis())) == 0);
for (int i = 0; i < ROW_COUNT; i++) {
Cell nr = ((CompactingMemStore)this.memstore).getNextRow(new KeyValue(Bytes.toBytes(i),
@ -200,7 +198,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
if (i + 1 == ROW_COUNT) {
assertEquals(nr, null);
} else {
assertTrue(CellComparatorImpl.COMPARATOR.compareRows(nr,
assertTrue(CellComparator.getInstance().compareRows(nr,
new KeyValue(Bytes.toBytes(i + 1), System.currentTimeMillis())) == 0);
}
}
@ -218,7 +216,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
Cell left = results.get(0);
byte[] row1 = Bytes.toBytes(rowId);
assertTrue("Row name",
CellComparatorImpl.COMPARATOR.compareRows(left, row1, 0, row1.length) == 0);
CellComparator.getInstance().compareRows(left, row1, 0, row1.length) == 0);
assertEquals("Count of columns", QUALIFIER_COUNT, results.size());
List<Cell> row = new ArrayList<>();
for (Cell kv : results) {

View File

@ -24,7 +24,7 @@ import java.io.IOException;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
@ -46,7 +46,7 @@ public class TestKeyValueScanFixture extends TestCase {
KeyValueTestUtil.create("RowB", "family", "qf1",
10, KeyValue.Type.Put, "value-10")
};
KeyValueScanner scan = new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, kvs);
KeyValueScanner scan = new KeyValueScanFixture(CellComparator.getInstance(), kvs);
KeyValue kv = KeyValueUtil.createFirstOnRow(Bytes.toBytes("RowA"));
// should seek to this:

View File

@ -43,7 +43,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@ -76,7 +75,7 @@ public class TestStoreScanner {
private static final byte[] CF = Bytes.toBytes(CF_STR);
static Configuration CONF = HBaseConfiguration.create();
private ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, Integer.MAX_VALUE, Long.MAX_VALUE,
KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false);
KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.getInstance(), false);
/**
* From here on down, we have a bunch of defines and specific CELL_GRID of Cells. The
@ -162,7 +161,7 @@ public class TestStoreScanner {
CellGridStoreScanner(final Scan scan, ScanInfo scanInfo) throws IOException {
super(scan, scanInfo, scan.getFamilyMap().get(CF), Arrays.<KeyValueScanner> asList(
new KeyValueScanner[] { new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, CELL_GRID) }));
new KeyValueScanner[] { new KeyValueScanFixture(CellComparator.getInstance(), CELL_GRID) }));
}
protected void resetKVHeap(List<? extends KeyValueScanner> scanners,
@ -225,7 +224,7 @@ public class TestStoreScanner {
CellWithVersionsStoreScanner(final Scan scan, ScanInfo scanInfo) throws IOException {
super(scan, scanInfo, scan.getFamilyMap().get(CF),
Arrays.<KeyValueScanner> asList(new KeyValueScanner[] {
new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, CELL_WITH_VERSIONS) }));
new KeyValueScanFixture(CellComparator.getInstance(), CELL_WITH_VERSIONS) }));
}
protected boolean trySkipToNextColumn(Cell cell) throws IOException {
@ -253,7 +252,7 @@ public class TestStoreScanner {
CellWithVersionsNoOptimizeStoreScanner(Scan scan, ScanInfo scanInfo) throws IOException {
super(scan, scanInfo, scan.getFamilyMap().get(CF),
Arrays.<KeyValueScanner> asList(new KeyValueScanner[] {
new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, CELL_WITH_VERSIONS) }));
new KeyValueScanFixture(CellComparator.getInstance(), CELL_WITH_VERSIONS) }));
}
protected boolean trySkipToNextColumn(Cell cell) throws IOException {
@ -457,7 +456,7 @@ public class TestStoreScanner {
};
List<KeyValueScanner> scanners = Arrays.<KeyValueScanner>asList(
new KeyValueScanner[] {
new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, kvs)
new KeyValueScanFixture(CellComparator.getInstance(), kvs)
});
Scan scanSpec = new Scan().withStartRow(Bytes.toBytes(r1));
scanSpec.setTimeRange(0, 6);
@ -508,7 +507,7 @@ public class TestStoreScanner {
};
List<KeyValueScanner> scanners = Arrays.asList(
new KeyValueScanner[] {
new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, kvs)
new KeyValueScanFixture(CellComparator.getInstance(), kvs)
});
Scan scanSpec = new Scan().withStartRow(Bytes.toBytes("R1"));
@ -804,7 +803,7 @@ public class TestStoreScanner {
Scan scan = new Scan();
scan.readVersions(1);
ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, 1, 500, KeepDeletedCells.FALSE,
HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false);
HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.getInstance(), false);
try (StoreScanner scanner = new StoreScanner(scan, scanInfo, null, scanners)) {
List<Cell> results = new ArrayList<>();
assertEquals(true, scanner.next(results));
@ -868,7 +867,7 @@ public class TestStoreScanner {
scan.readVersions(1);
// scanner with ttl equal to 500
ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, 1, 500, KeepDeletedCells.FALSE,
HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false);
HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.getInstance(), false);
try (StoreScanner scanner = new StoreScanner(scan, scanInfo, null, scanners)) {
List<Cell> results = new ArrayList<>();
assertEquals(true, scanner.next(results));
@ -930,7 +929,7 @@ public class TestStoreScanner {
KeepDeletedCells.FALSE /* keepDeletedCells */,
HConstants.DEFAULT_BLOCKSIZE /* block size */,
200, /* timeToPurgeDeletes */
CellComparatorImpl.COMPARATOR, false);
CellComparator.getInstance(), false);
try (StoreScanner scanner =
new StoreScanner(scanInfo, OptionalInt.of(2), ScanType.COMPACT_DROP_DELETES, scanners)) {
List<Cell> results = new ArrayList<>();
@ -959,7 +958,7 @@ public class TestStoreScanner {
create("R1", "cf", "a", now - 10, KeyValue.Type.Put, "dont-care"), };
List<KeyValueScanner> scanners = scanFixture(kvs);
ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, 1, 500, KeepDeletedCells.FALSE,
HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false);
HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.getInstance(), false);
try (StoreScanner storeScanner = new StoreScanner(scanInfo, OptionalInt.empty(),
ScanType.COMPACT_RETAIN_DELETES, scanners)) {
assertFalse(storeScanner.isScanUsePread());

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.regionserver.querymatcher;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
@ -45,7 +45,7 @@ public class AbstractTestScanQueryMatcher {
protected Get get;
protected long ttl = Long.MAX_VALUE;
protected CellComparatorImpl rowComparator;
protected CellComparator rowComparator;
protected Scan scan;
@Before
@ -72,6 +72,6 @@ public class AbstractTestScanQueryMatcher {
get.addColumn(fam2, col5);
this.scan = new Scan(get);
rowComparator = CellComparatorImpl.COMPARATOR;
rowComparator = CellComparator.getInstance();
}
}

View File

@ -917,7 +917,7 @@ class HBaseContext(@transient sc: SparkContext,
new WriterLength(0,
new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs))
.withBloomType(BloomType.valueOf(familyOptions.bloomType))
.withComparator(CellComparatorImpl.COMPARATOR).withFileContext(hFileContext)
.withComparator(CellComparator.getInstance()).withFileContext(hFileContext)
.withFilePath(new Path(familydir, "_" + UUID.randomUUID.toString.replaceAll("-", "")))
.withFavoredNodes(favoredNodes).build())