HBASE-10930 Change Filters and GetClosestRowBeforeTracker to work with Cells (Ram)

Ramkrishna 2014-07-15 22:07:00 +05:30
parent c694ec11df
commit 995a5a6c68
18 changed files with 111 additions and 127 deletions

View File: ColumnPaginationFilter.java

@@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
@@ -144,10 +144,10 @@ public class ColumnPaginationFilter extends FilterBase
}
@Override
public Cell getNextCellHint(Cell kv) {
public Cell getNextCellHint(Cell cell) {
return KeyValueUtil.createFirstOnRow(
kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), kv.getFamilyArray(),
kv.getFamilyOffset(), kv.getFamilyLength(), columnOffset, 0, columnOffset.length);
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), cell.getFamilyArray(),
cell.getFamilyOffset(), cell.getFamilyLength(), columnOffset, 0, columnOffset.length);
}
@Override

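Most of this commit has this mechanical shape: filter parameters are renamed from kv to cell, and the KeyValueUtil.ensureKeyValue() round trips seen in later files are replaced by reading the (array, offset, length) triplets straight off the Cell interface. A hedged sketch of the two Cell access styles (class and method names are illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

final class CellAccessSketch {
  // Copying accessor: allocates a fresh byte[] holding only the row key.
  static byte[] rowCopy(Cell cell) {
    return CellUtil.cloneRow(cell);
  }

  // Zero-copy comparison: runs against the cell's backing array in place,
  // which is the style the filters in this commit now use.
  static boolean rowEquals(Cell cell, byte[] row) {
    return Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
        row, 0, row.length);
  }
}
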
View File: ColumnPrefixFilter.java

@@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.common.base.Preconditions;
@@ -130,10 +130,10 @@ public class ColumnPrefixFilter extends FilterBase {
}
@Override
public Cell getNextCellHint(Cell kv) {
public Cell getNextCellHint(Cell cell) {
return KeyValueUtil.createFirstOnRow(
kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), kv.getFamilyArray(),
kv.getFamilyOffset(), kv.getFamilyLength(), prefix, 0, prefix.length);
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), cell.getFamilyArray(),
cell.getFamilyOffset(), cell.getFamilyLength(), prefix, 0, prefix.length);
}
@Override

View File: ColumnRangeFilter.java

@@ -21,9 +21,7 @@ package org.apache.hadoop.hbase.filter;
import static org.apache.hadoop.hbase.util.Bytes.len;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.util.ByteStringer;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.ArrayList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -31,9 +29,11 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.ArrayList;
import com.google.common.base.Preconditions;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used for selecting only those keys with columns that are
@@ -216,9 +216,9 @@ public class ColumnRangeFilter extends FilterBase {
}
@Override
public Cell getNextCellHint(Cell kv) {
return KeyValueUtil.createFirstOnRow(kv.getRowArray(), kv.getRowOffset(), kv
.getRowLength(), kv.getFamilyArray(), kv.getFamilyOffset(), kv
public Cell getNextCellHint(Cell cell) {
return KeyValueUtil.createFirstOnRow(cell.getRowArray(), cell.getRowOffset(), cell
.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell
.getFamilyLength(), this.minColumn, 0, len(this.minColumn));
}

View File: DependentColumnFilter.java

@@ -30,8 +30,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
@@ -137,20 +135,18 @@ public class DependentColumnFilter extends CompareFilter {
@Override
public ReturnCode filterKeyValue(Cell c) {
// TODO make matching Column a cell method or CellUtil method.
KeyValue v = KeyValueUtil.ensureKeyValue(c);
// Check if the column and qualifier match
if (!CellUtil.matchingColumn(v, this.columnFamily, this.columnQualifier)) {
if (!CellUtil.matchingColumn(c, this.columnFamily, this.columnQualifier)) {
// include non-matches for the time being, they'll be discarded afterwards
return ReturnCode.INCLUDE;
}
// If it doesn't pass the op, skip it
if (comparator != null
&& doCompare(compareOp, comparator, v.getValueArray(), v.getValueOffset(),
v.getValueLength()))
&& doCompare(compareOp, comparator, c.getValueArray(), c.getValueOffset(),
c.getValueLength()))
return ReturnCode.SKIP;
stampSet.add(v.getTimestamp());
stampSet.add(c.getTimestamp());
if(dropDependentColumn) {
return ReturnCode.SKIP;
}

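Here the ensureKeyValue copy really does disappear: CellUtil.matchingColumn(Cell, byte[], byte[]) runs the family/qualifier check against the cell directly, and the value and timestamp are read through Cell accessors. A hedged sketch of the match-then-compare-value pattern these filters now share (columnEquals and its arguments are illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

final class ColumnMatchSketch {
  // True when the cell sits in family:qualifier and carries exactly `expected`.
  static boolean columnEquals(Cell c, byte[] family, byte[] qualifier, byte[] expected) {
    if (!CellUtil.matchingColumn(c, family, qualifier)) {
      return false;
    }
    return Bytes.equals(c.getValueArray(), c.getValueOffset(), c.getValueLength(),
        expected, 0, expected.length);
  }
}
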
View File: Filter.java

@@ -223,7 +223,7 @@ public abstract class Filter {
* seek to next.
* @throws IOException in case an I/O or a filter-specific failure needs to be signaled.
*/
abstract public Cell getNextCellHint(final Cell currentKV) throws IOException;
abstract public Cell getNextCellHint(final Cell currentCell) throws IOException;
/**
* Check that given column family is essential for filter to check row. Most filters always return

View File: FilterBase.java

@@ -24,7 +24,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -143,10 +142,8 @@ public abstract class FilterBase extends Filter {
*
* @inheritDoc
*/
public Cell getNextCellHint(Cell currentKV) throws IOException {
// Old filters based off of this class will override KeyValue getNextKeyHint(KeyValue).
// Thus to maintain compatibility we need to call the old version.
return getNextKeyHint(KeyValueUtil.ensureKeyValue(currentKV));
public Cell getNextCellHint(Cell currentCell) throws IOException {
return null;
}
/**

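This is a behavioral change, not just a rename: FilterBase.getNextCellHint() no longer forwards to the deprecated getNextKeyHint(KeyValue) and instead reports "no hint" (null); only FilterWrapper, a few files below, keeps the old delegation for wrapped legacy filters. A custom filter that drives seeks must therefore override the Cell variant itself. A minimal sketch, where RowSkippingFilter and its nextRow field are illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.filter.FilterBase;

public class RowSkippingFilter extends FilterBase {
  private final byte[] nextRow; // illustrative: the row this filter jumps to

  public RowSkippingFilter(byte[] nextRow) {
    this.nextRow = nextRow;
  }

  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    return ReturnCode.SEEK_NEXT_USING_HINT; // sketch: always request a seek
  }

  @Override
  public Cell getNextCellHint(Cell currentCell) throws IOException {
    // Override the Cell variant; the base class would now return null here.
    return KeyValueUtil.createFirstOnRow(nextRow);
  }
}
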
View File: FilterList.java

@@ -26,6 +26,7 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -70,7 +71,7 @@ final public class FilterList extends Filter {
private Filter seekHintFilter = null;
/** Reference Cell used by {@link #transformCell(Cell)} for validation purposes. */
private Cell referenceKV = null;
private Cell referenceCell = null;
/**
* When filtering a given Cell in {@link #filterKeyValue(Cell)},
@@ -79,7 +80,7 @@ final public class FilterList extends Filter {
* Individual filters transformation are applied only when the filter includes the Cell.
* Transformations are composed in the order specified by {@link #filters}.
*/
private Cell transformedKV = null;
private Cell transformedCell = null;
/**
* Constructor that takes a set of {@link Filter}s. The default operator
@@ -211,8 +212,12 @@ final public class FilterList extends Filter {
}
@Override
public Cell transformCell(Cell v) throws IOException {
return transform(KeyValueUtil.ensureKeyValue(v));
public Cell transformCell(Cell c) throws IOException {
if (!CellComparator.equals(c, referenceCell)) {
throw new IllegalStateException("Reference Cell: " + this.referenceCell + " does not match: "
+ c);
}
return this.transformedCell;
}
/**
@@ -226,22 +231,22 @@ final public class FilterList extends Filter {
@Override
public KeyValue transform(KeyValue v) throws IOException {
// transform() is expected to follow an inclusive filterKeyValue() immediately:
if (!v.equals(this.referenceKV)) {
if (!v.equals(this.referenceCell)) {
throw new IllegalStateException(
"Reference Cell: " + this.referenceKV + " does not match: " + v);
"Reference Cell: " + this.referenceCell + " does not match: " + v);
}
return KeyValueUtil.ensureKeyValue(this.transformedKV);
return KeyValueUtil.ensureKeyValue(this.transformedCell);
}
@Override
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",
justification="Intentional")
public ReturnCode filterKeyValue(Cell v) throws IOException {
this.referenceKV = v;
public ReturnCode filterKeyValue(Cell c) throws IOException {
this.referenceCell = c;
// Accumulates successive transformation of every filter that includes the Cell:
Cell transformed = v;
Cell transformed = c;
ReturnCode rc = operator == Operator.MUST_PASS_ONE?
ReturnCode.SKIP: ReturnCode.INCLUDE;
@@ -250,7 +255,7 @@ final public class FilterList extends Filter {
if (filter.filterAllRemaining()) {
return ReturnCode.NEXT_ROW;
}
ReturnCode code = filter.filterKeyValue(v);
ReturnCode code = filter.filterKeyValue(c);
switch (code) {
// Override INCLUDE and continue to evaluate.
case INCLUDE_AND_NEXT_COL:
@@ -269,7 +274,7 @@ final public class FilterList extends Filter {
continue;
}
switch (filter.filterKeyValue(v)) {
switch (filter.filterKeyValue(c)) {
case INCLUDE:
if (rc != ReturnCode.INCLUDE_AND_NEXT_COL) {
rc = ReturnCode.INCLUDE;
@@ -296,7 +301,7 @@ final public class FilterList extends Filter {
}
// Save the transformed Cell for transform():
this.transformedKV = transformed;
this.transformedCell = transformed;
return rc;
}
@@ -401,16 +406,16 @@ final public class FilterList extends Filter {
}
@Override
public Cell getNextCellHint(Cell currentKV) throws IOException {
public Cell getNextCellHint(Cell currentCell) throws IOException {
Cell keyHint = null;
if (operator == Operator.MUST_PASS_ALL) {
keyHint = seekHintFilter.getNextCellHint(currentKV);
keyHint = seekHintFilter.getNextCellHint(currentCell);
return keyHint;
}
// If any condition can pass, we need to keep the min hint
for (Filter filter : filters) {
Cell curKeyHint = filter.getNextCellHint(currentKV);
Cell curKeyHint = filter.getNextCellHint(currentCell);
if (curKeyHint == null) {
// If we ever don't have a hint and this is must-pass-one, then no hint
return null;

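The renamed referenceCell/transformedCell fields encode a strict calling order: transformCell(c) is valid only immediately after an inclusive filterKeyValue(c) on the same cell, now verified with CellComparator.equals rather than KeyValue equality. A sketch of a caller honoring that contract (the helper class is illustrative):

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.filter.FilterList;

final class FilterListContractSketch {
  // Returns the composed transform for an included cell, null when excluded.
  static Cell includeAndTransform(FilterList list, Cell c) throws IOException {
    ReturnCode rc = list.filterKeyValue(c); // records c as the reference cell
    if (rc == ReturnCode.INCLUDE || rc == ReturnCode.INCLUDE_AND_NEXT_COL) {
      return list.transformCell(c); // same cell, so the check passes
    }
    return null;
  }
}
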
View File: FilterWrapper.java

@@ -20,11 +20,9 @@
package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -112,10 +110,10 @@ final public class FilterWrapper extends Filter {
* Old filter wrapper descendants will implement KV getNextKeyHint(KV) so we should call it.
*/
@Override
public Cell getNextCellHint(Cell currentKV) throws IOException {
public Cell getNextCellHint(Cell currentCell) throws IOException {
// Old filters based off of this class will override KeyValue getNextKeyHint(KeyValue).
// Thus to maintain compatibility we need to call the old version.
return this.getNextKeyHint(KeyValueUtil.ensureKeyValue(currentKV));
return this.getNextKeyHint(KeyValueUtil.ensureKeyValue(currentCell));
}
@Override

View File: FuzzyRowFilter.java

@@ -17,11 +17,13 @@
*/
package org.apache.hadoop.hbase.filter;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
@@ -30,9 +32,7 @@ import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Filters data based on fuzzy row key. Performs fast-forwards during scanning.
@@ -74,16 +74,12 @@ public class FuzzyRowFilter extends FilterBase {
// TODO: possible improvement: save which fuzzy row key to use when providing a hint
@Override
public ReturnCode filterKeyValue(Cell kv) {
// TODO add getRow() equivalent to Cell or change satisfies to take b[],o,l style args.
KeyValue v = KeyValueUtil.ensureKeyValue(kv);
byte[] rowKey = v.getRow();
public ReturnCode filterKeyValue(Cell c) {
// assigning "worst" result first and looking for better options
SatisfiesCode bestOption = SatisfiesCode.NO_NEXT;
for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
SatisfiesCode satisfiesCode =
satisfies(rowKey, fuzzyData.getFirst(), fuzzyData.getSecond());
SatisfiesCode satisfiesCode = satisfies(c.getRowArray(), c.getRowOffset(),
c.getRowLength(), fuzzyData.getFirst(), fuzzyData.getSecond());
if (satisfiesCode == SatisfiesCode.YES) {
return ReturnCode.INCLUDE;
}
@@ -103,16 +99,13 @@ public class FuzzyRowFilter extends FilterBase {
}
@Override
public Cell getNextCellHint(Cell currentKV) {
// TODO make matching Column a cell method or CellUtil method.
KeyValue v = KeyValueUtil.ensureKeyValue(currentKV);
byte[] rowKey = v.getRow();
public Cell getNextCellHint(Cell currentCell) {
byte[] nextRowKey = null;
// Searching for the "smallest" row key that satisfies at least one fuzzy row key
for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
byte[] nextRowKeyCandidate = getNextForFuzzyRule(rowKey,
fuzzyData.getFirst(), fuzzyData.getSecond());
byte[] nextRowKeyCandidate = getNextForFuzzyRule(currentCell.getRowArray(),
currentCell.getRowOffset(), currentCell.getRowLength(), fuzzyData.getFirst(),
fuzzyData.getSecond());
if (nextRowKeyCandidate == null) {
continue;
}
@@ -124,10 +117,9 @@ public class FuzzyRowFilter extends FilterBase {
if (nextRowKey == null) {
// SHOULD NEVER happen
// TODO: is there a better way than throw exception? (stop the scanner?)
throw new IllegalStateException("No next row key that satisfies fuzzy exists when" +
" getNextKeyHint() is invoked." +
" Filter: " + this.toString() +
" currentKV: " + currentKV.toString());
throw new IllegalStateException("No next row key that satisfies fuzzy exists when"
+ " getNextKeyHint() is invoked." + " Filter: " + this.toString() + " currentKV: "
+ KeyValueUtil.ensureKeyValue(currentCell).toString());
}
return KeyValueUtil.createFirstOnRow(nextRowKey);

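With satisfies() and getNextForFuzzyRule() now taking (array, offset, length) arguments, the per-cell getRow() copy is gone while the public API stays the same. A usage sketch, assuming the mask convention of this era (0 = byte must match, 1 = any byte is accepted at that position):

import java.util.Arrays;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

final class FuzzyScanSketch {
  // Match rows shaped like "????_2014": first four bytes free, suffix fixed.
  static Scan fuzzyScan() {
    byte[] template = Bytes.toBytes("0000_2014");
    byte[] mask = new byte[] { 1, 1, 1, 1, 0, 0, 0, 0, 0 };
    Scan scan = new Scan();
    scan.setFilter(new FuzzyRowFilter(
        Arrays.asList(new Pair<byte[], byte[]>(template, mask))));
    return scan;
  }
}
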
View File: KeyOnlyFilter.java

@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import com.google.common.base.Preconditions;
@@ -54,6 +53,8 @@ public class KeyOnlyFilter extends FilterBase {
// TODO Move to KeyValueUtil
// TODO make matching Column a cell method or CellUtil method.
// Even if we want to make use of KeyValue.KeyOnlyKeyValue we need to convert
// the cell to KV so that we can make use of kv.getKey() to form the key part
KeyValue v = KeyValueUtil.ensureKeyValue(kv);
return v.createKeyOnly(this.lenAsVal);

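This is one of the conversions that deliberately survives: createKeyOnly() works on KeyValue's contiguous key bytes, so per the TODOs above the cell is still materialized here. A sketch of the retained pattern (the helper class is illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;

final class KeyOnlySketch {
  // Strip the value, keeping the key (optionally storing the old value length).
  static KeyValue keyOnly(Cell c, boolean lenAsVal) {
    KeyValue kv = KeyValueUtil.ensureKeyValue(c); // copies unless already a KeyValue
    return kv.createKeyOnly(lenAsVal);
  }
}
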
View File: MultipleColumnPrefixFilter.java

@@ -17,7 +17,11 @@
*/
package org.apache.hadoop.hbase.filter;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.TreeSet;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
@@ -27,10 +31,7 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.TreeSet;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* This filter is used for selecting only those keys with columns that matches
@@ -154,10 +155,10 @@ public class MultipleColumnPrefixFilter extends FilterBase {
}
@Override
public Cell getNextCellHint(Cell kv) {
public Cell getNextCellHint(Cell cell) {
return KeyValueUtil.createFirstOnRow(
kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), kv.getFamilyArray(),
kv.getFamilyOffset(), kv.getFamilyLength(), hint, 0, hint.length);
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), cell.getFamilyArray(),
cell.getFamilyOffset(), cell.getFamilyLength(), hint, 0, hint.length);
}
public TreeSet<byte []> createTreeSet() {

View File: SingleColumnValueExcludeFilter.java

@@ -28,8 +28,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -108,10 +106,9 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
public void filterRowCells(List<Cell> kvs) {
Iterator<? extends Cell> it = kvs.iterator();
while (it.hasNext()) {
KeyValue kv = KeyValueUtil.ensureKeyValue(it.next());
// If the current column is actually the tested column,
// we will skip it instead.
if (CellUtil.matchingColumn(kv, this.columnFamily, this.columnQualifier)) {
if (CellUtil.matchingColumn(it.next(), this.columnFamily, this.columnQualifier)) {
it.remove();
}
}

View File: SingleColumnValueFilter.java

@@ -29,8 +29,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -172,9 +170,6 @@ public class SingleColumnValueFilter extends FilterBase {
@Override
public ReturnCode filterKeyValue(Cell c) {
// TODO get rid of this.
KeyValue keyValue = KeyValueUtil.ensureKeyValue(c);
// System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + Bytes.toString(keyValue.getValue()));
if (this.matchedColumn) {
// We already found and matched the single column, all keys now pass
@@ -183,12 +178,12 @@ public class SingleColumnValueFilter extends FilterBase {
// We found but did not match the single column, skip to next row
return ReturnCode.NEXT_ROW;
}
if (!CellUtil.matchingColumn(keyValue, this.columnFamily, this.columnQualifier)) {
if (!CellUtil.matchingColumn(c, this.columnFamily, this.columnQualifier)) {
return ReturnCode.INCLUDE;
}
foundColumn = true;
if (filterColumnValue(keyValue.getValueArray(),
keyValue.getValueOffset(), keyValue.getValueLength())) {
if (filterColumnValue(c.getValueArray(),
c.getValueOffset(), c.getValueLength())) {
return this.latestVersionOnly? ReturnCode.NEXT_ROW: ReturnCode.INCLUDE;
}
this.matchedColumn = true;

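Construction of this filter is untouched by the commit; only the internal match now reads the candidate value through Cell offsets. A usage sketch with an illustrative column and value:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

final class ScvfSketch {
  // Keep only rows whose cf:qual column equals "value".
  static Scan scanMatching() {
    Scan scan = new Scan();
    scan.setFilter(new SingleColumnValueFilter(
        Bytes.toBytes("cf"), Bytes.toBytes("qual"),
        CompareOp.EQUAL, Bytes.toBytes("value")));
    return scan;
  }
}
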
View File: CellComparator.java

@@ -353,4 +353,14 @@ public class CellComparator implements Comparator<Cell>, Serializable{
return 0;
}
/**
* Counterpart of the KeyValue.RowOnlyComparator
*/
public static class RowComparator extends CellComparator {
@Override
public int compare(Cell a, Cell b) {
return compareRows(a, b);
}
}
}

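The new RowComparator orders cells by row bytes alone, standing in for KeyValue.RowOnlyComparator; GetClosestRowBeforeTracker, next, is its first caller. A usage sketch:

import java.util.NavigableMap;
import java.util.TreeMap;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;

final class RowKeyedMapSketch {
  // Cells from the same row collapse onto one key; column and timestamp
  // are ignored by the comparator.
  static NavigableMap<Cell, Long> newRowKeyedMap() {
    return new TreeMap<Cell, Long>(new CellComparator.RowComparator());
  }
}
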
View File: GetClosestRowBeforeTracker.java

@@ -25,11 +25,11 @@ import java.util.TreeSet;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -52,7 +52,7 @@ class GetClosestRowBeforeTracker {
private final int tablenamePlusDelimiterLength;
// Deletes keyed by row. Comparator compares on row portion of KeyValue only.
private final NavigableMap<KeyValue, NavigableSet<KeyValue>> deletes;
private final NavigableMap<Cell, NavigableSet<Cell>> deletes;
/**
* @param c
@@ -77,8 +77,7 @@ class GetClosestRowBeforeTracker {
this.tablenamePlusDelimiterLength = metaregion? l + 1: -1;
this.oldestts = System.currentTimeMillis() - ttl;
this.kvcomparator = c;
KeyValue.RowOnlyComparator rc = new KeyValue.RowOnlyComparator(this.kvcomparator);
this.deletes = new TreeMap<KeyValue, NavigableSet<KeyValue>>(rc);
this.deletes = new TreeMap<Cell, NavigableSet<Cell>>(new CellComparator.RowComparator());
}
/**
@@ -94,12 +93,12 @@ class GetClosestRowBeforeTracker {
* @param kv
*/
private void addDelete(final Cell kv) {
NavigableSet<KeyValue> rowdeletes = this.deletes.get(kv);
NavigableSet<Cell> rowdeletes = this.deletes.get(kv);
if (rowdeletes == null) {
rowdeletes = new TreeSet<KeyValue>(this.kvcomparator);
this.deletes.put(KeyValueUtil.ensureKeyValue(kv), rowdeletes);
rowdeletes = new TreeSet<Cell>(this.kvcomparator);
this.deletes.put(kv, rowdeletes);
}
rowdeletes.add(KeyValueUtil.ensureKeyValue(kv));
rowdeletes.add(kv);
}
/*
@@ -128,7 +127,7 @@ class GetClosestRowBeforeTracker {
*/
private boolean isDeleted(final Cell kv) {
if (this.deletes.isEmpty()) return false;
NavigableSet<KeyValue> rowdeletes = this.deletes.get(kv);
NavigableSet<Cell> rowdeletes = this.deletes.get(kv);
if (rowdeletes == null || rowdeletes.isEmpty()) return false;
return isDeleted(kv, rowdeletes);
}
@@ -140,9 +139,9 @@ class GetClosestRowBeforeTracker {
* @param ds
* @return True if the specified KeyValue is deleted, false if not
*/
public boolean isDeleted(final Cell kv, final NavigableSet<KeyValue> ds) {
public boolean isDeleted(final Cell kv, final NavigableSet<Cell> ds) {
if (deletes == null || deletes.isEmpty()) return false;
for (KeyValue d: ds) {
for (Cell d: ds) {
long kvts = kv.getTimestamp();
long dts = d.getTimestamp();
if (CellUtil.isDeleteFamily(d)) {
@@ -164,7 +163,7 @@ class GetClosestRowBeforeTracker {
if (kvts > dts) return false;
// Check Type
switch (KeyValue.Type.codeToType(d.getType())) {
switch (KeyValue.Type.codeToType(d.getTypeByte())) {
case Delete: return kvts == dts;
case DeleteColumn: return true;
default: continue;
@@ -198,7 +197,7 @@ class GetClosestRowBeforeTracker {
* @return True if we added a candidate
*/
boolean handle(final Cell kv) {
if (KeyValueUtil.ensureKeyValue(kv).isDelete()) {
if (CellUtil.isDelete(kv)) {
handleDeletes(kv);
return false;
}

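Delete handling now stays in Cell terms end to end: CellUtil.isDelete() replaces ensureKeyValue(kv).isDelete(), and the marker type comes from getTypeByte(). A hedged sketch of the same classification pattern (the helper class is illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;

final class DeleteMarkerSketch {
  // True when the marker covers all versions of its column or family,
  // not just the single version at its own timestamp.
  static boolean coversAllVersions(Cell d) {
    if (!CellUtil.isDelete(d)) {
      return false;
    }
    switch (KeyValue.Type.codeToType(d.getTypeByte())) {
      case DeleteColumn:
      case DeleteFamily:
        return true;
      default:
        return false; // e.g. Type.Delete hits exactly one timestamp
    }
  }
}
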
View File: HRegion.java

@@ -3899,7 +3899,7 @@ public class HRegion implements HeapSize { // , Writable{
if (region != null && region.metricsRegion != null) {
long totalSize = 0;
for(Cell c:outResults) {
// TODO clean up
// TODO clean up. Find way to remove this ensureKeyValue
KeyValue kv = KeyValueUtil.ensureKeyValue(c);
totalSize += kv.getLength();
}

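The TODO remains because Cell exposes no serialized-length accessor; the metric wants the KeyValue wire length. For a cell without tags that length can be recomputed from Cell accessors alone, as in this hedged sketch:

import org.apache.hadoop.hbase.Cell;

final class CellLengthSketch {
  // KeyValue layout: 4B key length + 4B value length + key + value, where
  // key = 2B row length + row + 1B family length + family + qualifier
  //     + 8B timestamp + 1B type. Tags, if present, would add more bytes.
  static long serializedLength(Cell c) {
    int keyLength = 2 + c.getRowLength() + 1 + c.getFamilyLength()
        + c.getQualifierLength() + 8 + 1;
    return 4 + 4 + (long) keyLength + c.getValueLength();
  }
}
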
View File: VisibilityController.java

@@ -1249,14 +1249,14 @@ public class VisibilityController extends BaseRegionObserver implements MasterObserver
// We need to create another KV, unfortunately, because the current new KV
// has no space for tags
KeyValue newKv = KeyValueUtil.ensureKeyValue(newCell);
KeyValue rewriteKv = new KeyValue(newKv.getRowArray(), newKv.getRowOffset(), newKv.getRowLength(),
newKv.getFamilyArray(), newKv.getFamilyOffset(), newKv.getFamilyLength(),
newKv.getQualifierArray(), newKv.getQualifierOffset(), newKv.getQualifierLength(),
newKv.getTimestamp(), KeyValue.Type.codeToType(newKv.getTypeByte()),
newKv.getValueArray(), newKv.getValueOffset(), newKv.getValueLength(), tags);
KeyValue rewriteKv = new KeyValue(newCell.getRowArray(), newCell.getRowOffset(),
newCell.getRowLength(), newCell.getFamilyArray(), newCell.getFamilyOffset(),
newCell.getFamilyLength(), newCell.getQualifierArray(), newCell.getQualifierOffset(),
newCell.getQualifierLength(), newCell.getTimestamp(), KeyValue.Type.codeToType(newCell
.getTypeByte()), newCell.getValueArray(), newCell.getValueOffset(),
newCell.getValueLength(), tags);
// Preserve mvcc data
rewriteKv.setSequenceId(newKv.getMvccVersion());
rewriteKv.setSequenceId(newCell.getMvccVersion());
return rewriteKv;
}

View File: TestFilterList.java

@@ -18,23 +18,16 @@
*/
package org.apache.hadoop.hbase.filter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNull;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -451,7 +444,7 @@ public class TestFilterList {
}
@Override
public Cell getNextCellHint(Cell currentKV) {
public Cell getNextCellHint(Cell cell) {
return new KeyValue(Bytes.toBytes(Long.MAX_VALUE), null, null);
}