HBASE-1643 ScanDeleteTracker takes comparator but it unused

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@793084 13f79535-47bb-0310-9956-ffa450edef68
commit f10f85f0ff
parent 60e69c6982
Author: Michael Stack
Date:   2009-07-10 19:30:39 +00:00

7 changed files with 23 additions and 28 deletions
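Note: the comparator handed to ScanDeleteTracker was never consulted (qualifier matching goes through Bytes.compareTo), so the constructor drops its KeyValue.KeyComparator argument and the call sites in MinorCompactingStoreScanner and ScanQueryMatcher switch to the no-arg form. A minimal sketch of the call-site change, assuming the HBase 0.20 classes are on the classpath; the package path follows the 0.20 layout and the wrapper class below is hypothetical:

    import org.apache.hadoop.hbase.regionserver.ScanDeleteTracker;

    public class ConstructorChangeSketch {
      public static void main(String[] args) {
        // Before this patch, call sites passed a raw key comparator, e.g.
        //   new ScanDeleteTracker(store.comparator.getRawComparator());
        // even though the tracker never used it.

        // After this patch, the tracker is constructed with no arguments:
        ScanDeleteTracker deletes = new ScanDeleteTracker();
        deletes.reset();   // DeleteTracker lifecycle call, e.g. at a row boundary
      }
    }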

CHANGES.txt

@@ -462,6 +462,7 @@ Release 0.20.0 - Unreleased
               Scan (Jon Gray via Nitay)
    HBASE-1640  Allow passing arguments to jruby script run when run by bin/hbase shell
    HBASE-698   HLog recovery is not performed after master failure
+   HBASE-1643  ScanDeleteTracker takes comparator but it unused
 
   OPTIMIZATIONS
    HBASE-1412  Change values for delete column and column family in KeyValue

GetDeleteTracker.java

@@ -90,7 +90,7 @@ public class GetDeleteTracker implements DeleteTracker {
       int qualifierLength, long timestamp) {
     // Check against DeleteFamily
-    if(timestamp <= familyStamp) {
+    if (timestamp <= familyStamp) {
       return true;
     }

KeyValueHeap.java

@@ -54,8 +54,8 @@ public class KeyValueHeap implements KeyValueScanner, InternalScanner {
     this.comparator = new KVScannerComparator(comparator);
     this.heap = new PriorityQueue<KeyValueScanner>(scanners.length,
         this.comparator);
-    for(KeyValueScanner scanner : scanners) {
-      if(scanner.peek() != null) {
+    for (KeyValueScanner scanner : scanners) {
+      if (scanner.peek() != null) {
         this.heap.add(scanner);
       }
     }
@@ -102,7 +102,7 @@ public class KeyValueHeap implements KeyValueScanner, InternalScanner {
       InternalScanner currentAsInternal = (InternalScanner)this.current;
       currentAsInternal.next(result);
       KeyValue pee = this.current.peek();
-      if(pee == null) {
+      if (pee == null) {
         this.current.close();
       } else {
         this.heap.add(this.current);

MinorCompactingStoreScanner.java

@@ -41,7 +41,7 @@ public class MinorCompactingStoreScanner implements KeyValueScanner, InternalScanner {
   MinorCompactingStoreScanner(Store store,
                               KeyValueScanner [] scanners) {
     comparator = store.comparator;
-    deleteTracker = new ScanDeleteTracker(store.comparator.getRawComparator());
+    deleteTracker = new ScanDeleteTracker();
     KeyValue firstKv = KeyValue.createFirstOnRow(HConstants.EMPTY_START_ROW);
     for (KeyValueScanner scanner : scanners ) {
       scanner.seek(firstKv);
@@ -53,7 +53,7 @@ public class MinorCompactingStoreScanner implements KeyValueScanner, InternalScanner {
   MinorCompactingStoreScanner(String cfName, KeyValue.KVComparator comparator,
                               KeyValueScanner [] scanners) {
     this.comparator = comparator;
-    deleteTracker = new ScanDeleteTracker(comparator.getRawComparator());
+    deleteTracker = new ScanDeleteTracker();
     KeyValue firstKv = KeyValue.createFirstOnRow(HConstants.EMPTY_START_ROW);
     for (KeyValueScanner scanner : scanners ) {

ScanDeleteTracker.java

@@ -47,14 +47,11 @@ public class ScanDeleteTracker implements DeleteTracker {
   private byte deleteType = 0;
   private long deleteTimestamp = 0L;
 
-  private KeyValue.KeyComparator comparator;
-
   /**
    * Constructor for ScanDeleteTracker
-   * @param comparator
    */
-  public ScanDeleteTracker(KeyValue.KeyComparator comparator) {
-    this.comparator = comparator;
+  public ScanDeleteTracker() {
+    super();
   }
 
   /**
@@ -71,15 +68,15 @@ public class ScanDeleteTracker implements DeleteTracker {
   @Override
   public void add(byte[] buffer, int qualifierOffset, int qualifierLength,
       long timestamp, byte type) {
-    if(timestamp > familyStamp) {
-      if(type == KeyValue.Type.DeleteFamily.getCode()) {
+    if (timestamp > familyStamp) {
+      if (type == KeyValue.Type.DeleteFamily.getCode()) {
         familyStamp = timestamp;
         return;
       }
 
-      if(deleteBuffer != null && type < deleteType) {
+      if (deleteBuffer != null && type < deleteType) {
         // same column, so ignore less specific delete
-        if(Bytes.compareTo(deleteBuffer, deleteOffset, deleteLength,
+        if (Bytes.compareTo(deleteBuffer, deleteOffset, deleteLength,
             buffer, qualifierOffset, qualifierLength) == 0){
           return;
         }
@@ -107,17 +104,16 @@ public class ScanDeleteTracker implements DeleteTracker {
   @Override
   public boolean isDeleted(byte [] buffer, int qualifierOffset,
       int qualifierLength, long timestamp) {
-    if(timestamp < familyStamp) {
+    if (timestamp < familyStamp) {
       return true;
     }
 
-    if(deleteBuffer != null) {
-      // TODO ryan use a specific comparator
+    if (deleteBuffer != null) {
       int ret = Bytes.compareTo(deleteBuffer, deleteOffset, deleteLength,
           buffer, qualifierOffset, qualifierLength);
-      if(ret == 0) {
-        if(deleteType == KeyValue.Type.DeleteColumn.getCode()) {
+      if (ret == 0) {
+        if (deleteType == KeyValue.Type.DeleteColumn.getCode()) {
           return true;
         }
 
         // Delete (aka DeleteVersion)
@@ -158,5 +154,4 @@ public class ScanDeleteTracker implements DeleteTracker {
   public void update() {
     this.reset();
   }
-
 }
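With the comparator gone, the tracker matches delete markers against qualifiers purely via Bytes.compareTo, as the hunks above show. A small sketch of driving the tracker directly, assuming the 0.20 classes are on the classpath (package paths and the wrapper class are assumptions here):

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.regionserver.ScanDeleteTracker;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DeleteTrackerSketch {
      public static void main(String[] args) {
        ScanDeleteTracker deletes = new ScanDeleteTracker();
        byte[] qual = Bytes.toBytes("q1");

        // Record a DeleteColumn marker for qualifier "q1" at timestamp 100.
        deletes.add(qual, 0, qual.length, 100L, KeyValue.Type.DeleteColumn.getCode());

        // A put on the same qualifier at an older timestamp is reported as deleted;
        // the qualifier comparison is Bytes.compareTo, no injected comparator involved.
        System.out.println(deletes.isDeleted(qual, 0, qual.length, 99L));  // true

        // reset() clears state between rows.
        deletes.reset();
      }
    }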

ScanQueryMatcher.java

@@ -24,7 +24,6 @@ import java.util.NavigableSet;
 
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -54,15 +53,14 @@ public class ScanQueryMatcher extends QueryMatcher {
     this.tr = scan.getTimeRange();
     this.oldestStamp = System.currentTimeMillis() - ttl;
     this.rowComparator = rowComparator;
-    // shouldn't this be ScanDeleteTracker?
-    this.deletes = new ScanDeleteTracker(rowComparator);
+    this.deletes = new ScanDeleteTracker();
     this.startKey = KeyValue.createFirstOnRow(scan.getStartRow());
     this.stopKey = KeyValue.createFirstOnRow(scan.getStopRow());
     this.filter = scan.getFilter();
     this.oldFilter = scan.getOldFilter();
 
     // Single branch to deal with two types of reads (columns vs all in family)
-    if(columns == null || columns.size() == 0) {
+    if (columns == null || columns.size() == 0) {
       // use a specialized scan for wildcard column tracker.
       this.columns = new ScanWildcardColumnTracker(maxVersions);
     } else {
@@ -166,7 +164,8 @@ public class ScanQueryMatcher extends QueryMatcher {
       return MatchCode.SKIP;
     }
 
-    if (deletes.isDeleted(bytes, offset, qualLength, timestamp)) {
+    if (!this.deletes.isEmpty() &&
+        deletes.isDeleted(bytes, offset, qualLength, timestamp)) {
       return MatchCode.SKIP;
     }