HBASE-12363 Improve how KEEP_DELETED_CELLS works with MIN_VERSIONS.

This commit is contained in:
Lars Hofhansl 2014-11-04 17:08:39 -08:00
parent a5d1832a30
commit ecd708671c
14 changed files with 253 additions and 50 deletions

View File

@ -159,7 +159,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
/**
* Default setting for preventing deleted cells from being collected immediately.
*/
public static final boolean DEFAULT_KEEP_DELETED = false;
public static final KeepDeletedCells DEFAULT_KEEP_DELETED = KeepDeletedCells.FALSE;
/**
* Default setting for whether to use a block cache or not.
@ -426,7 +426,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
*/
@Deprecated
public HColumnDescriptor(final byte[] familyName, final int minVersions,
final int maxVersions, final boolean keepDeletedCells,
final int maxVersions, final KeepDeletedCells keepDeletedCells,
final String compression, final boolean encodeOnDisk,
final String dataBlockEncoding, final boolean inMemory,
final boolean blockCacheEnabled, final int blocksize,
@ -791,10 +791,11 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
return setValue(HConstants.IN_MEMORY, Boolean.toString(inMemory));
}
public boolean getKeepDeletedCells() {
public KeepDeletedCells getKeepDeletedCells() {
String value = getValue(KEEP_DELETED_CELLS);
if (value != null) {
return Boolean.valueOf(value).booleanValue();
// toUpperCase for backwards compatibility
return KeepDeletedCells.valueOf(value.toUpperCase());
}
return DEFAULT_KEEP_DELETED;
}
@ -803,9 +804,21 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
* @param keepDeletedCells True if deleted rows should not be collected
* immediately.
* @return this (for chained invocation)
* @deprecated use {@link #setKeepDeletedCells(KeepDeletedCells)}
*/
@Deprecated
public HColumnDescriptor setKeepDeletedCells(boolean keepDeletedCells) {
return setValue(KEEP_DELETED_CELLS, Boolean.toString(keepDeletedCells));
return setValue(KEEP_DELETED_CELLS, (keepDeletedCells ? KeepDeletedCells.TRUE
: KeepDeletedCells.FALSE).toString());
}
/**
* @param keepDeletedCells KeepDeletedCells policy controlling whether, and
*          for how long, deleted cells are retained.
* @return this (for chained invocation)
*/
public HColumnDescriptor setKeepDeletedCells(KeepDeletedCells keepDeletedCells) {
return setValue(KEEP_DELETED_CELLS, keepDeletedCells.toString());
}
/**

View File

@ -0,0 +1,45 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
/**
 * Ways to keep cells marked for delete around.
 */
/*
 * Don't change the TRUE/FALSE labels below, these have to be called
 * this way for backwards compatibility.
 */
public enum KeepDeletedCells {
  /** Deleted cells are not retained. */
  FALSE,
  /**
   * Deleted cells are retained until they are removed by other means
   * such as TTL or VERSIONS.
   * If no TTL is specified or no new versions of delete cells are
   * written, they are retained forever.
   */
  TRUE,
  /**
   * Deleted cells are retained until the delete marker expires due to TTL.
   * This is useful when TTL is combined with MIN_VERSIONS and one
   * wants to keep a minimum number of versions around, but at the same
   * time remove deleted cells after the TTL.
   */
  TTL;
}

View File

@ -51,7 +51,7 @@ public class TestHColumnDescriptor {
assertEquals(v, hcd.getMaxVersions());
hcd.setMinVersions(v);
assertEquals(v, hcd.getMinVersions());
hcd.setKeepDeletedCells(!HColumnDescriptor.DEFAULT_KEEP_DELETED);
hcd.setKeepDeletedCells(KeepDeletedCells.TRUE);
hcd.setInMemory(!HColumnDescriptor.DEFAULT_IN_MEMORY);
boolean inmemory = hcd.isInMemory();
hcd.setScope(v);

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@ -33,7 +34,7 @@ public class ScanInfo {
private int minVersions;
private int maxVersions;
private long ttl;
private boolean keepDeletedCells;
private KeepDeletedCells keepDeletedCells;
private long timeToPurgeDeletes;
private KVComparator comparator;
@ -65,7 +66,7 @@ public class ScanInfo {
* @param comparator The store's comparator
*/
public ScanInfo(final byte[] family, final int minVersions, final int maxVersions,
final long ttl, final boolean keepDeletedCells, final long timeToPurgeDeletes,
final long ttl, final KeepDeletedCells keepDeletedCells, final long timeToPurgeDeletes,
final KVComparator comparator) {
this.family = family;
this.minVersions = minVersions;
@ -92,7 +93,7 @@ public class ScanInfo {
return ttl;
}
public boolean getKeepDeletedCells() {
public KeepDeletedCells getKeepDeletedCells() {
return keepDeletedCells;
}

View File

@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Scan;
@ -72,7 +73,7 @@ public class ScanQueryMatcher {
private boolean retainDeletesInOutput;
/** whether to return deleted rows */
private final boolean keepDeletedCells;
private final KeepDeletedCells keepDeletedCells;
/** whether time range queries can see rows "behind" a delete */
private final boolean seePastDeleteMarkers;
@ -99,6 +100,7 @@ public class ScanQueryMatcher {
* deleted KVs.
*/
private final long earliestPutTs;
private final long ttl;
/** readPoint over which the KVs are unconditionally included */
protected long maxReadPointToTrackVersions;
@ -164,15 +166,18 @@ public class ScanQueryMatcher {
this.earliestPutTs = earliestPutTs;
this.maxReadPointToTrackVersions = readPointToUse;
this.timeToPurgeDeletes = scanInfo.getTimeToPurgeDeletes();
this.ttl = oldestUnexpiredTS;
/* how to deal with deletes */
this.isUserScan = scanType == ScanType.USER_SCAN;
// keep deleted cells: if compaction or raw scan
this.keepDeletedCells = (scanInfo.getKeepDeletedCells() && !isUserScan) || scan.isRaw();
// retain deletes: if minor compaction or raw scan
this.keepDeletedCells = scan.isRaw() ? KeepDeletedCells.TRUE :
isUserScan ? KeepDeletedCells.FALSE : scanInfo.getKeepDeletedCells();
// retain deletes: if minor compaction or raw scan
this.retainDeletesInOutput = scanType == ScanType.COMPACT_RETAIN_DELETES || scan.isRaw();
// seePastDeleteMarker: user initiated scans
this.seePastDeleteMarkers = scanInfo.getKeepDeletedCells() && isUserScan;
this.seePastDeleteMarkers =
scanInfo.getKeepDeletedCells() != KeepDeletedCells.FALSE && isUserScan;
int maxVersions =
scan.isRaw() ? scan.getMaxVersions() : Math.min(scan.getMaxVersions(),
@ -318,7 +323,8 @@ public class ScanQueryMatcher {
byte typeByte = cell.getTypeByte();
long mvccVersion = cell.getMvccVersion();
if (CellUtil.isDelete(cell)) {
if (!keepDeletedCells) {
if (keepDeletedCells == KeepDeletedCells.FALSE
|| (keepDeletedCells == KeepDeletedCells.TTL && timestamp < ttl)) {
// first ignore delete markers if the scanner can do so, and the
// range does not include the marker
//
@ -348,7 +354,8 @@ public class ScanQueryMatcher {
// otherwise (i.e. a "raw" scan) we fall through to normal version and timerange checking
return MatchCode.INCLUDE;
}
} else if (keepDeletedCells) {
} else if (keepDeletedCells == KeepDeletedCells.TRUE
|| (keepDeletedCells == KeepDeletedCells.TTL && timestamp >= ttl)) {
if (timestamp < earliestPutTs) {
// keeping delete rows, but there are no puts older than
// this delete in the store files.

View File

@ -207,7 +207,7 @@ public abstract class HBaseTestCase extends TestCase {
* @return Column descriptor.
*/
protected HTableDescriptor createTableDescriptor(final String name,
final int minVersions, final int versions, final int ttl, boolean keepDeleted) {
final int minVersions, final int versions, final int ttl, KeepDeletedCells keepDeleted) {
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
for (byte[] cfName : new byte[][]{ fam1, fam2, fam3 }) {
htd.addFamily(new HColumnDescriptor(cfName)

View File

@ -1599,7 +1599,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
* @return Column descriptor.
*/
public HTableDescriptor createTableDescriptor(final String name,
final int minVersions, final int versions, final int ttl, boolean keepDeleted) {
final int minVersions, final int versions, final int ttl, KeepDeletedCells keepDeleted) {
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
for (byte[] cfName : new byte[][]{ fam1, fam2, fam3 }) {
htd.addFamily(new HColumnDescriptor(cfName)

View File

@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
@ -93,8 +94,9 @@ public class TestDefaultMemStore extends TestCase {
List<KeyValueScanner> memstorescanners = this.memstore.getScanners(0);
Scan scan = new Scan();
List<Cell> result = new ArrayList<Cell>();
ScanInfo scanInfo = new ScanInfo(null, 0, 1, HConstants.LATEST_TIMESTAMP, false,
0, this.memstore.comparator);
ScanInfo scanInfo =
new ScanInfo(null, 0, 1, HConstants.LATEST_TIMESTAMP, KeepDeletedCells.FALSE, 0,
this.memstore.comparator);
ScanType scanType = ScanType.USER_SCAN;
StoreScanner s = new StoreScanner(scan, scanInfo, scanType, null, memstorescanners);
int count = 0;
@ -513,7 +515,7 @@ public class TestDefaultMemStore extends TestCase {
}
//starting from each row, validate results should contain the starting row
for (int startRowId = 0; startRowId < ROW_COUNT; startRowId++) {
ScanInfo scanInfo = new ScanInfo(FAMILY, 0, 1, Integer.MAX_VALUE, false,
ScanInfo scanInfo = new ScanInfo(FAMILY, 0, 1, Integer.MAX_VALUE, KeepDeletedCells.FALSE,
0, this.memstore.comparator);
ScanType scanType = ScanType.USER_SCAN;
InternalScanner scanner = new StoreScanner(new Scan(

View File

@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.client.Delete;
@ -96,7 +97,7 @@ public class TestKeepDeletes {
public void testBasicScenario() throws Exception {
// keep 3 versions, rows do not expire
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, true);
HConstants.FOREVER, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -193,7 +194,7 @@ public class TestKeepDeletes {
public void testRawScanWithoutKeepingDeletes() throws Exception {
// KEEP_DELETED_CELLS is NOT enabled
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, false);
HConstants.FOREVER, KeepDeletedCells.FALSE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -238,7 +239,7 @@ public class TestKeepDeletes {
public void testWithoutKeepingDeletes() throws Exception {
// KEEP_DELETED_CELLS is NOT enabled
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, false);
HConstants.FOREVER, KeepDeletedCells.FALSE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -282,7 +283,7 @@ public class TestKeepDeletes {
@Test
public void testRawScanWithColumns() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, true);
HConstants.FOREVER, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
Scan s = new Scan();
@ -306,7 +307,7 @@ public class TestKeepDeletes {
@Test
public void testRawScan() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, true);
HConstants.FOREVER, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -396,7 +397,7 @@ public class TestKeepDeletes {
@Test
public void testDeleteMarkerExpirationEmptyStore() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, true);
HConstants.FOREVER, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -439,7 +440,7 @@ public class TestKeepDeletes {
@Test
public void testDeleteMarkerExpiration() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, true);
HConstants.FOREVER, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -496,13 +497,91 @@ public class TestKeepDeletes {
HRegion.closeHRegion(region);
}
/**
 * Test delete marker removal from store files: with KEEP_DELETED_CELLS=TRUE,
 * markers survive flushes and major compactions as long as an older row in
 * the same store keeps them from being collected.
 */
@Test
public void testWithOldRow() throws Exception {
  // MIN_VERSIONS=0, VERSIONS=1, no TTL, KEEP_DELETED_CELLS=TRUE
  HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
      HConstants.FOREVER, KeepDeletedCells.TRUE);
  HRegion region = hbu.createLocalHRegion(htd, null, null);

  long ts = EnvironmentEdgeManager.currentTime();
  Put p = new Put(T1, ts);
  p.add(c0, c0, T1);
  region.put(p);

  // put another (older) row in the same store
  p = new Put(T2, ts-10);
  p.add(c0, c0, T1);
  region.put(p);

  // all the following deletes affect the put on row T1
  Delete d = new Delete(T1, ts);
  d.deleteColumns(c0, c0, ts);
  region.delete(d);

  d = new Delete(T1, ts);
  d.deleteFamily(c0, ts);
  region.delete(d);

  d = new Delete(T1, ts);
  d.deleteColumn(c0, c0, ts+1);
  region.delete(d);

  d = new Delete(T1, ts);
  d.deleteColumn(c0, c0, ts+2);
  region.delete(d);

  // 1 family marker, 1 column marker, 2 version markers
  assertEquals(4, countDeleteMarkers(region));

  // neither a flush nor a minor compaction removes any marker
  region.flushcache();
  assertEquals(4, countDeleteMarkers(region));
  region.compactStores(false);
  assertEquals(4, countDeleteMarkers(region));

  // another put will push out the earlier put...
  p = new Put(T1, ts+3);
  p.add(c0, c0, T1);
  region.put(p);

  region.flushcache();
  // no markers are collected, since there is an affected put
  region.compactStores(true);
  assertEquals(4, countDeleteMarkers(region));

  // all markers remain, since we have the older row
  // and we haven't pushed the inlined markers past MAX_VERSIONS
  region.compactStores(true);
  assertEquals(4, countDeleteMarkers(region));

  // another put will push out the earlier put...
  p = new Put(T1, ts+4);
  p.add(c0, c0, T1);
  region.put(p);

  // this pushed out the column and version markers,
  // but the family marker remains. THIS IS A PROBLEM!
  region.compactStores(true);
  assertEquals(1, countDeleteMarkers(region));

  // no amount of compacting is getting rid of this one;
  // KEEP_DELETED_CELLS=>TTL is an option to avoid this.
  region.compactStores(true);
  assertEquals(1, countDeleteMarkers(region));

  HRegion.closeHRegion(region);
}
/**
* Verify correct range demarcation
*/
@Test
public void testRanges() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, true);
HConstants.FOREVER, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -584,7 +663,7 @@ public class TestKeepDeletes {
@Test
public void testDeleteMarkerVersioning() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, true);
HConstants.FOREVER, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -676,7 +755,7 @@ public class TestKeepDeletes {
*/
public void testWithMixedCFs() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, true);
HConstants.FOREVER, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime();
@ -727,7 +806,8 @@ public class TestKeepDeletes {
*/
@Test
public void testWithMinVersions() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, true);
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, KeepDeletedCells.TRUE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
long ts = EnvironmentEdgeManager.currentTime() - 2000; // 2s in the past
@ -799,6 +879,51 @@ public class TestKeepDeletes {
HRegion.closeHRegion(region);
}
/**
 * Test KEEP_DELETED_CELLS=TTL: delete markers are retained only until
 * the column family's TTL expires, after which a major compaction
 * removes them.
 * @throws Exception
 */
@Test
public void testWithTTL() throws Exception {
  // MIN_VERSIONS=1, VERSIONS=1000, TTL=1s, KEEP_DELETED_CELLS=TTL
  HTableDescriptor htd =
      hbu.createTableDescriptor(name.getMethodName(), 1, 1000, 1, KeepDeletedCells.TTL);
  HRegion region = hbu.createLocalHRegion(htd, null, null);

  long ts = EnvironmentEdgeManager.currentTime() - 2000; // 2s in the past

  Put p = new Put(T1, ts);
  p.add(c0, c0, T3);
  region.put(p);

  // place an old row, to make the family marker expire anyway
  p = new Put(T2, ts-10);
  p.add(c0, c0, T1);
  region.put(p);

  checkGet(region, T1, c0, c0, ts+1, T3);

  // place a family delete marker; with no family specified it covers
  // every family the table descriptor created
  Delete d = new Delete(T1, ts+2);
  region.delete(d);

  checkGet(region, T1, c0, c0, ts+1, T3);

  // one family delete marker in each of the 3 families
  assertEquals(3, countDeleteMarkers(region));

  region.flushcache();
  // no delete markers are removed by the flush
  assertEquals(3, countDeleteMarkers(region));
  // but the Put is gone
  checkGet(region, T1, c0, c0, ts+1);

  region.compactStores(true);

  // all delete markers are gone after the major compaction,
  // since their TTL (1s) has expired
  assertEquals(0, countDeleteMarkers(region));

  HRegion.closeHRegion(region);
}
private void checkGet(HRegion region, byte[] row, byte[] fam, byte[] col,
long time, byte[]... vals) throws IOException {
Get g = new Get(row);

View File

@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.client.Delete;
@ -65,7 +66,8 @@ public class TestMinVersions {
*/
@Test
public void testGetClosestBefore() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 1, 1000, 1, false);
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 1, 1000, 1, KeepDeletedCells.FALSE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
try {
@ -114,7 +116,8 @@ public class TestMinVersions {
@Test
public void testStoreMemStore() throws Exception {
// keep 3 versions minimum
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, false);
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, KeepDeletedCells.FALSE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
long ts = EnvironmentEdgeManager.currentTime() - 2000;
@ -168,7 +171,8 @@ public class TestMinVersions {
*/
@Test
public void testDelete() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, false);
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, KeepDeletedCells.FALSE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
@ -226,7 +230,8 @@ public class TestMinVersions {
*/
@Test
public void testMemStore() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, KeepDeletedCells.FALSE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
// 2s in the past
@ -301,7 +306,8 @@ public class TestMinVersions {
@Test
public void testBaseCase() throws Exception {
// 1 version minimum, 1000 versions maximum, ttl = 1s
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, KeepDeletedCells.FALSE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
try {
@ -392,7 +398,8 @@ public class TestMinVersions {
*/
@Test
public void testFilters() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, false);
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, KeepDeletedCells.FALSE);
HRegion region = hbu.createLocalHRegion(htd, null, null);
final byte [] c1 = COLUMNS[1];

View File

@ -29,6 +29,7 @@ import java.util.NavigableSet;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValue.Type;
@ -97,7 +98,7 @@ public class TestQueryMatcher extends HBaseTestCase {
private void _testMatch_ExplicitColumns(Scan scan, List<MatchCode> expected) throws IOException {
// 2,4,5
ScanQueryMatcher qm = new ScanQueryMatcher(scan, new ScanInfo(fam2,
0, 1, ttl, false, 0, rowComparator), get.getFamilyMap().get(fam2),
0, 1, ttl, KeepDeletedCells.FALSE, 0, rowComparator), get.getFamilyMap().get(fam2),
EnvironmentEdgeManager.currentTime() - ttl);
List<KeyValue> memstore = new ArrayList<KeyValue>();
@ -182,7 +183,7 @@ public class TestQueryMatcher extends HBaseTestCase {
expected.add(ScanQueryMatcher.MatchCode.DONE);
ScanQueryMatcher qm = new ScanQueryMatcher(scan, new ScanInfo(fam2,
0, 1, ttl, false, 0, rowComparator), null,
0, 1, ttl, KeepDeletedCells.FALSE, 0, rowComparator), null,
EnvironmentEdgeManager.currentTime() - ttl);
List<KeyValue> memstore = new ArrayList<KeyValue>();
@ -236,9 +237,9 @@ public class TestQueryMatcher extends HBaseTestCase {
};
long now = EnvironmentEdgeManager.currentTime();
ScanQueryMatcher qm = new ScanQueryMatcher(scan, new ScanInfo(fam2,
0, 1, testTTL, false, 0, rowComparator), get.getFamilyMap().get(fam2),
now - testTTL);
ScanQueryMatcher qm =
new ScanQueryMatcher(scan, new ScanInfo(fam2, 0, 1, testTTL, KeepDeletedCells.FALSE, 0,
rowComparator), get.getFamilyMap().get(fam2), now - testTTL);
KeyValue [] kvs = new KeyValue[] {
new KeyValue(row1, fam2, col1, now-100, data),
@ -292,7 +293,7 @@ public class TestQueryMatcher extends HBaseTestCase {
long now = EnvironmentEdgeManager.currentTime();
ScanQueryMatcher qm = new ScanQueryMatcher(scan, new ScanInfo(fam2,
0, 1, testTTL, false, 0, rowComparator), null,
0, 1, testTTL, KeepDeletedCells.FALSE, 0, rowComparator), null,
now - testTTL);
KeyValue [] kvs = new KeyValue[] {
@ -348,7 +349,7 @@ public class TestQueryMatcher extends HBaseTestCase {
byte[] from, byte[] to, byte[][] rows, MatchCode... expected) throws IOException {
long now = EnvironmentEdgeManager.currentTime();
// Set time to purge deletes to negative value to avoid it ever happening.
ScanInfo scanInfo = new ScanInfo(fam2, 0, 1, ttl, false, -1L, rowComparator);
ScanInfo scanInfo = new ScanInfo(fam2, 0, 1, ttl, KeepDeletedCells.FALSE, -1L, rowComparator);
NavigableSet<byte[]> cols = get.getFamilyMap().get(fam2);
ScanQueryMatcher qm = new ScanQueryMatcher(scan, scanInfo, cols, Long.MAX_VALUE,

View File

@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -252,7 +253,7 @@ public class TestReversibleScanners {
ScanType scanType = ScanType.USER_SCAN;
ScanInfo scanInfo = new ScanInfo(FAMILYNAME, 0, Integer.MAX_VALUE,
Long.MAX_VALUE, false, 0, KeyValue.COMPARATOR);
Long.MAX_VALUE, KeepDeletedCells.FALSE, 0, KeyValue.COMPARATOR);
// Case 1.Test a full reversed scan
Scan scan = new Scan();

View File

@ -32,6 +32,7 @@ import junit.framework.TestCase;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -48,7 +49,7 @@ public class TestStoreScanner extends TestCase {
private static final String CF_STR = "cf";
final byte [] CF = Bytes.toBytes(CF_STR);
private ScanInfo scanInfo = new ScanInfo(CF, 0, Integer.MAX_VALUE,
Long.MAX_VALUE, false, 0, KeyValue.COMPARATOR);
Long.MAX_VALUE, KeepDeletedCells.FALSE, 0, KeyValue.COMPARATOR);
private ScanType scanType = ScanType.USER_SCAN;
public void setUp() throws Exception {
@ -416,7 +417,7 @@ public class TestStoreScanner extends TestCase {
List<KeyValueScanner> scanners = scanFixture(kvs);
Scan scan = new Scan();
scan.setMaxVersions(1);
ScanInfo scanInfo = new ScanInfo(CF, 0, 1, 500, false, 0,
ScanInfo scanInfo = new ScanInfo(CF, 0, 1, 500, KeepDeletedCells.FALSE, 0,
KeyValue.COMPARATOR);
ScanType scanType = ScanType.USER_SCAN;
StoreScanner scanner =
@ -487,7 +488,7 @@ public class TestStoreScanner extends TestCase {
Scan scan = new Scan();
scan.setMaxVersions(1);
// scanner with ttl equal to 500
ScanInfo scanInfo = new ScanInfo(CF, 0, 1, 500, false, 0,
ScanInfo scanInfo = new ScanInfo(CF, 0, 1, 500, KeepDeletedCells.FALSE, 0,
KeyValue.COMPARATOR);
ScanType scanType = ScanType.USER_SCAN;
StoreScanner scanner =
@ -550,7 +551,7 @@ public class TestStoreScanner extends TestCase {
ScanInfo scanInfo = new ScanInfo(Bytes.toBytes("cf"),
0 /* minVersions */,
2 /* maxVersions */, 500 /* ttl */,
false /* keepDeletedCells */,
KeepDeletedCells.FALSE /* keepDeletedCells */,
200, /* timeToPurgeDeletes */
KeyValue.COMPARATOR);
StoreScanner scanner =

View File

@ -655,7 +655,7 @@ module Hbase
family.setBlocksize(JInteger.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::BLOCKSIZE))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::BLOCKSIZE)
family.setMaxVersions(JInteger.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::VERSIONS))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::VERSIONS)
family.setMinVersions(JInteger.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::MIN_VERSIONS))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::MIN_VERSIONS)
family.setKeepDeletedCells(JBoolean.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::KEEP_DELETED_CELLS))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::KEEP_DELETED_CELLS)
family.setKeepDeletedCells(org.apache.hadoop.hbase.KeepDeletedCells.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::KEEP_DELETED_CELLS).to_s.upcase)) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::KEEP_DELETED_CELLS)
family.setCompressTags(JBoolean.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::COMPRESS_TAGS))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::COMPRESS_TAGS)
family.setPrefetchBlocksOnOpen(JBoolean.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::PREFETCH_BLOCKS_ON_OPEN))) if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::PREFETCH_BLOCKS_ON_OPEN)
family.setValue(COMPRESSION_COMPACT, arg.delete(COMPRESSION_COMPACT)) if arg.include?(COMPRESSION_COMPACT)