From 20165cc25c853220d17a70da6cc57c1a6199ee2d Mon Sep 17 00:00:00 2001
From: Jim Kellerman
Date: Wed, 13 Aug 2008 02:33:09 +0000
Subject: [PATCH] HBASE-798 Add missing classes: UnknownRowLockException and
 RowLock which were present in previous versions of the patches for this
 issue, but not in the version that was committed. Also fix a number of
 compilation problems that were introduced by the patch.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@685421 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                    |  4 +
 .../hadoop/hbase/UnknownRowLockException.java  | 41 +++++++++++
 .../apache/hadoop/hbase/client/RowLock.java    | 62 ++++++++++++++++
 .../hbase/regionserver/HRegionServer.java      |  1 -
 .../hadoop/hbase/AbstractMergeTestBase.java    |  2 +-
 .../apache/hadoop/hbase/HBaseTestCase.java     |  8 +-
 .../hadoop/hbase/TestRegionRebalancing.java    |  2 +-
 .../hbase/TestScanMultipleVersions.java        |  2 +-
 .../hbase/regionserver/OOMERegionServer.java   |  2 +-
 .../hbase/regionserver/TestCompaction.java     |  2 +-
 .../hbase/regionserver/TestDeleteAll.java      |  4 +-
 .../hbase/regionserver/TestDeleteFamily.java   |  4 +-
 .../hadoop/hbase/regionserver/TestGet2.java    | 73 ++++++++++---------
 .../hadoop/hbase/util/TestMergeTool.java       |  2 +-
 14 files changed, 158 insertions(+), 51 deletions(-)
 create mode 100644 src/java/org/apache/hadoop/hbase/UnknownRowLockException.java
 create mode 100644 src/java/org/apache/hadoop/hbase/client/RowLock.java

diff --git a/CHANGES.txt b/CHANGES.txt
index f23e534a4ef..24f075384ed 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -35,6 +35,10 @@ Release 0.3.0 - Unreleased
   HBASE-787 Postgresql to HBase table replication example (Tim Sell via Stack)
   HBASE-798 Provide Client API to explicitly lock and unlock rows (Jonathan Gray via Jim Kellerman)
+  HBASE-798 Add missing classes: UnknownRowLockException and RowLock which
+            were present in previous versions of the patches for this issue,
+            but not in the version that was committed. Also fix a number of
+            compilation problems that were introduced by the patch.
 
   OPTIMIZATIONS

diff --git a/src/java/org/apache/hadoop/hbase/UnknownRowLockException.java b/src/java/org/apache/hadoop/hbase/UnknownRowLockException.java
new file mode 100644
index 00000000000..8cb39858f44
--- /dev/null
+++ b/src/java/org/apache/hadoop/hbase/UnknownRowLockException.java
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+
+/**
+ * Thrown if a region server is passed an unknown row lock id
+ */
+public class UnknownRowLockException extends DoNotRetryIOException {
+  private static final long serialVersionUID = 993179627856392526L;
+
+  /** constructor */
+  public UnknownRowLockException() {
+    super();
+  }
+
+  /**
+   * Constructor
+   * @param s message
+   */
+  public UnknownRowLockException(String s) {
+    super(s);
+  }
+}
diff --git a/src/java/org/apache/hadoop/hbase/client/RowLock.java b/src/java/org/apache/hadoop/hbase/client/RowLock.java
new file mode 100644
index 00000000000..3c8c46164bc
--- /dev/null
+++ b/src/java/org/apache/hadoop/hbase/client/RowLock.java
@@ -0,0 +1,62 @@
+/**
+ * Copyright 2008 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+/**
+ * Holds row name and lock id.
+ */
+public class RowLock {
+  private byte [] row = null;
+  private long lockId = -1L;
+
+  /**
+   * Creates a RowLock from a row and lock id
+   * @param row
+   * @param lockId
+   */
+  public RowLock(final byte [] row, final long lockId) {
+    this.row = row;
+    this.lockId = lockId;
+  }
+
+  /**
+   * Creates a RowLock with only a lock id
+   * @param lockId
+   */
+  public RowLock(final long lockId) {
+    this.lockId = lockId;
+  }
+
+  /**
+   * Get the row for this RowLock
+   * @return the row
+   */
+  public byte [] getRow() {
+    return row;
+  }
+
+  /**
+   * Get the lock id from this RowLock
+   * @return the lock id
+   */
+  public long getLockId() {
+    return lockId;
+  }
+}
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 9024e5987ea..fb5eca4cf01 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -28,7 +28,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedList;
diff --git a/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java b/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
index c703705a16f..570b75d3435 100644
--- a/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
+++ b/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
@@ -123,7 +123,7 @@ public abstract class AbstractMergeTestBase extends HBaseClusterTestCase {
             + String.format("%1$05d", i)));
       batchUpdate.put(COLUMN_NAME, value.get());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
       if(i % 10000 == 0) {
         System.out.println("Flushing write #" + i);
         r.flushcache();
diff --git a/src/test/org/apache/hadoop/hbase/HBaseTestCase.java b/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
index af984a5d492..561030354da 100644
--- a/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -401,13 +401,13 @@ public abstract class HBaseTestCase extends TestCase {
     /** {@inheritDoc} */
     public void commit(BatchUpdate batchUpdate) throws IOException {
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
     };
 
     /** {@inheritDoc} */
     public void deleteAll(byte [] row, byte [] column, long ts)
     throws IOException {
-      this.region.deleteAll(row, column, ts);
+      this.region.deleteAll(row, column, ts, null);
     }
 
     /** {@inheritDoc} */
@@ -441,7 +441,7 @@ public abstract class HBaseTestCase extends TestCase {
      * @throws IOException
      */
     public Map getFull(byte [] row) throws IOException {
-      return region.getFull(row, null, HConstants.LATEST_TIMESTAMP);
+      return region.getFull(row, null, HConstants.LATEST_TIMESTAMP, null);
     }
 
     /** {@inheritDoc} */
@@ -569,7 +569,7 @@ public abstract class HBaseTestCase extends TestCase {
   protected void assertCellEquals(final HRegion region, final byte [] row,
     final byte [] column, final long timestamp, final String value)
   throws IOException {
-    Map result = region.getFull(row, null, timestamp);
+    Map result = region.getFull(row, null, timestamp, null);
     Cell cell_value = result.get(column);
     if(value == null){
       assertEquals(column.toString() + " at timestamp " + timestamp, null, cell_value);
diff --git a/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java b/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java
index 376d5677ef1..8c5c68b391c 100644
--- a/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java
+++ b/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java
@@ -216,7 +216,7 @@ public class TestRegionRebalancing extends HBaseClusterTestCase {
       BatchUpdate bu = new BatchUpdate(keyToWrite);
       bu.put(COLUMN_NAME, "test".getBytes());
-      region.batchUpdate(bu);
+      region.batchUpdate(bu, null);
       region.close();
       region.getLog().closeAndDelete();
diff --git a/src/test/org/apache/hadoop/hbase/TestScanMultipleVersions.java b/src/test/org/apache/hadoop/hbase/TestScanMultipleVersions.java
index 0af704708df..498aee88aec 100644
--- a/src/test/org/apache/hadoop/hbase/TestScanMultipleVersions.java
+++ b/src/test/org/apache/hadoop/hbase/TestScanMultipleVersions.java
@@ -70,7 +70,7 @@ public class TestScanMultipleVersions extends HBaseClusterTestCase {
       for (int j = 0; j < TIMESTAMPS.length; j++) {
         BatchUpdate b = new BatchUpdate(ROWS[i], TIMESTAMPS[j]);
         b.put(HConstants.COLUMN_FAMILY, Bytes.toBytes(TIMESTAMPS[j]));
-        REGIONS[i].batchUpdate(b);
+        REGIONS[i].batchUpdate(b, null);
       }
       // Insert the region we created into the meta
       HRegion.addRegionToMETA(meta, REGIONS[i]);
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java b/src/test/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
index 3bc65fe9283..d130baf5889 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
@@ -48,7 +48,7 @@ public class OOMERegionServer extends HRegionServer {
   public void batchUpdate(byte [] regionName, BatchUpdate b)
   throws IOException {
-    super.batchUpdate(regionName, b);
+    super.batchUpdate(regionName, b, -1L);
     for (int i = 0; i < 30; i++) {
       // Add the batch update 30 times to bring on the OOME faster.
       this.retainer.add(b);
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
index 78ab7ec237d..bacb18806d6 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
@@ -127,7 +127,7 @@ public class TestCompaction extends HBaseTestCase {
     // the compaction threshold of 3 store files. Compacting these store files
     // should result in a compacted store file that has no references to the
    // deleted row.
-    r.deleteAll(STARTROW, COLUMN_FAMILY_TEXT, System.currentTimeMillis());
+    r.deleteAll(STARTROW, COLUMN_FAMILY_TEXT, System.currentTimeMillis(),null);
     // Now, before compacting, remove all instances of the first row so can
     // verify that it is removed as we compact.
     // Assert all delted.
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteAll.java b/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteAll.java
index b8c9e15c061..0844249f763 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteAll.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteAll.java
@@ -116,7 +116,7 @@ public class TestDeleteAll extends HBaseTestCase {
     if (flush) {region_incommon.flushcache();}
 
     // call delete all at a timestamp, make sure only the most recent stuff is left behind
-    region.deleteAll(row, t1);
+    region.deleteAll(row, t1, null);
     if (flush) {region_incommon.flushcache();}
     assertCellEquals(region, row, colA, t0, cellData(0, flush));
     assertCellEquals(region, row, colA, t1, null);
@@ -126,7 +126,7 @@
     assertCellEquals(region, row, colD, t2, null);
 
     // call delete all w/o a timestamp, make sure nothing is left.
-    region.deleteAll(row, HConstants.LATEST_TIMESTAMP);
+    region.deleteAll(row, HConstants.LATEST_TIMESTAMP, null);
     if (flush) {region_incommon.flushcache();}
     assertCellEquals(region, row, colA, t0, null);
     assertCellEquals(region, row, colA, t1, null);
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteFamily.java b/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteFamily.java
index 43fbf6a089c..a2e01dfb8ab 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteFamily.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteFamily.java
@@ -110,7 +110,7 @@ public class TestDeleteFamily extends HBaseTestCase {
     // call delete family at a timestamp, make sure only the most recent stuff
     // for column c is left behind
-    region.deleteFamily(row, COLUMNS[0], t1);
+    region.deleteFamily(row, COLUMNS[0], t1, null);
     if (flush) {region_incommon.flushcache();}
     // most recent for A,B,C should be fine
     // A,B at older timestamps should be gone
@@ -127,7 +127,7 @@
     // call delete family w/o a timestamp, make sure nothing is left except for
     // column C.
-    region.deleteFamily(row, COLUMNS[0], HConstants.LATEST_TIMESTAMP);
+    region.deleteFamily(row, COLUMNS[0], HConstants.LATEST_TIMESTAMP, null);
     if (flush) {region_incommon.flushcache();}
     // A,B for latest timestamp should be gone
     // C should still be fine
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java b/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java
index f70fccbda3d..cbcdd2d4f90 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java
@@ -73,15 +73,15 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       batchUpdate = new BatchUpdate(T00);
       batchUpdate.put(COLUMNS[0], T00.getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(T10);
       batchUpdate.put(COLUMNS[0], T10.getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(T20);
       batchUpdate.put(COLUMNS[0], T20.getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       Map results = region.getClosestRowBefore(Bytes.toBytes(T20));
@@ -89,21 +89,21 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       batchUpdate = new BatchUpdate(T20);
       batchUpdate.delete(COLUMNS[0]);
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       results = region.getClosestRowBefore(Bytes.toBytes(T20));
       assertEquals(T10, new String(results.get(COLUMNS[0]).getValue()));
 
       batchUpdate = new BatchUpdate(T30);
       batchUpdate.put(COLUMNS[0], T30.getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       results = region.getClosestRowBefore(Bytes.toBytes(T30));
       assertEquals(T30, new String(results.get(COLUMNS[0]).getValue()));
 
       batchUpdate = new BatchUpdate(T30);
       batchUpdate.delete(COLUMNS[0]);
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       results = region.getClosestRowBefore(Bytes.toBytes(T30));
       assertEquals(T10, new String(results.get(COLUMNS[0]).getValue()));
@@ -122,7 +122,7 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       // and answer of t20.
       batchUpdate = new BatchUpdate(T20);
       batchUpdate.put(COLUMNS[1], T20.getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       results = region.getClosestRowBefore(Bytes.toBytes(T30));
       assertEquals(T20, new String(results.get(COLUMNS[1]).getValue()));
@@ -138,7 +138,7 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       // in memory; make sure we get back t10 again.
       batchUpdate = new BatchUpdate(T20);
       batchUpdate.delete(COLUMNS[1]);
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       results = region.getClosestRowBefore(Bytes.toBytes(T30));
       assertEquals(T10, new String(results.get(COLUMNS[0]).getValue()));
@@ -153,10 +153,10 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       // the candidate be in memory.
       batchUpdate = new BatchUpdate(T11);
       batchUpdate.put(COLUMNS[0], T11.getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
       batchUpdate = new BatchUpdate(T10);
       batchUpdate.delete(COLUMNS[0]);
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
       results = region.getClosestRowBefore(Bytes.toBytes(T12));
       assertEquals(T11, new String(results.get(COLUMNS[0]).getValue()));
     } finally {
@@ -296,27 +296,27 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       batchUpdate = new BatchUpdate(t10);
       batchUpdate.put(COLUMNS[0], "t10 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(t20);
       batchUpdate.put(COLUMNS[0], "t20 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(t30);
       batchUpdate.put(COLUMNS[0], "t30 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(t35);
       batchUpdate.put(COLUMNS[0], "t35 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(t35);
       batchUpdate.delete(COLUMNS[0]);
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(t40);
       batchUpdate.put(COLUMNS[0], "t40 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       // try finding "015"
       String t15 = "015";
@@ -386,15 +386,15 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       batchUpdate = new BatchUpdate(t10);
       batchUpdate.put(COLUMNS[0], "t10 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(t30);
       batchUpdate.put(COLUMNS[0], "t30 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       batchUpdate = new BatchUpdate(t40);
       batchUpdate.put(COLUMNS[0], "t40 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       // try finding "035"
       String t35 = "035";
@@ -410,7 +410,7 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       batchUpdate = new BatchUpdate(t20);
       batchUpdate.put(COLUMNS[0], "t20 bytes".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       // try finding "035"
       results = region.getClosestRowBefore(Bytes.toBytes(t35));
@@ -450,7 +450,7 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
     bu.put(COLUMNS[0], "column 0".getBytes());
     bu.put(COLUMNS[1], "column 1".getBytes());
     bu.put(COLUMNS[2], "column 2".getBytes());
-    region.batchUpdate(bu);
+    region.batchUpdate(bu, null);
 
     assertSpecifiedColumns(region, row);
     // try it again with a cache flush to involve the store, not just the
@@ -483,25 +483,25 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
     // make sure we get all of them with standard getFull
     Map result = region.getFull(row, null,
-      HConstants.LATEST_TIMESTAMP);
+      HConstants.LATEST_TIMESTAMP, null);
     assertEquals(new String(result.get(COLUMNS[0]).getValue()), "column 0");
     assertEquals(new String(result.get(COLUMNS[1]).getValue()), "column 1");
     assertEquals(new String(result.get(COLUMNS[2]).getValue()), "column 2");
 
     // try to get just one
-    result = region.getFull(row, one, HConstants.LATEST_TIMESTAMP);
+    result = region.getFull(row, one, HConstants.LATEST_TIMESTAMP, null);
     assertEquals(new String(result.get(COLUMNS[0]).getValue()), "column 0");
     assertNull(result.get(COLUMNS[1]));
     assertNull(result.get(COLUMNS[2]));
 
     // try to get all of them (specified)
-    result = region.getFull(row, all, HConstants.LATEST_TIMESTAMP);
+    result = region.getFull(row, all, HConstants.LATEST_TIMESTAMP, null);
     assertEquals(new String(result.get(COLUMNS[0]).getValue()), "column 0");
     assertEquals(new String(result.get(COLUMNS[1]).getValue()), "column 1");
     assertEquals(new String(result.get(COLUMNS[2]).getValue()), "column 2");
 
     // try to get none with empty column set
-    result = region.getFull(row, none, HConstants.LATEST_TIMESTAMP);
+    result = region.getFull(row, none, HConstants.LATEST_TIMESTAMP, null);
     assertNull(result.get(COLUMNS[0]));
     assertNull(result.get(COLUMNS[1]));
     assertNull(result.get(COLUMNS[2]));
@@ -526,25 +526,25 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       // write some data
       batchUpdate = new BatchUpdate(row);
       batchUpdate.put(COLUMNS[0], "olderValue".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       // flush
       region.flushcache();
 
       // assert that getFull gives us the older value
-      results = region.getFull(row, (Set)null, LATEST_TIMESTAMP);
+      results = region.getFull(row, (Set)null, LATEST_TIMESTAMP, null);
       assertEquals("olderValue", new String(results.get(COLUMNS[0]).getValue()));
 
       // write a new value for the cell
       batchUpdate = new BatchUpdate(row);
       batchUpdate.put(COLUMNS[0], "newerValue".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       // flush
       region.flushcache();
 
       // assert that getFull gives us the later value
-      results = region.getFull(row, (Set)null, LATEST_TIMESTAMP);
+      results = region.getFull(row, (Set)null, LATEST_TIMESTAMP, null);
       assertEquals("newerValue", new String(results.get(COLUMNS[0]).getValue()));
 
       //
@@ -559,13 +559,13 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       batchUpdate = new BatchUpdate(row2);
       batchUpdate.put(cell1, "column0 value".getBytes());
       batchUpdate.put(cell2, "column1 value".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       // flush
       region.flushcache();
 
       // assert i get both columns
-      results = region.getFull(row2, (Set)null, LATEST_TIMESTAMP);
+      results = region.getFull(row2, (Set)null, LATEST_TIMESTAMP, null);
       assertEquals("Should have two columns in the results map", 2, results.size());
       assertEquals("column0 value", new String(results.get(cell1).getValue()));
       assertEquals("column1 value", new String(results.get(cell2).getValue()));
@@ -574,13 +574,13 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       batchUpdate = new BatchUpdate(row2);
       batchUpdate.delete(cell1);
       batchUpdate.put(cell2, "column1 new value".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       // flush
       region.flushcache();
 
       // assert i get the second column only
-      results = region.getFull(row2, (Set)null, LATEST_TIMESTAMP);
+      results = region.getFull(row2, (Set)null, LATEST_TIMESTAMP, null);
       assertEquals("Should have one column in the results map", 1, results.size());
       assertNull("column0 value", results.get(cell1));
       assertEquals("column1 new value", new String(results.get(cell2).getValue()));
@@ -591,10 +591,10 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
       batchUpdate = new BatchUpdate(row2);
       batchUpdate.delete(cell2);
       batchUpdate.put(cell3, "column2 value!".getBytes());
-      region.batchUpdate(batchUpdate);
+      region.batchUpdate(batchUpdate, null);
 
       // assert i get the third column only
-      results = region.getFull(row2, (Set)null, LATEST_TIMESTAMP);
+      results = region.getFull(row2, (Set)null, LATEST_TIMESTAMP, null);
       assertEquals("Should have one column in the results map", 1, results.size());
       assertNull("column0 value", results.get(cell1));
       assertNull("column1 value", results.get(cell2));
@@ -614,7 +614,8 @@ public class TestGet2 extends HBaseTestCase implements HConstants {
 
   private void assertColumnsPresent(final HRegion r, final byte [] row)
   throws IOException {
-    Map result = r.getFull(row, null, HConstants.LATEST_TIMESTAMP);
+    Map result =
+      r.getFull(row, null, HConstants.LATEST_TIMESTAMP, null);
     int columnCount = 0;
     for (Map.Entry e: result.entrySet()) {
       columnCount++;
diff --git a/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java b/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java
index e2ede3e560c..c1c0e5f493a 100644
--- a/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -125,7 +125,7 @@ public class TestMergeTool extends HBaseTestCase {
         byte [] row = rows[i][j];
         BatchUpdate b = new BatchUpdate(row);
        b.put(COLUMN_NAME, new ImmutableBytesWritable(row).get());
-        regions[i].batchUpdate(b);
+        regions[i].batchUpdate(b, null);
       }
       HRegion.addRegionToMETA(meta, regions[i]);
     }
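
Note on usage: the RowLock class added above is the client-side handle for the explicit row locking introduced by HBASE-798 (the internal HRegion/HRegionServer methods in this patch now take an extra lock argument, passing null or -1L when no explicit lock is held). The sketch below is only an illustration of how a client might exercise the feature; it assumes HTable exposes lockRow/unlockRow and a commit overload that accepts a RowLock (those client-side methods belong to the main HBASE-798 patch, not to this follow-up), and the table, column, and row names are placeholders.

import java.io.IOException;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.RowLock;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.util.Bytes;

public class RowLockExample {
  public static void main(String[] args) throws IOException {
    // Hypothetical table and row names used only for illustration.
    HTable table = new HTable(new HBaseConfiguration(), "mytable");
    byte [] row = Bytes.toBytes("myrow");
    // Take an explicit lock on the row; the returned RowLock carries the row
    // name and the server-assigned lock id (see the RowLock class above).
    RowLock lock = table.lockRow(row);
    try {
      BatchUpdate update = new BatchUpdate(row);
      update.put(Bytes.toBytes("info:qualifier"), Bytes.toBytes("value"));
      // Commit the update under the held lock (assumed overload taking a RowLock).
      table.commit(update, lock);
    } finally {
      // Always release the lock so the region server does not hold it until timeout.
      table.unlockRow(lock);
    }
  }
}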