From 73d6cbb4cf28655de2845b900e8ff018128fc10b Mon Sep 17 00:00:00 2001 From: Enis Soztutar Date: Wed, 23 Jan 2013 19:32:06 +0000 Subject: [PATCH] HBASE-6832. [WINDOWS] Tests should use explicit timestamp for Puts, and not rely on implicit RS timing git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1437643 13f79535-47bb-0310-9956-ffa450edef68 --- .../util/IncrementingEnvironmentEdge.java | 25 ++++++- .../coprocessor/TestRegionObserverBypass.java | 37 ++++++++--- .../hfile/TestScannerSelectionUsingTTL.java | 7 +- .../hbase/master/cleaner/TestLogsCleaner.java | 2 +- .../hbase/regionserver/TestKeepDeletes.java | 66 ++++++++++++++----- .../hadoop/hbase/regionserver/TestStore.java | 11 ++-- .../hbase/thrift/TestThriftServerCmdLine.java | 7 ++ .../util/TestIncrementingEnvironmentEdge.java | 2 +- 8 files changed, 123 insertions(+), 34 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java index fbbdd84cc14..c1ce25c33dd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java @@ -26,7 +26,22 @@ import org.apache.hadoop.classification.InterfaceAudience; @InterfaceAudience.Private public class IncrementingEnvironmentEdge implements EnvironmentEdge { - private long timeIncrement = 1; + private long timeIncrement; + + /** + * Construct an incremental edge starting from currentTimeMillis + */ + public IncrementingEnvironmentEdge() { + this(System.currentTimeMillis()); + } + + /** + * Construct an incremental edge with an initial amount + * @param initialAmount the initial value to start with + */ + public IncrementingEnvironmentEdge(long initialAmount) { + this.timeIncrement = initialAmount; + } /** * {@inheritDoc} @@ -38,4 +53,12 @@ public class IncrementingEnvironmentEdge implements EnvironmentEdge { public synchronized long currentTimeMillis() { return timeIncrement++; } + + /** + * Increment the time by the given amount + */ + public synchronized long incrementTime(long amount) { + timeIncrement += amount; + return timeIncrement; + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java index 88539d1e360..ef4efe52d95 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java @@ -18,31 +18,35 @@ */ package org.apache.hadoop.hbase.coprocessor; +import static junit.framework.Assert.assertEquals; + import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; -import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost; -import org.apache.hadoop.hbase.coprocessor.ObserverContext; -import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; +import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; import org.junit.AfterClass; +import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import static org.junit.Assert.assertEquals; - @Category(MediumTests.class) public class TestRegionObserverBypass { private static HBaseTestingUtility util; @@ -60,7 +64,6 @@ public class TestRegionObserverBypass { TestCoprocessor.class.getName()); util = new HBaseTestingUtility(conf); util.startMiniCluster(); - util.createTable(tableName, new byte[][] {dummy, test}); } @AfterClass @@ -68,6 +71,18 @@ public class TestRegionObserverBypass { util.shutdownMiniCluster(); } + @Before + public void setUp() throws Exception { + HBaseAdmin admin = util.getHBaseAdmin(); + if (admin.tableExists(tableName)) { + if (admin.isTableEnabled(tableName)) { + admin.disableTable(tableName); + } + admin.deleteTable(tableName); + } + util.createTable(tableName, new byte[][] {dummy, test}); + } + /** * do a single put that is bypassed by a RegionObserver * @throws Exception @@ -89,6 +104,10 @@ public class TestRegionObserverBypass { */ @Test public void testMulti() throws Exception { + //ensure that server time increments every time we do an operation, otherwise + //previous deletes will eclipse successive puts having the same timestamp + EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); + HTable t = new HTable(util.getConfiguration(), tableName); List puts = new ArrayList(); Put p = new Put(row1); @@ -170,6 +189,8 @@ public class TestRegionObserverBypass { checkRowAndDelete(t,row2,1); checkRowAndDelete(t,row3,0); t.close(); + + EnvironmentEdgeManager.reset(); } private void checkRowAndDelete(HTable t, byte[] row, int count) throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java index 0179724193b..b501eb55458 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -109,20 +110,24 @@ public class TestScannerSelectionUsingTTL { HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(info.getEncodedName()), conf, htd); + long ts = EnvironmentEdgeManager.currentTimeMillis(); + long version = 0; //make sure each new set of Put's have a new ts for (int iFile = 0; iFile < totalNumFiles; ++iFile) { if (iFile == NUM_EXPIRED_FILES) { Threads.sleepWithoutInterrupt(TTL_MS); + version += TTL_MS; } for (int iRow = 0; iRow < NUM_ROWS; 
++iRow) { Put put = new Put(Bytes.toBytes("row" + iRow)); for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) { put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol), - Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol)); + ts + version, Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol)); } region.put(put); } region.flushcache(); + version++; } Scan scan = new Scan(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java index 80d30924bbb..5205d129233 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java @@ -63,7 +63,7 @@ public class TestLogsCleaner { public void testLogCleaning() throws Exception{ Configuration conf = TEST_UTIL.getConfiguration(); // set TTL - long ttl = 2000; + long ttl = 10000; conf.setLong("hbase.master.logcleaner.ttl", ttl); conf.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true); Replication.decorateMasterConfiguration(conf); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java index 260431ffd3e..d0ecf4d8ef6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java @@ -21,13 +21,21 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HBaseTestCase; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; +import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; import org.junit.experimental.categories.Category; @Category(SmallTests.class) @@ -43,6 +51,28 @@ public class TestKeepDeletes extends HBaseTestCase { private final byte[] c0 = COLUMNS[0]; private final byte[] c1 = COLUMNS[1]; + @Override + protected void setUp() throws Exception { + super.setUp(); + /* HBASE-6832: [WINDOWS] Tests should use explicit timestamp for Puts, and not rely on + * implicit RS timing. + * Use an explicit timer (IncrementingEnvironmentEdge) so that the put, delete + * compact timestamps are tracked. Otherwise, forced major compaction will not purge + * Delete's having the same timestamp. see ScanQueryMatcher.match(): + * if (retainDeletesInOutput + * || (!isUserScan && (EnvironmentEdgeManager.currentTimeMillis() - timestamp) + * <= timeToPurgeDeletes) ... ) + * + */ + EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); + } + + @Override + protected void tearDown() throws Exception { + super.tearDown(); + EnvironmentEdgeManager.reset(); + } + /** * Make sure that deleted rows are retained. * Family delete markers are deleted. 
@@ -55,7 +85,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -138,7 +168,7 @@ public class TestKeepDeletes extends HBaseTestCase { } /** - * Even when the store does not keep deletes a "raw" scan will + * Even when the store does not keep deletes a "raw" scan will * return everything it can find (unless discarding cells is guaranteed * to have no effect). * Assuming this the desired behavior. Could also disallow "raw" scanning @@ -151,7 +181,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, false); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -195,7 +225,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, false); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -242,7 +272,7 @@ public class TestKeepDeletes extends HBaseTestCase { s.setRaw(true); s.setMaxVersions(); s.addColumn(c0, c0); - + try { InternalScanner scan = region.getScanner(s); fail("raw scanner with columns should have failed"); @@ -261,7 +291,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -307,7 +337,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Delete d = new Delete(T1, ts); d.deleteColumns(c0, c0, ts); @@ -320,7 +350,7 @@ public class TestKeepDeletes extends HBaseTestCase { d = new Delete(T1, ts); d.deleteColumn(c0, c0, ts+1); region.delete(d, true); - + d = new Delete(T1, ts); d.deleteColumn(c0, c0, ts+2); region.delete(d, true); @@ -349,7 +379,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); @@ -372,7 +402,7 @@ public class TestKeepDeletes extends HBaseTestCase { d = new Delete(T1, ts); d.deleteColumn(c0, c0, ts+1); region.delete(d, true); - + d = new Delete(T1, ts); d.deleteColumn(c0, c0, ts+2); region.delete(d, true); @@ -411,7 +441,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); p.add(c0, c1, T1); @@ -492,7 +522,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -502,7 +532,7 @@ public 
class TestKeepDeletes extends HBaseTestCase { p = new Put(T1, ts-10); p.add(c0, c1, T1); region.put(p); - + Delete d = new Delete(T1, ts); // test corner case (Put and Delete have same TS) d.deleteColumns(c0, c0, ts); @@ -511,7 +541,7 @@ public class TestKeepDeletes extends HBaseTestCase { d = new Delete(T1, ts+1); d.deleteColumn(c0, c0, ts+1); region.delete(d, true); - + d = new Delete(T1, ts+3); d.deleteColumn(c0, c0, ts+3); region.delete(d, true); @@ -527,7 +557,7 @@ public class TestKeepDeletes extends HBaseTestCase { p = new Put(T1, ts+2); p.add(c0, c0, T2); region.put(p); - + // delete, put, delete, delete, put assertEquals(3, countDeleteMarkers(region)); @@ -584,7 +614,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); @@ -634,7 +664,7 @@ public class TestKeepDeletes extends HBaseTestCase { HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis() - 2000; // 2s in the past + long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000; // 2s in the past Put p = new Put(T1, ts); p.add(c0, c0, T3); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java index 01627da9daf..881fc4a9465 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java @@ -63,6 +63,7 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; +import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; import org.apache.hadoop.hbase.util.ManualEnvironmentEdge; import org.apache.hadoop.util.Progressable; import org.junit.experimental.categories.Category; @@ -186,6 +187,8 @@ public class TestStore extends TestCase { public void testDeleteExpiredStoreFiles() throws Exception { int storeFileNum = 4; int ttl = 4; + IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(); + EnvironmentEdgeManagerTestHelper.injectEdge(edge); Configuration conf = HBaseConfiguration.create(); // Enable the expired store file deletion @@ -205,7 +208,7 @@ public class TestStore extends TestCase { this.store.add(new KeyValue(row, family, qf2, timeStamp, (byte[]) null)); this.store.add(new KeyValue(row, family, qf3, timeStamp, (byte[]) null)); flush(i); - Thread.sleep(sleepTime); + edge.incrementTime(sleepTime); } // Verify the total number of store files @@ -220,8 +223,8 @@ public class TestStore extends TestCase { // If not the first compaction, there is another empty store file, assertEquals(Math.min(i, 2), cr.getFiles().size()); for (int j = 0; i < cr.getFiles().size(); j++) { - assertTrue(cr.getFiles().get(j).getReader().getMaxTimestamp() < (System - .currentTimeMillis() - this.store.scanInfo.getTtl())); + assertTrue(cr.getFiles().get(j).getReader().getMaxTimestamp() < + (EnvironmentEdgeManager.currentTimeMillis() - this.store.scanInfo.getTtl())); } // Verify that the expired store file is compacted to an empty store file. 
StoreFile compactedFile = this.store.compact(cr); @@ -229,7 +232,7 @@ public class TestStore extends TestCase { assertEquals(0, compactedFile.getReader().getEntries()); // Let the next store file expired. - Thread.sleep(sleepTime); + edge.incrementTime(sleepTime); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java index 747b14bf19c..e7633d31b26 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java @@ -32,6 +32,9 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType; import org.apache.hadoop.hbase.thrift.generated.Hbase; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; +import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; import org.apache.hadoop.hbase.util.Threads; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TCompactProtocol; @@ -115,11 +118,15 @@ public class TestThriftServerCmdLine { @BeforeClass public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); + //ensure that server time increments every time we do an operation, otherwise + //successive puts having the same timestamp will override each other + EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); } @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); + EnvironmentEdgeManager.reset(); } private void startCmdLineThread(final String[] args) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java index 6126df3d634..89450b229c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java @@ -34,7 +34,7 @@ public class TestIncrementingEnvironmentEdge { @Test public void testGetCurrentTimeUsesSystemClock() { - IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(); + IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(1); assertEquals(1, edge.currentTimeMillis()); assertEquals(2, edge.currentTimeMillis()); assertEquals(3, edge.currentTimeMillis());
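
The pattern the patch applies in each test is the same: inject an IncrementingEnvironmentEdge before the test mutates data, so every call to EnvironmentEdgeManager.currentTimeMillis() returns a strictly increasing value and back-to-back Puts/Deletes can never collide on a timestamp (the Windows coarse-timer problem the subject line refers to); advance the clock explicitly with incrementTime() instead of sleeping; and reset the edge afterwards. Below is a minimal standalone sketch of that pattern, not part of the patch: the class name IncrementingEdgeExample and the starting value 1000L are illustrative only, and EnvironmentEdgeManagerTestHelper is a test-scope helper, but the calls themselves are the ones the diff introduces or relies on.

// Illustrative sketch only -- not part of the patch above.
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;

public class IncrementingEdgeExample {
  public static void main(String[] args) {
    // Start the injected clock at a fixed value so timestamps are deterministic.
    IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(1000L);
    EnvironmentEdgeManagerTestHelper.injectEdge(edge);
    try {
      // Each read of the injected clock ticks it forward by one millisecond, so
      // two back-to-back mutations can never carry the same timestamp, even on
      // platforms with coarse (~15 ms) timer resolution such as Windows.
      long t1 = EnvironmentEdgeManager.currentTimeMillis(); // 1000
      long t2 = EnvironmentEdgeManager.currentTimeMillis(); // 1001

      // Jump the clock forward to let a TTL lapse without sleeping, the way
      // TestStore.testDeleteExpiredStoreFiles replaces Thread.sleep(sleepTime)
      // with edge.incrementTime(sleepTime).
      edge.incrementTime(4000L);
      long t3 = EnvironmentEdgeManager.currentTimeMillis(); // 5002

      System.out.println(t1 + " < " + t2 + " < " + t3); // strictly increasing
    } finally {
      // Restore the default system-clock edge so later code is unaffected,
      // mirroring the EnvironmentEdgeManager.reset() calls in the tests.
      EnvironmentEdgeManager.reset();
    }
  }
}

Swapping Thread.sleep for incrementTime() also makes the TTL and log-cleaner style tests both faster and less flaky, since expiry is driven by the injected clock rather than by real elapsed time.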