From c68cea5b456c1949e49fc32d80ed7c9acbee14ec Mon Sep 17 00:00:00 2001
From: Enis Soztutar
Date: Mon, 6 May 2013 23:12:55 +0000
Subject: [PATCH] HBASE-8478 HBASE-2231 breaks TestHRegion#testRecoveredEditsReplayCompaction under hadoop2 profile

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1479716 13f79535-47bb-0310-9956-ffa450edef68
---
 .../java/org/apache/hadoop/hbase/util/FSUtils.java |  4 +++-
 .../hadoop/hbase/regionserver/TestHRegion.java     | 14 ++++++++++----
 2 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index fdaf9527e05..4c6235390d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -1398,7 +1398,9 @@ public abstract class FSUtils {
 
   /**
    * Calls fs.listStatus() and treats FileNotFoundException as non-fatal
-   * This accommodates differences between hadoop versions
+   * This accommodates differences between hadoop versions, where Hadoop 1
+   * does not throw a FileNotFoundException but returns an empty FileStatus[],
+   * while Hadoop 2 will throw FileNotFoundException.
    *
    * @param fs file system
    * @param dir directory
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 57f49060906..07f02ade105 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -95,6 +95,7 @@ import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
+import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.PairOfSameType;
@@ -422,8 +423,11 @@ public class TestHRegion extends HBaseTestCase {
 
     //now find the compacted file, and manually add it to the recovered edits
     Path tmpDir = region.getRegionFileSystem().getTempDir();
-    FileStatus[] files = region.getRegionFileSystem().getFileSystem().listStatus(tmpDir);
-    assertEquals(1, files.length);
+    FileStatus[] files = FSUtils.listStatus(fs, tmpDir);
+    String errorMsg = "Expected to find 1 file in the region temp directory " +
+        "from the compaction, could not find any";
+    assertNotNull(errorMsg, files);
+    assertEquals(errorMsg, 1, files.length);
 
     //move the file inside region dir
     Path newFile = region.getRegionFileSystem().commitStoreFile(Bytes.toString(family),
       files[0].getPath());
@@ -459,8 +463,9 @@ public class TestHRegion extends HBaseTestCase {
       LOG.info(sf.getPath());
     }
     assertEquals(1, region.getStore(family).getStorefilesCount());
-    files = region.getRegionFileSystem().getFileSystem().listStatus(tmpDir);
-    assertEquals(0, files.length);
+    files = FSUtils.listStatus(fs, tmpDir);
+    assertTrue("Expected to find 0 files inside " + tmpDir,
+      files == null || files.length == 0);
 
     for (long i = minSeqId; i < maxSeqId; i++) {
       Get get = new Get(Bytes.toBytes(i));
@@ -3176,6 +3181,7 @@ public class TestHRegion extends HBaseTestCase {
     ctx.addThread(new RepeatingTestThread(ctx) {
       private int flushesSinceCompact = 0;
       private final int maxFlushesSinceCompact = 20;
+      @Override
       public void doAnAction() throws Exception {
         if (region.flushcache()) {
           ++flushesSinceCompact;
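
As a usage note, below is a minimal, self-contained sketch of the null-safe listing pattern the test switches to. The class name, the Configuration/FileSystem setup, and the command-line path argument are illustrative assumptions; FSUtils.listStatus(fs, dir) is the same two-argument helper the patch calls, and the null check mirrors the assertions added above, because the helper treats FileNotFoundException as non-fatal and may return null rather than throwing (Hadoop 2) or returning an empty array (Hadoop 1).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.FSUtils;

public class ListTempDirSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative setup: any FileSystem/Path pair behaves the same way.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path tmpDir = new Path(args[0]);

    // FSUtils.listStatus() swallows FileNotFoundException, so a missing
    // directory shows up as null here on both Hadoop 1 and Hadoop 2 instead
    // of an exception on Hadoop 2 or an empty array on Hadoop 1.
    FileStatus[] files = FSUtils.listStatus(fs, tmpDir);
    if (files == null || files.length == 0) {
      System.out.println("No files under " + tmpDir);
      return;
    }
    for (FileStatus status : files) {
      System.out.println(status.getPath());
    }
  }
}

Returning null instead of propagating the exception keeps callers on both Hadoop lines behind a single check, which is exactly what the reworked assertions in testRecoveredEditsReplayCompaction rely on.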