diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java
index 915534487da..ce7cfd72930 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java
@@ -130,17 +130,33 @@ public void shutDownCluster() throws Exception {
   public Timeout timeout = new Timeout(300000);
 
   protected final LocatedBlocks ensureFileReplicasOnStorageType(
-      Path path, StorageType storageType) throws IOException {
+      final Path path, final StorageType storageType)
+      throws IOException, TimeoutException, InterruptedException {
     // Ensure that returned block locations returned are correct!
     LOG.info("Ensure path: " + path + " is on StorageType: " + storageType);
     assertThat(fs.exists(path), is(true));
-    long fileLength = client.getFileInfo(path.toString()).getLen();
-    LocatedBlocks locatedBlocks =
-        client.getLocatedBlocks(path.toString(), 0, fileLength);
-    for (LocatedBlock locatedBlock : locatedBlocks.getLocatedBlocks()) {
-      assertThat(locatedBlock.getStorageTypes()[0], is(storageType));
-    }
-    return locatedBlocks;
+    final long fileLength = client.getFileInfo(path.toString()).getLen();
+
+    GenericTestUtils.waitFor(new Supplier<Boolean>() {
+      @Override
+      public Boolean get() {
+        try {
+          LocatedBlocks locatedBlocks =
+              client.getLocatedBlocks(path.toString(), 0, fileLength);
+          for (LocatedBlock locatedBlock : locatedBlocks.getLocatedBlocks()) {
+            if (locatedBlock.getStorageTypes()[0] != storageType) {
+              return false;
+            }
+          }
+          return true;
+        } catch (IOException ioe) {
+          LOG.warn("Exception got in ensureFileReplicasOnStorageType()", ioe);
+          return false;
+        }
+      }
+    }, 100, 30 * 1000);
+
+    return client.getLocatedBlocks(path.toString(), 0, fileLength);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java
index 950e9dc314a..8c435923e77 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java
@@ -29,6 +29,7 @@
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import static org.apache.hadoop.fs.StorageType.RAM_DISK;
@@ -89,7 +90,7 @@ public void testTruncateIsDenied() throws IOException {
    */
   @Test
   public void testCorruptFilesAreDiscarded()
-      throws IOException, InterruptedException {
+      throws IOException, InterruptedException, TimeoutException {
     getClusterBuilder().setRamDiskReplicaCapacity(2).build();
     final String METHOD_NAME = GenericTestUtils.getMethodName();
     Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
@@ -123,7 +124,7 @@ public void testCorruptFilesAreDiscarded()
 
   @Test
   public void testDisableLazyPersistFileScrubber()
-      throws IOException, InterruptedException {
+      throws IOException, InterruptedException, TimeoutException {
     getClusterBuilder().setRamDiskReplicaCapacity(2).disableScrubber().build();
     final String METHOD_NAME = GenericTestUtils.getMethodName();
     Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
@@ -151,8 +152,8 @@ public void testDisableLazyPersistFileScrubber()
    * If NN restarted then lazyPersist files should not deleted
    */
   @Test
-  public void testFileShouldNotDiscardedIfNNRestarted() throws IOException,
-      InterruptedException {
+  public void testFileShouldNotDiscardedIfNNRestarted()
+      throws IOException, InterruptedException, TimeoutException {
     getClusterBuilder().setRamDiskReplicaCapacity(2).build();
     final String METHOD_NAME = GenericTestUtils.getMethodName();
     Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistLockedMemory.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistLockedMemory.java
index eef8f0bbf71..d154b1fcad6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistLockedMemory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistLockedMemory.java
@@ -53,7 +53,8 @@ public class TestLazyPersistLockedMemory extends LazyPersistTestCase {
    * fall back to disk.
    */
   @Test
-  public void testWithNoLockedMemory() throws IOException {
+  public void testWithNoLockedMemory()
+      throws IOException, TimeoutException, InterruptedException {
     getClusterBuilder().setNumDatanodes(1)
                        .setMaxLockedMemory(0).build();
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaPlacement.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaPlacement.java
index c89475aee8b..c16dbe5604d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaPlacement.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaPlacement.java
@@ -26,6 +26,7 @@
 import org.junit.Test;
 
 import java.io.IOException;
+import java.util.concurrent.TimeoutException;
 
 import static org.apache.hadoop.fs.StorageType.DEFAULT;
 import static org.apache.hadoop.fs.StorageType.RAM_DISK;
@@ -35,7 +36,8 @@ public class TestLazyPersistReplicaPlacement extends LazyPersistTestCase {
 
   @Test
-  public void testPlacementOnRamDisk() throws IOException {
+  public void testPlacementOnRamDisk()
+      throws IOException, TimeoutException, InterruptedException {
     getClusterBuilder().build();
     final String METHOD_NAME = GenericTestUtils.getMethodName();
     Path path = new Path("/" + METHOD_NAME + ".dat");
@@ -45,7 +47,8 @@ public void testPlacementOnRamDisk() throws IOException {
   }
 
   @Test
-  public void testPlacementOnSizeLimitedRamDisk() throws IOException {
+  public void testPlacementOnSizeLimitedRamDisk()
+      throws IOException, TimeoutException, InterruptedException {
     getClusterBuilder().setRamDiskReplicaCapacity(3).build();
     final String METHOD_NAME = GenericTestUtils.getMethodName();
     Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
@@ -64,7 +67,8 @@ public void testPlacementOnSizeLimitedRamDisk() throws IOException {
    * @throws IOException
    */
   @Test
-  public void testFallbackToDisk() throws IOException {
+  public void testFallbackToDisk()
+      throws IOException, TimeoutException, InterruptedException {
     getClusterBuilder().setHasTransientStorage(false).build();
     final String METHOD_NAME = GenericTestUtils.getMethodName();
     Path path = new Path("/" + METHOD_NAME + ".dat");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaRecovery.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaRecovery.java
index 231353ad945..537f9e8d621 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaRecovery.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaRecovery.java
@@ -23,6 +23,7 @@
 import org.junit.Test;
 
 import java.io.IOException;
+import java.util.concurrent.TimeoutException;
 
 import static org.apache.hadoop.fs.StorageType.DEFAULT;
 import static org.apache.hadoop.fs.StorageType.RAM_DISK;
@@ -30,7 +31,7 @@ public class TestLazyPersistReplicaRecovery extends LazyPersistTestCase {
 
   @Test
   public void testDnRestartWithSavedReplicas()
-      throws IOException, InterruptedException {
+      throws IOException, InterruptedException, TimeoutException {
     getClusterBuilder().build();
     final String METHOD_NAME = GenericTestUtils.getMethodName();
@@ -55,7 +56,7 @@ public void testDnRestartWithSavedReplicas()
 
   @Test
   public void testDnRestartWithUnsavedReplicas()
-      throws IOException, InterruptedException {
+      throws IOException, InterruptedException, TimeoutException {
     getClusterBuilder().build();
     FsDatasetTestUtil.stopLazyWriter(cluster.getDataNodes().get(0));
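
The common thread in this patch is that `ensureFileReplicasOnStorageType()` no longer asserts on the replicas' storage types exactly once; it polls with `GenericTestUtils.waitFor(...)` until every located block reports the expected `StorageType`, re-checking every 100 ms for up to 30 seconds. Each test method therefore now declares the `TimeoutException` and `InterruptedException` that `waitFor` can throw. The sketch below shows that polling pattern in isolation, assuming the Guava `Supplier` that this branch's `GenericTestUtils.waitFor` accepts; the `WaitForSketch` class and its `conditionHolds()` placeholder are illustrative names only, not part of the patch.

```java
import java.util.concurrent.TimeoutException;

import com.google.common.base.Supplier;
import org.apache.hadoop.test.GenericTestUtils;

public class WaitForSketch {

  private static final long START = System.currentTimeMillis();

  // Placeholder condition that becomes true about a second after startup.
  // In the patch, the real condition is "every located block of the file
  // reports the expected StorageType".
  private static boolean conditionHolds() {
    return System.currentTimeMillis() - START > 1000;
  }

  public static void main(String[] args)
      throws TimeoutException, InterruptedException {
    // Re-check the condition every 100 ms and give up after 30 seconds,
    // mirroring the (100, 30 * 1000) arguments used in the patch.
    // waitFor returns as soon as the Supplier yields true and throws
    // TimeoutException if it never does within the deadline.
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        return conditionHolds();
      }
    }, 100, 30 * 1000);
  }
}
```

Polling like this trades a fixed sleep for a bounded wait that exits as soon as the asynchronous lazy-persist work completes, which is why the tests only gain checked exceptions rather than extra wall-clock time on the happy path.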