diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java
index ac25da3fbdd..1a1fc16215a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRedudantBlocks.java
@@ -35,8 +35,10 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
+import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoStriped;
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
 import org.apache.hadoop.hdfs.util.StripedBlockUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -108,18 +110,26 @@ public class TestRedudantBlocks {
     blk.setBlockId(groupId + 2);
     cluster.injectBlocks(i, Arrays.asList(blk), bpid);
 
+    BlockInfoStriped blockInfo =
+        (BlockInfoStriped)cluster.getNamesystem().getBlockManager()
+        .getStoredBlock(new Block(groupId));
     // update blocksMap
     cluster.triggerBlockReports();
     // delete redundant block
     cluster.triggerHeartbeats();
     //wait for IBR
-    Thread.sleep(1100);
+    GenericTestUtils.waitFor(
+        () -> cluster.getNamesystem().getBlockManager()
+            .countNodes(blockInfo).liveReplicas() >= groupSize -1,
+        500, 10000);
 
     // trigger reconstruction
     cluster.triggerHeartbeats();
-    //wait for IBR
-    Thread.sleep(1100);
+    GenericTestUtils.waitFor(
+        () -> cluster.getNamesystem().getBlockManager()
+            .countNodes(blockInfo).liveReplicas() >= groupSize,
+        500, 10000);
 
     HashSet blockIdsSet = new HashSet();
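
Note on the approach (not part of the patch itself): the fixed Thread.sleep(1100) calls assume the incremental block reports (IBRs) always arrive within 1.1 seconds, which can make the test flaky on slow hosts. The patch instead polls the NameNode's view of the striped block group, re-checking every 500 ms and failing after 10 seconds. A minimal sketch of that polling pattern, assuming a running MiniDFSCluster and a BlockInfoStriped handle as in the test above (expectedReplicas is an illustrative name, not one from the patch):

    // Sketch only: wait until the NameNode reports enough live replicas,
    // instead of sleeping for a fixed interval.
    GenericTestUtils.waitFor(
        () -> cluster.getNamesystem().getBlockManager()
            .countNodes(blockInfo).liveReplicas() >= expectedReplicas,
        500,     // re-check the live replica count every 500 ms
        10000);  // give up (TimeoutException fails the test) after 10 s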