diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 1110eabf4b1..4c00732e030 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -40,6 +40,8 @@ Release 2.7.3 - UNRELEASED
     HDFS-9383. TestByteArrayManager#testByteArrayManager fails. (szetszwo via
     kihwal)
 
+    HDFS-9072. Fix random failures in TestJMXGet (J.Andreina via kihwal)
+
     HDFS-6481. DatanodeManager#getDatanodeStorageInfos() should check the
     length of storageIDs. (szetszwo via Arpit Agarwal)
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
index d7e1d92d6f3..0fbc6316b7b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
@@ -27,6 +27,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.UnhandledException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -86,6 +87,7 @@ import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo;
 import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo.BlockStatus;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
+import org.apache.hadoop.hdfs.tools.JMXGet;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.net.NetUtils;
@@ -1811,4 +1813,22 @@ public class DFSTestUtil {
     lastBlock.setNumBytes(len);
     return lastBlock;
   }
+
+  public static void waitForMetric(final JMXGet jmx, final String metricName, final int expectedValue)
+      throws TimeoutException, InterruptedException {
+    GenericTestUtils.waitFor(new Supplier<Boolean>() {
+      @Override
+      public Boolean get() {
+        try {
+          final int currentValue = Integer.parseInt(jmx.getValue(metricName));
+          LOG.info("Waiting for " + metricName +
+              " to reach value " + expectedValue +
+              ", current value = " + currentValue);
+          return currentValue == expectedValue;
+        } catch (Exception e) {
+          throw new UnhandledException("Test failed due to unexpected exception", e);
+        }
+      }
+    }, 1000, Integer.MAX_VALUE);
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestJMXGet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestJMXGet.java
index c69e73a0598..278fbb80dad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestJMXGet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestJMXGet.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.tools.JMXGet;
@@ -114,10 +115,12 @@ public class TestJMXGet {
     assertTrue("error printAllValues", checkPrintAllValues(jmx));
 
     //get some data from different source
+    DFSTestUtil.waitForMetric(jmx, "NumLiveDataNodes", numDatanodes);
     assertEquals(numDatanodes, Integer.parseInt(
         jmx.getValue("NumLiveDataNodes")));
     assertGauge("CorruptBlocks", Long.parseLong(jmx.getValue("CorruptBlocks")),
                 getMetrics("FSNamesystem"));
+    DFSTestUtil.waitForMetric(jmx, "NumOpenConnections", numDatanodes);
     assertEquals(numDatanodes, Integer.parseInt(
         jmx.getValue("NumOpenConnections")));
 
@@ -161,6 +164,7 @@ public class TestJMXGet {
     String serviceName = "DataNode";
     jmx.setService(serviceName);
     jmx.init();
+    DFSTestUtil.waitForMetric(jmx, "BytesWritten", fileSize);
     assertEquals(fileSize, Integer.parseInt(jmx.getValue("BytesWritten")));
 
     cluster.shutdown();