HDFS-9072. Fix random failures in TestJMXGet. Contributed by J.Andreina.
This commit is contained in:
parent
3d88293dda
commit
2f031830e8
|
@ -40,6 +40,8 @@ Release 2.7.3 - UNRELEASED

    HDFS-9383. TestByteArrayManager#testByteArrayManager fails.
    (szetszwo via kihwal)

    HDFS-9072. Fix random failures in TestJMXGet (J.Andreina via kihwal)

    HDFS-6481. DatanodeManager#getDatanodeStorageInfos() should check the
    length of storageIDs. (szetszwo via Arpit Agarwal)
|
|
@ -27,6 +27,7 @@ import com.google.common.collect.Lists;
|
|||
import com.google.common.collect.Maps;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang.UnhandledException;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
|
@ -86,6 +87,7 @@ import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo;
|
|||
import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo.BlockStatus;
|
||||
import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
|
||||
import org.apache.hadoop.hdfs.tools.DFSAdmin;
|
||||
import org.apache.hadoop.hdfs.tools.JMXGet;
|
||||
import org.apache.hadoop.io.IOUtils;
|
||||
import org.apache.hadoop.io.nativeio.NativeIO;
|
||||
import org.apache.hadoop.net.NetUtils;
|
||||
|
@ -1811,4 +1813,22 @@ public class DFSTestUtil {
|
|||
lastBlock.setNumBytes(len);
|
||||
return lastBlock;
|
||||
}
|
||||
|
||||
public static void waitForMetric(final JMXGet jmx, final String metricName, final int expectedValue)
|
||||
throws TimeoutException, InterruptedException {
|
||||
GenericTestUtils.waitFor(new Supplier<Boolean>() {
|
||||
@Override
|
||||
public Boolean get() {
|
||||
try {
|
||||
final int currentValue = Integer.parseInt(jmx.getValue(metricName));
|
||||
LOG.info("Waiting for " + metricName +
|
||||
" to reach value " + expectedValue +
|
||||
", current value = " + currentValue);
|
||||
return currentValue == expectedValue;
|
||||
} catch (Exception e) {
|
||||
throw new UnhandledException("Test failed due to unexpected exception", e);
|
||||
}
|
||||
}
|
||||
}, 1000, Integer.MAX_VALUE);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
|
|||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.hdfs.DFSTestUtil;
|
||||
import org.apache.hadoop.hdfs.HdfsConfiguration;
|
||||
import org.apache.hadoop.hdfs.MiniDFSCluster;
|
||||
import org.apache.hadoop.hdfs.tools.JMXGet;
|
||||
|
@ -114,10 +115,12 @@ public class TestJMXGet {
|
|||
assertTrue("error printAllValues", checkPrintAllValues(jmx));
|
||||
|
||||
//get some data from different source
|
||||
DFSTestUtil.waitForMetric(jmx, "NumLiveDataNodes", numDatanodes);
|
||||
assertEquals(numDatanodes, Integer.parseInt(
|
||||
jmx.getValue("NumLiveDataNodes")));
|
||||
assertGauge("CorruptBlocks", Long.parseLong(jmx.getValue("CorruptBlocks")),
|
||||
getMetrics("FSNamesystem"));
|
||||
DFSTestUtil.waitForMetric(jmx, "NumOpenConnections", numDatanodes);
|
||||
assertEquals(numDatanodes, Integer.parseInt(
|
||||
jmx.getValue("NumOpenConnections")));
|
||||
|
||||
|
@ -161,6 +164,7 @@ public class TestJMXGet {
|
|||
String serviceName = "DataNode";
|
||||
jmx.setService(serviceName);
|
||||
jmx.init();
|
||||
DFSTestUtil.waitForMetric(jmx, "BytesWritten", fileSize);
|
||||
assertEquals(fileSize, Integer.parseInt(jmx.getValue("BytesWritten")));
|
||||
|
||||
cluster.shutdown();
|
||||
|
|
Loading…
Reference in New Issue