HDFS-1386. TestJMXGet fails in jdk7 (jeagles)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1543613 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jonathan Turner Eagles 2013-11-19 22:47:42 +00:00
parent b98b50b46c
commit dd889fc773
6 changed files with 52 additions and 11 deletions

View File

@@ -125,6 +125,8 @@ Release 2.3.0 - UNRELEASED
HDFS-5073. TestListCorruptFileBlocks fails intermittently. (Arpit Agarwal) HDFS-5073. TestListCorruptFileBlocks fails intermittently. (Arpit Agarwal)
HDFS-1386. TestJMXGet fails in jdk7 (jeagles)
OPTIMIZATIONS OPTIMIZATIONS
HDFS-5239. Allow FSNamesystem lock fairness to be configurable (daryn) HDFS-5239. Allow FSNamesystem lock fairness to be configurable (daryn)

View File

@@ -24,6 +24,8 @@ import java.net.InetSocketAddress;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import javax.management.ObjectName;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
@@ -61,6 +63,7 @@ public class JournalNode implements Tool, Configurable, JournalNodeMXBean {
private JournalNodeRpcServer rpcServer; private JournalNodeRpcServer rpcServer;
private JournalNodeHttpServer httpServer; private JournalNodeHttpServer httpServer;
private Map<String, Journal> journalsById = Maps.newHashMap(); private Map<String, Journal> journalsById = Maps.newHashMap();
private ObjectName journalNodeInfoBeanName;
private File localDir; private File localDir;
@@ -181,6 +184,11 @@ public class JournalNode implements Tool, Configurable, JournalNodeMXBean {
for (Journal j : journalsById.values()) { for (Journal j : journalsById.values()) {
IOUtils.cleanup(LOG, j); IOUtils.cleanup(LOG, j);
} }
if (journalNodeInfoBeanName != null) {
MBeans.unregister(journalNodeInfoBeanName);
journalNodeInfoBeanName = null;
}
} }
/** /**
@@ -256,7 +264,7 @@ public class JournalNode implements Tool, Configurable, JournalNodeMXBean {
* Register JournalNodeMXBean * Register JournalNodeMXBean
*/ */
private void registerJNMXBean() { private void registerJNMXBean() {
MBeans.register("JournalNode", "JournalNodeInfo", this); journalNodeInfoBeanName = MBeans.register("JournalNode", "JournalNodeInfo", this);
} }
private class ErrorReporter implements StorageErrorReporter { private class ErrorReporter implements StorageErrorReporter {

View File

@@ -96,6 +96,8 @@ import java.security.PrivilegedExceptionAction;
import java.util.*; import java.util.*;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import javax.management.ObjectName;
import static org.apache.hadoop.hdfs.DFSConfigKeys.*; import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
import static org.apache.hadoop.util.ExitUtil.terminate; import static org.apache.hadoop.util.ExitUtil.terminate;
@@ -210,6 +212,7 @@ public class DataNode extends Configured
private boolean connectToDnViaHostname; private boolean connectToDnViaHostname;
ReadaheadPool readaheadPool; ReadaheadPool readaheadPool;
private final boolean getHdfsBlockLocationsEnabled; private final boolean getHdfsBlockLocationsEnabled;
private ObjectName dataNodeInfoBeanName;
/** /**
* Create the DataNode given a configuration and an array of dataDirs. * Create the DataNode given a configuration and an array of dataDirs.
@@ -861,7 +864,7 @@ public class DataNode extends Configured
} }
private void registerMXBean() { private void registerMXBean() {
MBeans.register("DataNode", "DataNodeInfo", this); dataNodeInfoBeanName = MBeans.register("DataNode", "DataNodeInfo", this);
} }
@VisibleForTesting @VisibleForTesting
@@ -1218,6 +1221,10 @@ public class DataNode extends Configured
if (metrics != null) { if (metrics != null) {
metrics.shutdown(); metrics.shutdown();
} }
if (dataNodeInfoBeanName != null) {
MBeans.unregister(dataNodeInfoBeanName);
dataNodeInfoBeanName = null;
}
} }

View File

@@ -5479,6 +5479,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
} }
private ObjectName mbeanName; private ObjectName mbeanName;
private ObjectName mxbeanName;
/** /**
* Register the FSNamesystem MBean using the name * Register the FSNamesystem MBean using the name
@@ -5502,6 +5503,11 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
void shutdown() { void shutdown() {
if (mbeanName != null) { if (mbeanName != null) {
MBeans.unregister(mbeanName); MBeans.unregister(mbeanName);
mbeanName = null;
}
if (mxbeanName != null) {
MBeans.unregister(mxbeanName);
mxbeanName = null;
} }
if (dir != null) { if (dir != null) {
dir.shutdown(); dir.shutdown();
@@ -6315,7 +6321,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
* Register NameNodeMXBean * Register NameNodeMXBean
*/ */
private void registerMXBean() { private void registerMXBean() {
MBeans.register("NameNode", "NameNodeInfo", this); mxbeanName = MBeans.register("NameNode", "NameNodeInfo", this);
} }
/** /**

View File

@@ -28,6 +28,9 @@ import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import javax.management.ObjectName;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -263,6 +266,7 @@ public class NameNode implements NameNodeStatusMXBean {
private NameNodeRpcServer rpcServer; private NameNodeRpcServer rpcServer;
private JvmPauseMonitor pauseMonitor; private JvmPauseMonitor pauseMonitor;
private ObjectName nameNodeStatusBeanName;
/** Format a new filesystem. Destroys any filesystem that may already /** Format a new filesystem. Destroys any filesystem that may already
* exist at this location. **/ * exist at this location. **/
@@ -742,6 +746,10 @@ public class NameNode implements NameNodeStatusMXBean {
if (namesystem != null) { if (namesystem != null) {
namesystem.shutdown(); namesystem.shutdown();
} }
if (nameNodeStatusBeanName != null) {
MBeans.unregister(nameNodeStatusBeanName);
nameNodeStatusBeanName = null;
}
} }
} }
@@ -1407,7 +1415,7 @@ public class NameNode implements NameNodeStatusMXBean {
* Register NameNodeStatusMXBean * Register NameNodeStatusMXBean
*/ */
private void registerNNSMXBean() { private void registerNNSMXBean() {
MBeans.register("NameNode", "NameNodeStatus", this); nameNodeStatusBeanName = MBeans.register("NameNode", "NameNodeStatus", this);
} }
@Override // NameNodeStatusMXBean @Override // NameNodeStatusMXBean

View File

@@ -28,7 +28,12 @@ import java.io.IOException;
import java.io.PipedInputStream; import java.io.PipedInputStream;
import java.io.PipedOutputStream; import java.io.PipedOutputStream;
import java.io.PrintStream; import java.io.PrintStream;
import java.lang.management.ManagementFactory;
import java.util.Random; import java.util.Random;
import java.util.Set;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -103,9 +108,8 @@ public class TestJMXGet {
writeFile(cluster.getFileSystem(), new Path("/test1"), 2); writeFile(cluster.getFileSystem(), new Path("/test1"), 2);
JMXGet jmx = new JMXGet(); JMXGet jmx = new JMXGet();
//jmx.setService("*"); // list all hadoop services String serviceName = "NameNode";
//jmx.init(); jmx.setService(serviceName);
//jmx = new JMXGet();
jmx.init(); // default lists namenode mbeans only jmx.init(); // default lists namenode mbeans only
assertTrue("error printAllValues", checkPrintAllValues(jmx)); assertTrue("error printAllValues", checkPrintAllValues(jmx));
@@ -118,6 +122,10 @@ public class TestJMXGet {
jmx.getValue("NumOpenConnections"))); jmx.getValue("NumOpenConnections")));
cluster.shutdown(); cluster.shutdown();
MBeanServerConnection mbsc = ManagementFactory.getPlatformMBeanServer();
ObjectName query = new ObjectName("Hadoop:service=" + serviceName + ",*");
Set<ObjectName> names = mbsc.queryNames(query, null);
assertTrue("No beans should be registered for " + serviceName, names.isEmpty());
} }
private static boolean checkPrintAllValues(JMXGet jmx) throws Exception { private static boolean checkPrintAllValues(JMXGet jmx) throws Exception {
@@ -150,13 +158,15 @@ public class TestJMXGet {
writeFile(cluster.getFileSystem(), new Path("/test"), 2); writeFile(cluster.getFileSystem(), new Path("/test"), 2);
JMXGet jmx = new JMXGet(); JMXGet jmx = new JMXGet();
//jmx.setService("*"); // list all hadoop services String serviceName = "DataNode";
//jmx.init(); jmx.setService(serviceName);
//jmx = new JMXGet();
jmx.setService("DataNode");
jmx.init(); jmx.init();
assertEquals(fileSize, Integer.parseInt(jmx.getValue("BytesWritten"))); assertEquals(fileSize, Integer.parseInt(jmx.getValue("BytesWritten")));
cluster.shutdown(); cluster.shutdown();
MBeanServerConnection mbsc = ManagementFactory.getPlatformMBeanServer();
ObjectName query = new ObjectName("Hadoop:service=" + serviceName + ",*");
Set<ObjectName> names = mbsc.queryNames(query, null);
assertTrue("No beans should be registered for " + serviceName, names.isEmpty());
} }
} }