svn merge -c 1337363 FIXES: MAPREDUCE-4233. NPE can happen in RMNMNodeInfo. (bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1337364 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Joseph Evans 2012-05-11 20:27:58 +00:00
parent 4f5a189765
commit 5922f4f064
3 changed files with 49 additions and 6 deletions

View File

@ -372,6 +372,8 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-4237. TestNodeStatusUpdater can fail if localhost has a domain
     associated with it (bobby)
+    MAPREDUCE-4233. NPE can happen in RMNMNodeInfo. (bobby)
 Release 0.23.2 - UNRELEASED
   INCOMPATIBLE CHANGES

View File

@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.v2;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.Iterator; import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
@ -28,8 +30,11 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.server.resourcemanager.MockNodes;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.RMNMInfo; import org.apache.hadoop.yarn.server.resourcemanager.RMNMInfo;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.ObjectMapper;
@ -37,6 +42,7 @@ import org.junit.AfterClass;
import org.junit.Assert; import org.junit.Assert;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import static org.mockito.Mockito.*;
public class TestRMNMInfo { public class TestRMNMInfo {
private static final Log LOG = LogFactory.getLog(TestRMNMInfo.class); private static final Log LOG = LogFactory.getLog(TestRMNMInfo.class);
@ -116,14 +122,47 @@ public class TestRMNMInfo {
n.get("HealthStatus").getValueAsText().contains("Healthy")); n.get("HealthStatus").getValueAsText().contains("Healthy"));
Assert.assertNotNull(n.get("LastHealthUpdate")); Assert.assertNotNull(n.get("LastHealthUpdate"));
Assert.assertNotNull(n.get("HealthReport")); Assert.assertNotNull(n.get("HealthReport"));
-        Assert.assertNotNull(n.get("NumContainersMB"));
+        Assert.assertNotNull(n.get("NumContainers"));
         Assert.assertEquals(
                 n.get("NodeId") + ": Unexpected number of used containers",
-                0, n.get("NumContainersMB").getValueAsInt());
+                0, n.get("NumContainers").getValueAsInt());
Assert.assertEquals( Assert.assertEquals(
n.get("NodeId") + ": Unexpected amount of used memory", n.get("NodeId") + ": Unexpected amount of used memory",
0, n.get("UsedMemoryMB").getValueAsInt()); 0, n.get("UsedMemoryMB").getValueAsInt());
Assert.assertNotNull(n.get("AvailableMemoryMB")); Assert.assertNotNull(n.get("AvailableMemoryMB"));
} }
} }
@Test
public void testRMNMInfoMissmatch() throws Exception {
  // Regression test for MAPREDUCE-4233: a node that the RMContext knows about
  // but the scheduler has no report for must not trigger an NPE — the
  // scheduler-derived fields are simply omitted from the JSON output.
  RMContext mockedContext = mock(RMContext.class);
  ResourceScheduler mockedScheduler = mock(ResourceScheduler.class);

  // Register exactly one mock node; the mocked scheduler (by Mockito default)
  // returns null for its node report.
  ConcurrentMap<NodeId, RMNode> nodeMap =
      new ConcurrentHashMap<NodeId, RMNode>();
  RMNode mockNode = MockNodes.newNodeInfo(1, MockNodes.newResource(4 * 1024));
  nodeMap.put(mockNode.getNodeID(), mockNode);
  when(mockedContext.getRMNodes()).thenReturn(nodeMap);

  RMNMInfo nmInfo = new RMNMInfo(mockedContext, mockedScheduler);
  String liveJson = nmInfo.getLiveNodeManagers();
  JsonNode jn = new ObjectMapper().readTree(liveJson);
  Assert.assertEquals("Unexpected number of live nodes:",
                      1, jn.size());
  for (JsonNode n : jn) {
    // Node identity and health fields must always be present.
    Assert.assertNotNull(n.get("HostName"));
    Assert.assertNotNull(n.get("Rack"));
    Assert.assertTrue("Node " + n.get("NodeId") + " should be RUNNING",
            n.get("State").getValueAsText().contains("RUNNING"));
    Assert.assertNotNull(n.get("NodeHTTPAddress"));
    Assert.assertTrue("Node " + n.get("NodeId") + " should be Healthy",
            n.get("HealthStatus").getValueAsText().contains("Healthy"));
    Assert.assertNotNull(n.get("LastHealthUpdate"));
    Assert.assertNotNull(n.get("HealthReport"));
    // With no scheduler report these must be absent rather than cause an NPE.
    Assert.assertNull(n.get("NumContainers"));
    Assert.assertNull(n.get("UsedMemoryMB"));
    Assert.assertNull(n.get("AvailableMemoryMB"));
  }
}
} }

View File

@ -93,10 +93,12 @@ public class RMNMInfo implements RMNMInfoBeans {
ni.getNodeHealthStatus().getLastHealthReportTime()); ni.getNodeHealthStatus().getLastHealthReportTime());
info.put("HealthReport", info.put("HealthReport",
ni.getNodeHealthStatus().getHealthReport()); ni.getNodeHealthStatus().getHealthReport());
-        info.put("NumContainersMB", report.getNumContainers());
+        if(report != null) {
+          info.put("NumContainers", report.getNumContainers());
           info.put("UsedMemoryMB", report.getUsedResource().getMemory());
           info.put("AvailableMemoryMB",
               report.getAvailableResource().getMemory());
+        }
nodesInfo.add(info); nodesInfo.add(info);
} }