HADOOP-10104. Update jackson to 1.9.13 (Akira Ajisaka via stevel)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1585932 13f79535-47bb-0310-9956-ffa450edef68
Steve Loughran 2014-04-09 11:11:20 +00:00
parent 1e1200d594
commit b8f250a99d
8 changed files with 21 additions and 21 deletions

View File

@@ -326,6 +326,8 @@ Release 2.5.0 - UNRELEASED
HADOOP-10454. Provide FileContext version of har file system. (Kihwal Lee
via jeagles)
+HADOOP-10104. Update jackson to 1.9.13 (Akira Ajisaka via stevel)
OPTIMIZATIONS
BUG FIXES

View File

@@ -358,8 +358,8 @@ class ClusterJspHelper {
nn.missingBlocksCount = getProperty(props, "NumberOfMissingBlocks")
.getLongValue();
nn.httpAddress = httpAddress.toURL();
-getLiveNodeCount(getProperty(props, "LiveNodes").getValueAsText(), nn);
-getDeadNodeCount(getProperty(props, "DeadNodes").getValueAsText(), nn);
+getLiveNodeCount(getProperty(props, "LiveNodes").asText(), nn);
+getDeadNodeCount(getProperty(props, "DeadNodes").asText(), nn);
nn.softwareVersion = getProperty(props, "SoftwareVersion").getTextValue();
return nn;
}
@@ -373,11 +373,11 @@ class ClusterJspHelper {
Map<String, Map<String, String>> statusMap, String props)
throws IOException, MalformedObjectNameException {
getLiveNodeStatus(statusMap, host, getProperty(props, "LiveNodes")
-.getValueAsText());
+.asText());
getDeadNodeStatus(statusMap, host, getProperty(props, "DeadNodes")
-.getValueAsText());
+.asText());
getDecommissionNodeStatus(statusMap, host,
getProperty(props, "DecomNodes").getValueAsText());
getProperty(props, "DecomNodes").asText());
}
/**
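
As an aside, a minimal standalone sketch (not part of this patch) of the accessor rename used above: Jackson 1.9 deprecates JsonNode.getValueAsText() in favor of asText(). The class name and JSON input below are invented for illustration.

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

public class AsTextDemo {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JsonNode props = mapper.readTree("{\"SoftwareVersion\":\"2.5.0\"}");
    // Pre-1.9 spelling: props.get("SoftwareVersion").getValueAsText()
    String version = props.get("SoftwareVersion").asText(); // 1.9 name
    System.out.println(version); // prints: 2.5.0
  }
}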

View File

@@ -116,7 +116,7 @@ public class TestRMNMInfo {
Assert.assertNotNull(n.get("HostName"));
Assert.assertNotNull(n.get("Rack"));
Assert.assertTrue("Node " + n.get("NodeId") + " should be RUNNING",
n.get("State").getValueAsText().contains("RUNNING"));
n.get("State").asText().contains("RUNNING"));
Assert.assertNotNull(n.get("NodeHTTPAddress"));
Assert.assertNotNull(n.get("LastHealthUpdate"));
Assert.assertNotNull(n.get("HealthReport"));
@@ -124,10 +124,10 @@ public class TestRMNMInfo {
Assert.assertNotNull(n.get("NumContainers"));
Assert.assertEquals(
n.get("NodeId") + ": Unexpected number of used containers",
0, n.get("NumContainers").getValueAsInt());
0, n.get("NumContainers").asInt());
Assert.assertEquals(
n.get("NodeId") + ": Unexpected amount of used memory",
0, n.get("UsedMemoryMB").getValueAsInt());
0, n.get("UsedMemoryMB").asInt());
Assert.assertNotNull(n.get("AvailableMemoryMB"));
}
}
@@ -153,7 +153,7 @@ public class TestRMNMInfo {
Assert.assertNotNull(n.get("HostName"));
Assert.assertNotNull(n.get("Rack"));
Assert.assertTrue("Node " + n.get("NodeId") + " should be RUNNING",
n.get("State").getValueAsText().contains("RUNNING"));
n.get("State").asText().contains("RUNNING"));
Assert.assertNotNull(n.get("NodeHTTPAddress"));
Assert.assertNotNull(n.get("LastHealthUpdate"));
Assert.assertNotNull(n.get("HealthReport"));

View File

@@ -631,22 +631,22 @@
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
-<version>1.8.8</version>
+<version>1.9.13</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
-<version>1.8.8</version>
+<version>1.9.13</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
-<version>1.8.8</version>
+<version>1.9.13</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
-<version>1.8.8</version>
+<version>1.9.13</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
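
All four org.codehaus.jackson artifacts have to move in lockstep. A quick sanity check, sketched below under the assumption that the Versioned API (available since Jackson 1.6) is on the classpath, prints the version the build actually resolved.

import org.codehaus.jackson.Version;
import org.codehaus.jackson.map.ObjectMapper;

public class JacksonVersionCheck {
  public static void main(String[] args) {
    Version v = new ObjectMapper().version(); // reports the mapper jar's version
    System.out.println(v);                    // expect 1.9.13 after this change
  }
}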

View File

@@ -24,7 +24,7 @@ import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.DeserializationContext;
import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.deser.StdDeserializer;
+import org.codehaus.jackson.map.deser.std.StdDeserializer;
import org.codehaus.jackson.node.ObjectNode;
/**
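
The only change in this file is the import: Jackson 1.9 relocated the standard deserializer base classes into the map.deser.std package. A hedged sketch of a custom deserializer built against the new location (class name and behavior invented):

import java.io.IOException;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.map.DeserializationContext;
import org.codehaus.jackson.map.deser.std.StdDeserializer; // 1.9 package

public class TrimmedStringDeserializer extends StdDeserializer<String> {
  public TrimmedStringDeserializer() {
    super(String.class);
  }

  @Override
  public String deserialize(JsonParser jp, DeserializationContext ctxt)
      throws IOException {
    return jp.getText().trim(); // toy behavior: trim incoming strings
  }
}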

View File

@@ -120,7 +120,7 @@ public class RumenToSLSConverter {
Writer output = new FileWriter(outputFile);
try {
ObjectMapper mapper = new ObjectMapper();
-ObjectWriter writer = mapper.defaultPrettyPrintingWriter();
+ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
Iterator<Map> i = mapper.readValues(
new JsonFactory().createJsonParser(input), Map.class);
while (i.hasNext()) {
@@ -141,7 +141,7 @@ public class RumenToSLSConverter {
Writer output = new FileWriter(outputFile);
try {
ObjectMapper mapper = new ObjectMapper();
-ObjectWriter writer = mapper.defaultPrettyPrintingWriter();
+ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
Map rack = new LinkedHashMap();
rack.put("rack", entry.getKey());

View File

@@ -43,8 +43,7 @@ public class TimelineUtils {
mapper = new ObjectMapper();
AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
mapper.setAnnotationIntrospector(introspector);
-mapper.getSerializationConfig()
-.setSerializationInclusion(Inclusion.NON_NULL);
+mapper.setSerializationInclusion(Inclusion.NON_NULL);
}
/**
@@ -77,7 +76,7 @@ public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
throws JsonGenerationException, JsonMappingException, IOException {
if (pretty) {
-return mapper.defaultPrettyPrintingWriter().writeValueAsString(o);
+return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o);
} else {
return mapper.writeValueAsString(o);
}
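
Jackson 1.9 made SerializationConfig effectively read-only, so the inclusion setting moves from the config object onto ObjectMapper itself. A minimal sketch of the effect, with an invented bean:

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;

public class InclusionDemo {
  public static class Entity {
    public String id = "e1";
    public String description = null; // should be omitted from the output
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.setSerializationInclusion(Inclusion.NON_NULL); // 1.9-style setter
    System.out.println(mapper.writeValueAsString(new Entity())); // {"id":"e1"}
  }
}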

View File

@@ -51,8 +51,7 @@ public class YarnJacksonJaxbJsonProvider extends JacksonJaxbJsonProvider {
ObjectMapper mapper = super.locateMapper(type, mediaType);
AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
mapper.setAnnotationIntrospector(introspector);
-mapper.getSerializationConfig()
-.setSerializationInclusion(Inclusion.NON_NULL);
+mapper.setSerializationInclusion(Inclusion.NON_NULL);
return mapper;
}
}
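
Same inclusion fix as in TimelineUtils; the JaxbAnnotationIntrospector wiring is what lets JAXB annotations drive the JSON shape. A hedged sketch of that effect (the bean and annotation values are invented):

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;

public class JaxbMappingDemo {
  @XmlRootElement
  @XmlAccessorType(XmlAccessType.FIELD)
  public static class Event {
    @XmlElement(name = "eventtype")
    public String type = "STARTED";
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.setAnnotationIntrospector(new JaxbAnnotationIntrospector());
    // JAXB name wins: {"eventtype":"STARTED"} rather than {"type":"STARTED"}
    System.out.println(mapper.writeValueAsString(new Event()));
  }
}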