diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index c727f62b9c9..2733db7f699 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -165,6 +165,9 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-6473. Job submission can take a long time during Cluster
     initialization (Kuhu Shukla via jlowe)
 
+    MAPREDUCE-6626. Reuse ObjectMapper instance in MapReduce.
+    (Lin Yiqun via aajisaka)
+
   BUG FIXES
 
     MAPREDUCE-6314. TestPipeApplication fails on trunk.
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 1031e38ee53..c227b8983dc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -73,6 +73,7 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.node.ArrayNode;
+import org.codehaus.jackson.node.JsonNodeFactory;
 import org.codehaus.jackson.node.ObjectNode;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -84,6 +85,8 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class JobHistoryEventHandler extends AbstractService
     implements EventHandler<JobHistoryEvent> {
+  private static final JsonNodeFactory FACTORY =
+      new ObjectMapper().getNodeFactory();
 
   private final AppContext context;
   private final int startCount;
@@ -1041,8 +1044,7 @@ public class JobHistoryEventHandler extends AbstractService
 
   @Private
   public JsonNode countersToJSON(Counters counters) {
-    ObjectMapper mapper = new ObjectMapper();
-    ArrayNode nodes = mapper.createArrayNode();
+    ArrayNode nodes = FACTORY.arrayNode();
     if (counters != null) {
       for (CounterGroup counterGroup : counters) {
         ObjectNode groupNode = nodes.addObject();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
index a458e2c3685..18b76a1653a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
@@ -63,6 +63,7 @@ import org.apache.hadoop.yarn.api.records.ReservationId;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import com.google.common.base.Charsets;
 
@@ -70,6 +71,8 @@
 @InterfaceStability.Unstable
 class JobSubmitter {
   protected static final Log LOG = LogFactory.getLog(JobSubmitter.class);
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
   private static final String SHUFFLE_KEYGEN_ALGORITHM = "HmacSHA1";
   private static final int SHUFFLE_KEY_LENGTH = 64;
   private FileSystem jtFs;
@@ -396,9 +399,7 @@ class JobSubmitter {
       boolean json_error = false;
       try {
         // read JSON
-        ObjectMapper mapper = new ObjectMapper();
-        Map<String, String> nm =
-            mapper.readValue(new File(localFileName), Map.class);
+        Map<String, String> nm = READER.readValue(new File(localFileName));
         for(Map.Entry<String, String> ent: nm.entrySet()) {
           credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()
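
For context, the patch above replaces per-call "new ObjectMapper()" construction with shared, statically initialized Jackson helpers (an ObjectReader in JobSubmitter, a JsonNodeFactory in JobHistoryEventHandler). The following is a minimal, self-contained sketch of that pattern, assuming the Jackson 1.x (org.codehaus.jackson) API that the diff itself uses; the class and method names below are illustrative only and are not part of the Hadoop code.

import java.io.File;
import java.io.IOException;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.JsonNodeFactory;

// Illustrative class; sketches the reuse pattern applied by the patch.
public class ObjectMapperReuseSketch {

  // ObjectReader and JsonNodeFactory are thread-safe once configured, so a
  // single static instance can be shared across calls instead of building a
  // fresh ObjectMapper on every invocation.
  private static final ObjectReader READER =
      new ObjectMapper().reader(Map.class);
  private static final JsonNodeFactory FACTORY =
      new ObjectMapper().getNodeFactory();

  // Mirrors the JobSubmitter change: read a JSON map with the shared reader
  // rather than mapper.readValue(file, Map.class).
  public static Map<String, String> readJsonMap(File jsonFile)
      throws IOException {
    return READER.readValue(jsonFile);
  }

  // Mirrors the JobHistoryEventHandler change: build JSON trees from the
  // shared node factory rather than mapper.createArrayNode().
  public static ArrayNode newArrayNode() {
    return FACTORY.arrayNode();
  }
}

Reusing the pre-configured reader and node factory avoids reconstructing ObjectMapper state on every job submission or history event, which is the overhead MAPREDUCE-6626 targets.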