MAPREDUCE-6626. Reuse ObjectMapper instance in MapReduce. Contributed by Lin Yiqun.

This commit is contained in:
Akira Ajisaka 2016-02-10 03:03:49 +09:00
parent fba6e9f5ef
commit a0b1f10a30
3 changed files with 11 additions and 5 deletions

View File

@ -479,6 +479,9 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-6473. Job submission can take a long time during Cluster
     initialization (Kuhu Shukla via jlowe)
 
+    MAPREDUCE-6626. Reuse ObjectMapper instance in MapReduce.
+    (Lin Yiqun via aajisaka)
+
   BUG FIXES
 
     MAPREDUCE-6314. TestPipeApplication fails on trunk.

View File

@ -73,6 +73,7 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.node.ArrayNode;
+import org.codehaus.jackson.node.JsonNodeFactory;
 import org.codehaus.jackson.node.ObjectNode;
 
 import com.google.common.annotations.VisibleForTesting;
@ -84,6 +85,8 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class JobHistoryEventHandler extends AbstractService
     implements EventHandler<JobHistoryEvent> {
+  private static final JsonNodeFactory FACTORY =
+      new ObjectMapper().getNodeFactory();
 
   private final AppContext context;
   private final int startCount;
@ -1040,8 +1043,7 @@ public class JobHistoryEventHandler extends AbstractService
   @Private
   public JsonNode countersToJSON(Counters counters) {
-    ObjectMapper mapper = new ObjectMapper();
-    ArrayNode nodes = mapper.createArrayNode();
+    ArrayNode nodes = FACTORY.arrayNode();
     if (counters != null) {
       for (CounterGroup counterGroup : counters) {
         ObjectNode groupNode = nodes.addObject();

View File

@ -63,6 +63,7 @@ import org.apache.hadoop.yarn.api.records.ReservationId;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import com.google.common.base.Charsets;
@ -70,6 +71,8 @@ import com.google.common.base.Charsets;
 @InterfaceStability.Unstable
 class JobSubmitter {
   protected static final Log LOG = LogFactory.getLog(JobSubmitter.class);
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
   private static final String SHUFFLE_KEYGEN_ALGORITHM = "HmacSHA1";
   private static final int SHUFFLE_KEY_LENGTH = 64;
   private FileSystem jtFs;
@ -396,9 +399,7 @@ class JobSubmitter {
       boolean json_error = false;
       try {
         // read JSON
-        ObjectMapper mapper = new ObjectMapper();
-        Map<String, String> nm =
-            mapper.readValue(new File(localFileName), Map.class);
+        Map<String, String> nm = READER.readValue(new File(localFileName));
 
         for(Map.Entry<String, String> ent: nm.entrySet()) {
           credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()