From 17b17aed0f136c6a17341bf45f56e8119366781b Mon Sep 17 00:00:00 2001
From: Anu Engineer
Date: Mon, 25 Sep 2017 13:44:51 -0700
Subject: [PATCH] HADOOP-14901. ReuseObjectMapper in Hadoop Common.
 Contributed by Hanisha Koneru.

---
 .../hadoop/crypto/key/kms/KMSClientProvider.java   |  7 +++++--
 .../org/apache/hadoop/ipc/DecayRpcScheduler.java   |  9 +++++----
 .../main/java/org/apache/hadoop/log/Log4Json.java  |  5 +++--
 .../apache/hadoop/metrics2/MetricsJsonBuilder.java | 14 +++++++++-----
 .../web/DelegationTokenAuthenticator.java          |  7 +++++--
 .../org/apache/hadoop/util/HttpExceptionUtils.java | 13 +++++++++----
 6 files changed, 36 insertions(+), 19 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index 026cbbfbcfb..f57472d9b17 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.util.HttpExceptionUtils;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.http.client.utils.URIBuilder;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -127,6 +128,9 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
 
   private final ValueQueue<EncryptedKeyVersion> encKeyVersionQueue;
 
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+
   private class EncryptedQueueRefiller implements
     ValueQueue.QueueRefiller<EncryptedKeyVersion> {
 
@@ -282,8 +286,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
 
   private static void writeJson(Map map, OutputStream os) throws IOException {
     Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
-    ObjectMapper jsonMapper = new ObjectMapper();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map);
+    WRITER.writeValue(writer, map);
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
index fe9298f6704..79562aee9e7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.metrics2.util.Metrics2Util.NameValuePair;
 import org.apache.hadoop.metrics2.util.Metrics2Util.TopN;
 
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -128,6 +129,8 @@ public class DecayRpcScheduler implements RpcScheduler,
   public static final Logger LOG =
       LoggerFactory.getLogger(DecayRpcScheduler.class);
 
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+
   // Track the decayed and raw (no decay) number of calls for each schedulable
   // identity from all previous decay windows: idx 0 for decayed call count and
   // idx 1 for the raw call count
@@ -909,8 +912,7 @@ public class DecayRpcScheduler implements RpcScheduler,
       return "{}";
     } else {
       try {
-        ObjectMapper om = new ObjectMapper();
-        return om.writeValueAsString(decisions);
+        return WRITER.writeValueAsString(decisions);
       } catch (Exception e) {
         return "Error: " + e.getMessage();
       }
@@ -919,8 +921,7 @@ public class DecayRpcScheduler implements RpcScheduler,
 
   public String getCallVolumeSummary() {
     try {
-      ObjectMapper om = new ObjectMapper();
-      return om.writeValueAsString(getDecayedCallCounts());
+      return WRITER.writeValueAsString(getDecayedCallCounts());
     } catch (Exception e) {
       return "Error: " + e.getMessage();
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
index d998fbc3270..5ba0a9c1b24 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
@@ -28,6 +28,7 @@ import org.codehaus.jackson.JsonGenerator;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.MappingJsonFactory;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 import org.codehaus.jackson.node.ContainerNode;
 
 import java.io.IOException;
@@ -105,6 +106,7 @@ public class Log4Json extends Layout {
    * configuration it must be done in a static intializer block.
    */
   private static final JsonFactory factory = new MappingJsonFactory();
+  private static final ObjectReader READER = new ObjectMapper(factory).reader();
  public static final String DATE = "date";
  public static final String EXCEPTION_CLASS = "exceptionclass";
  public static final String LEVEL = "level";
@@ -252,8 +254,7 @@ public class Log4Json extends Layout {
    * @throws IOException on any parsing problems
    */
  public static ContainerNode parse(String json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper(factory);
-    JsonNode jsonNode = mapper.readTree(json);
+    JsonNode jsonNode = READER.readTree(json);
     if (!(jsonNode instanceof ContainerNode)) {
       throw new IOException("Wrong JSON data: " + json);
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
index 8e42909b07c..ce6fbe1d82f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
@@ -18,17 +18,18 @@
 
 package org.apache.hadoop.metrics2;
 
-import java.io.IOException;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
 import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
 /**
  * Build a JSON dump of the metrics.
  *
@@ -44,6 +45,9 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   private final MetricsCollector parent;
   private Map<String, Object> innerMetrics = new LinkedHashMap<>();
 
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writer();
+
   /**
    * Build an instance.
    * @param parent parent collector. Unused in this instance; only used for
@@ -116,7 +120,7 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   @Override
   public String toString() {
     try {
-      return new ObjectMapper().writeValueAsString(innerMetrics);
+      return WRITER.writeValueAsString(innerMetrics);
     } catch (IOException e) {
       LOG.warn("Failed to dump to Json.", e);
       return ExceptionUtils.getStackTrace(e);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
index b7db5a69e20..71a911f45e5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdenti
 import org.apache.hadoop.util.HttpExceptionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -54,6 +55,9 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
   private static final String CONTENT_TYPE = "Content-Type";
   private static final String APPLICATION_JSON_MIME = "application/json";
 
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
+
   private static final String HTTP_GET = "GET";
   private static final String HTTP_PUT = "PUT";
 
@@ -316,8 +320,7 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
       if (contentType != null &&
           contentType.contains(APPLICATION_JSON_MIME)) {
         try {
-          ObjectMapper mapper = new ObjectMapper();
-          ret = mapper.readValue(conn.getInputStream(), Map.class);
+          ret = READER.readValue(conn.getInputStream());
         } catch (Exception ex) {
           throw new AuthenticationException(String.format(
               "'%s' did not handle the '%s' delegation token operation: %s",
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
index 7072d9a93c0..0f05ef33ae4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.util;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
+import org.codehaus.jackson.map.ObjectWriter;
 
 import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.core.MediaType;
@@ -54,6 +56,11 @@ public class HttpExceptionUtils {
 
   private static final String ENTER = System.getProperty("line.separator");
 
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+
   /**
    * Creates a HTTP servlet response serializing the exception in it as JSON.
    *
@@ -74,9 +81,8 @@ public class HttpExceptionUtils {
     json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
     Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
     jsonResponse.put(ERROR_JSON, json);
-    ObjectMapper jsonMapper = new ObjectMapper();
     Writer writer = response.getWriter();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, jsonResponse);
+    WRITER.writeValue(writer, jsonResponse);
     writer.flush();
   }
 
@@ -144,8 +150,7 @@ public class HttpExceptionUtils {
     InputStream es = null;
     try {
       es = conn.getErrorStream();
-      ObjectMapper mapper = new ObjectMapper();
-      Map json = mapper.readValue(es, Map.class);
+      Map json = READER.readValue(es);
       json = (Map) json.get(ERROR_JSON);
       String exClass = (String) json.get(ERROR_CLASSNAME_JSON);
       String exMsg = (String) json.get(ERROR_MESSAGE_JSON);
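
Every hunk above applies the same fix, sketched below for reference. Constructing a Jackson ObjectMapper is comparatively expensive (each new instance rebuilds its serializer and introspection caches), while the ObjectWriter and ObjectReader it hands out are immutable and thread-safe, so they can be built once in a static final field and shared across calls and threads. The following is a minimal, self-contained sketch of the pattern against the Jackson 1.x (org.codehaus.jackson) API the patch targets; the class and method names are hypothetical and do not appear in the patch.

// Hypothetical illustration of the reuse pattern applied by this patch.
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;

public class JsonCodecExample {

  // Built once per class load. ObjectWriter and ObjectReader are immutable,
  // so sharing them across threads is safe; a per-call "new ObjectMapper()"
  // would pay the construction and cache-warmup cost on every invocation.
  private static final ObjectWriter WRITER =
      new ObjectMapper().writerWithDefaultPrettyPrinter();
  private static final ObjectReader READER =
      new ObjectMapper().reader(Map.class);

  public static String toJson(Map<String, Object> data) throws IOException {
    return WRITER.writeValueAsString(data);
  }

  @SuppressWarnings("unchecked")
  public static Map<String, Object> fromJson(String json) throws IOException {
    return (Map<String, Object>) READER.readValue(json);
  }

  public static void main(String[] args) throws IOException {
    Map<String, Object> m = new LinkedHashMap<String, Object>();
    m.put("status", "ok");
    System.out.println(toJson(m));             // pretty-printed JSON object
    System.out.println(fromJson("{\"a\":1}")); // {a=1}
  }
}

One design point the patch carries through: a writer or reader freezes its configuration at construction time, which is why HttpExceptionUtils keeps two separate statics (a pretty-printing WRITER and a Map-bound READER) rather than a single shared object.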