HADOOP-14901. Reuse ObjectMapper in Hadoop Common. Contributed by Hanisha Koneru.

Anu Engineer 2017-09-22 15:45:22 -07:00
parent 164a063e74
commit e1b32e0959
6 changed files with 36 additions and 19 deletions
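
The pattern applied across all six files: constructing an ObjectMapper on every call is relatively expensive, while the ObjectReader and ObjectWriter instances a mapper produces are immutable and thread-safe, so they can be built once and stored in static final fields. A minimal before/after sketch of the idea (the JsonUtil class and its method names are illustrative, not part of the patch):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;
    import com.fasterxml.jackson.databind.ObjectWriter;
    import java.io.IOException;
    import java.util.Map;

    public class JsonUtil {
      // Before: allocates and configures a fresh mapper on every call.
      static String toJsonSlow(Object value) throws IOException {
        return new ObjectMapper().writeValueAsString(value);
      }

      // After: one mapper at class-load time; the derived reader/writer
      // are immutable, so sharing them across threads is safe.
      private static final ObjectWriter WRITER = new ObjectMapper().writer();
      private static final ObjectReader READER =
          new ObjectMapper().readerFor(Map.class);

      static String toJson(Object value) throws IOException {
        return WRITER.writeValueAsString(value);
      }

      static Map<?, ?> fromJson(String json) throws IOException {
        return READER.readValue(json);
      }
    }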

KMSClientProvider.java

@@ -77,6 +77,7 @@
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
@@ -133,6 +134,9 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
   private final ValueQueue<EncryptedKeyVersion> encKeyVersionQueue;
 
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+
   private class EncryptedQueueRefiller implements
     ValueQueue.QueueRefiller<EncryptedKeyVersion> {
@@ -226,8 +230,7 @@ public KMSEncryptedKeyVersion(String keyName, String keyVersionName,
   private static void writeJson(Object obj, OutputStream os)
       throws IOException {
     Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
-    ObjectMapper jsonMapper = new ObjectMapper();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, obj);
+    WRITER.writeValue(writer, obj);
   }
 
   /**

DecayRpcScheduler.java

@@ -36,6 +36,7 @@
 import javax.management.ObjectName;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.AtomicDoubleArray;
 import org.apache.commons.lang.exception.ExceptionUtils;
@@ -128,6 +129,8 @@ public class DecayRpcScheduler implements RpcScheduler,
   public static final Logger LOG =
       LoggerFactory.getLogger(DecayRpcScheduler.class);
 
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+
   // Track the decayed and raw (no decay) number of calls for each schedulable
   // identity from all previous decay windows: idx 0 for decayed call count and
   // idx 1 for the raw call count
@@ -909,8 +912,7 @@ public String getSchedulingDecisionSummary() {
       return "{}";
     } else {
       try {
-        ObjectMapper om = new ObjectMapper();
-        return om.writeValueAsString(decisions);
+        return WRITER.writeValueAsString(decisions);
       } catch (Exception e) {
         return "Error: " + e.getMessage();
       }
@@ -919,8 +921,7 @@ public String getSchedulingDecisionSummary() {
 
   public String getCallVolumeSummary() {
     try {
-      ObjectMapper om = new ObjectMapper();
-      return om.writeValueAsString(getDecayedCallCounts());
+      return WRITER.writeValueAsString(getDecayedCallCounts());
     } catch (Exception e) {
       return "Error: " + e.getMessage();
     }
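
The two summary getters above can be hit concurrently by metrics consumers; sharing the static WRITER is safe because ObjectWriter is immutable. A small standalone sketch of that concurrent reuse (the demo class, pool size, and payload are invented for illustration):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectWriter;
    import java.util.Collections;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class SharedWriterDemo {
      // Shared with no synchronization: ObjectWriter is immutable.
      private static final ObjectWriter WRITER = new ObjectMapper().writer();

      public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 4; i++) {
          final int id = i;
          pool.submit(() -> {
            try {
              // Each task serializes independently through the same writer.
              System.out.println(WRITER.writeValueAsString(
                  Collections.singletonMap("caller", id)));
            } catch (Exception e) {
              e.printStackTrace();
            }
          });
        }
        pool.shutdown();
      }
    }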

Log4Json.java

@@ -24,6 +24,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.MappingJsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.node.ContainerNode;
 import org.apache.log4j.Layout;
 import org.apache.log4j.helpers.ISO8601DateFormat;
@@ -105,6 +106,7 @@ public class Log4Json extends Layout {
    * configuration it must be done in a static intializer block.
    */
   private static final JsonFactory factory = new MappingJsonFactory();
+  private static final ObjectReader READER = new ObjectMapper(factory).reader();
   public static final String DATE = "date";
   public static final String EXCEPTION_CLASS = "exceptionclass";
   public static final String LEVEL = "level";
@@ -252,8 +254,7 @@ public void activateOptions() {
    * @throws IOException on any parsing problems
    */
   public static ContainerNode parse(String json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper(factory);
-    JsonNode jsonNode = mapper.readTree(json);
+    JsonNode jsonNode = READER.readTree(json);
     if (!(jsonNode instanceof ContainerNode)) {
       throw new IOException("Wrong JSON data: " + json);
     }
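
Standalone, the parse pattern above looks like this: the reader is built once from the same MappingJsonFactory the layout already owns, and readTree replaces the per-call mapper (the ParseDemo class and sample input are invented for illustration):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.MappingJsonFactory;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;
    import com.fasterxml.jackson.databind.node.ContainerNode;
    import java.io.IOException;

    public class ParseDemo {
      private static final JsonFactory factory = new MappingJsonFactory();
      // Built once; ObjectReader is immutable and thread-safe.
      private static final ObjectReader READER =
          new ObjectMapper(factory).reader();

      static ContainerNode<?> parse(String json) throws IOException {
        JsonNode jsonNode = READER.readTree(json);
        if (!(jsonNode instanceof ContainerNode)) {
          throw new IOException("Wrong JSON data: " + json);
        }
        return (ContainerNode<?>) jsonNode;
      }

      public static void main(String[] args) throws IOException {
        System.out.println(parse("{\"level\":\"INFO\",\"message\":\"hello\"}"));
      }
    }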

MetricsJsonBuilder.java

@@ -18,17 +18,18 @@
 package org.apache.hadoop.metrics2;
 
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
 import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
 /**
  * Build a JSON dump of the metrics.
  *
@@ -44,6 +45,9 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   private final MetricsCollector parent;
   private Map<String, Object> innerMetrics = new LinkedHashMap<>();
 
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writer();
+
   /**
    * Build an instance.
    * @param parent parent collector. Unused in this instance; only used for
@@ -116,7 +120,7 @@ public MetricsCollector parent() {
   @Override
   public String toString() {
     try {
-      return new ObjectMapper().writeValueAsString(innerMetrics);
+      return WRITER.writeValueAsString(innerMetrics);
     } catch (IOException e) {
       LOG.warn("Failed to dump to Json.", e);
       return ExceptionUtils.getStackTrace(e);

DelegationTokenAuthenticator.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.security.token.delegation.web;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.security.SecurityUtil;
@@ -54,6 +55,9 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
   private static final String CONTENT_TYPE = "Content-Type";
   private static final String APPLICATION_JSON_MIME = "application/json";
 
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(Map.class);
+
   private static final String HTTP_GET = "GET";
   private static final String HTTP_PUT = "PUT";
@@ -316,8 +320,7 @@ private Map doDelegationTokenOperation(URL url,
       if (contentType != null &&
           contentType.contains(APPLICATION_JSON_MIME)) {
         try {
-          ObjectMapper mapper = new ObjectMapper();
-          ret = mapper.readValue(conn.getInputStream(), Map.class);
+          ret = READER.readValue(conn.getInputStream());
         } catch (Exception ex) {
           throw new AuthenticationException(String.format(
               "'%s' did not handle the '%s' delegation token operation: %s",

HttpExceptionUtils.java

@@ -18,6 +18,8 @@
 package org.apache.hadoop.util;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -54,6 +56,11 @@ public class HttpExceptionUtils {
   private static final String ENTER = System.getProperty("line.separator");
 
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(Map.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+
   /**
    * Creates a HTTP servlet response serializing the exception in it as JSON.
    *
@@ -74,9 +81,8 @@ public static void createServletExceptionResponse(
     json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
     Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
     jsonResponse.put(ERROR_JSON, json);
-    ObjectMapper jsonMapper = new ObjectMapper();
     Writer writer = response.getWriter();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, jsonResponse);
+    WRITER.writeValue(writer, jsonResponse);
     writer.flush();
   }
@@ -144,8 +150,7 @@ public static void validateResponse(HttpURLConnection conn,
       InputStream es = null;
       try {
         es = conn.getErrorStream();
-        ObjectMapper mapper = new ObjectMapper();
-        Map json = mapper.readValue(es, Map.class);
+        Map json = READER.readValue(es);
         json = (Map) json.get(ERROR_JSON);
         String exClass = (String) json.get(ERROR_CLASSNAME_JSON);
         String exMsg = (String) json.get(ERROR_MESSAGE_JSON);