HADOOP-14901. Reuse ObjectMapper in Hadoop Common. Contributed by Hanisha Koneru.

Anu Engineer committed 2017-09-22 15:45:22 -07:00
parent 164a063e74
commit e1b32e0959
6 changed files with 36 additions and 19 deletions
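
Every change in this commit applies the same pattern: constructing a Jackson ObjectMapper is comparatively expensive, while the ObjectReader and ObjectWriter objects derived from one are immutable and thread-safe, so each class can build them once in a static field and reuse them on every call. A minimal sketch of the before/after shape (JsonUtil is an illustrative name, not a class touched by this commit):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

import java.io.IOException;
import java.util.Map;

public class JsonUtil {

  // Before: a fresh mapper on every call (what the removed lines below do).
  static String toJsonBefore(Object obj) throws IOException {
    return new ObjectMapper().writeValueAsString(obj);
  }

  // After: one immutable writer/reader per class, built once.
  private static final ObjectWriter WRITER =
      new ObjectMapper().writerWithDefaultPrettyPrinter();
  private static final ObjectReader READER =
      new ObjectMapper().readerFor(Map.class);

  static String toJsonAfter(Object obj) throws IOException {
    return WRITER.writeValueAsString(obj);
  }

  static Map<String, Object> fromJson(String json) throws IOException {
    return READER.readValue(json);
  }
}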

KMSClientProvider.java

@@ -77,6 +77,7 @@
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
@@ -133,6 +134,9 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
   private final ValueQueue<EncryptedKeyVersion> encKeyVersionQueue;
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
   private class EncryptedQueueRefiller implements
       ValueQueue.QueueRefiller<EncryptedKeyVersion> {
@@ -226,8 +230,7 @@ public KMSEncryptedKeyVersion(String keyName, String keyVersionName,
   private static void writeJson(Object obj, OutputStream os)
       throws IOException {
     Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
-    ObjectMapper jsonMapper = new ObjectMapper();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, obj);
+    WRITER.writeValue(writer, obj);
   }

   /**

DecayRpcScheduler.java

@@ -36,6 +36,7 @@
 import javax.management.ObjectName;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.AtomicDoubleArray;
 import org.apache.commons.lang.exception.ExceptionUtils;
@@ -128,6 +129,8 @@ public class DecayRpcScheduler implements RpcScheduler,
   public static final Logger LOG =
       LoggerFactory.getLogger(DecayRpcScheduler.class);
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
   // Track the decayed and raw (no decay) number of calls for each schedulable
   // identity from all previous decay windows: idx 0 for decayed call count and
   // idx 1 for the raw call count
@@ -909,8 +912,7 @@ public String getSchedulingDecisionSummary() {
       return "{}";
     } else {
       try {
-        ObjectMapper om = new ObjectMapper();
-        return om.writeValueAsString(decisions);
+        return WRITER.writeValueAsString(decisions);
       } catch (Exception e) {
         return "Error: " + e.getMessage();
       }
@@ -919,8 +921,7 @@ public String getSchedulingDecisionSummary() {
   public String getCallVolumeSummary() {
     try {
-      ObjectMapper om = new ObjectMapper();
-      return om.writeValueAsString(getDecayedCallCounts());
+      return WRITER.writeValueAsString(getDecayedCallCounts());
     } catch (Exception e) {
       return "Error: " + e.getMessage();
     }
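
These summary methods back JMX metrics and may be invoked from multiple threads; Jackson documents ObjectReader and ObjectWriter as fully immutable and thread-safe, so the single static WRITER needs no synchronization. A small standalone demonstration of concurrent reuse (class and variable names here are illustrative, not from the patch):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;

import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class SharedWriterDemo {
  // One writer for the whole class; no locking needed around its use.
  private static final ObjectWriter WRITER = new ObjectMapper().writer();

  public static void main(String[] args) throws InterruptedException {
    Map<String, Integer> counts = Collections.singletonMap("calls", 42);
    ExecutorService pool = Executors.newFixedThreadPool(4);
    for (int i = 0; i < 8; i++) {
      pool.execute(() -> {
        try {
          // Concurrent callers share WRITER safely.
          System.out.println(WRITER.writeValueAsString(counts));
        } catch (Exception e) {
          e.printStackTrace();
        }
      });
    }
    pool.shutdown();
    pool.awaitTermination(10, TimeUnit.SECONDS);
  }
}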

Log4Json.java

@@ -24,6 +24,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.MappingJsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.node.ContainerNode;
 import org.apache.log4j.Layout;
 import org.apache.log4j.helpers.ISO8601DateFormat;
@@ -105,6 +106,7 @@ public class Log4Json extends Layout {
    * configuration it must be done in a static initializer block.
    */
   private static final JsonFactory factory = new MappingJsonFactory();
+  private static final ObjectReader READER = new ObjectMapper(factory).reader();
   public static final String DATE = "date";
   public static final String EXCEPTION_CLASS = "exceptionclass";
   public static final String LEVEL = "level";
@@ -252,8 +254,7 @@ public void activateOptions() {
    * @throws IOException on any parsing problems
    */
   public static ContainerNode parse(String json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper(factory);
-    JsonNode jsonNode = mapper.readTree(json);
+    JsonNode jsonNode = READER.readTree(json);
     if (!(jsonNode instanceof ContainerNode)) {
       throw new IOException("Wrong JSON data: " + json);
     }
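
Here the reader is derived from the same MappingJsonFactory the layout already holds, and ObjectReader.readTree() accepts the event string directly, exactly as the new parse() does. A self-contained sketch of that parse path (JsonEventParser is a hypothetical stand-in for Log4Json):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.MappingJsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.node.ContainerNode;

import java.io.IOException;

public class JsonEventParser {
  // Built once, from the same kind of factory Log4Json keeps statically.
  private static final ObjectReader READER =
      new ObjectMapper(new MappingJsonFactory()).reader();

  public static ContainerNode<?> parse(String json) throws IOException {
    JsonNode node = READER.readTree(json);
    if (!(node instanceof ContainerNode)) {
      throw new IOException("Wrong JSON data: " + json);
    }
    return (ContainerNode<?>) node;
  }

  public static void main(String[] args) throws IOException {
    System.out.println(parse("{\"level\":\"INFO\",\"message\":\"ok\"}"));
  }
}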

MetricsJsonBuilder.java

@@ -18,17 +18,18 @@
 package org.apache.hadoop.metrics2;
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
 import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.LinkedHashMap;
-import java.util.Map;

 /**
  * Build a JSON dump of the metrics.
  *
@@ -44,6 +45,9 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   private final MetricsCollector parent;
   private Map<String, Object> innerMetrics = new LinkedHashMap<>();
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writer();
   /**
    * Build an instance.
    * @param parent parent collector. Unused in this instance; only used for
@@ -116,7 +120,7 @@ public MetricsCollector parent() {
   @Override
   public String toString() {
     try {
-      return new ObjectMapper().writeValueAsString(innerMetrics);
+      return WRITER.writeValueAsString(innerMetrics);
     } catch (IOException e) {
       LOG.warn("Failed to dump to Json.", e);
       return ExceptionUtils.getStackTrace(e);
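
Unlike the other five files, MetricsJsonBuilder is still on the legacy Jackson 1 API (org.codehaus.jackson); the reuse pattern carries over unchanged. A hedged sketch against that legacy API, using only the calls the patch itself relies on:

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;

import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class LegacyWriterDemo {
  // Same pattern, Jackson 1.x types (as used by MetricsJsonBuilder).
  private static final ObjectWriter WRITER = new ObjectMapper().writer();

  public static void main(String[] args) throws IOException {
    Map<String, Object> metrics = new LinkedHashMap<String, Object>();
    metrics.put("records", 3);
    System.out.println(WRITER.writeValueAsString(metrics));
  }
}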

DelegationTokenAuthenticator.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.security.token.delegation.web;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.security.SecurityUtil;
@@ -54,6 +55,9 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
   private static final String CONTENT_TYPE = "Content-Type";
   private static final String APPLICATION_JSON_MIME = "application/json";
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(Map.class);
   private static final String HTTP_GET = "GET";
   private static final String HTTP_PUT = "PUT";
@@ -316,8 +320,7 @@ private Map doDelegationTokenOperation(URL url,
       if (contentType != null &&
           contentType.contains(APPLICATION_JSON_MIME)) {
         try {
-          ObjectMapper mapper = new ObjectMapper();
-          ret = mapper.readValue(conn.getInputStream(), Map.class);
+          ret = READER.readValue(conn.getInputStream());
         } catch (Exception ex) {
           throw new AuthenticationException(String.format(
               "'%s' did not handle the '%s' delegation token operation: %s",

HttpExceptionUtils.java

@@ -18,6 +18,8 @@
 package org.apache.hadoop.util;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -54,6 +56,11 @@ public class HttpExceptionUtils {
   private static final String ENTER = System.getProperty("line.separator");
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(Map.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
   /**
    * Creates an HTTP servlet response serializing the exception in it as JSON.
    *
@@ -74,9 +81,8 @@ public static void createServletExceptionResponse(
     json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
     Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
     jsonResponse.put(ERROR_JSON, json);
-    ObjectMapper jsonMapper = new ObjectMapper();
     Writer writer = response.getWriter();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, jsonResponse);
+    WRITER.writeValue(writer, jsonResponse);
     writer.flush();
   }
@@ -144,8 +150,7 @@ public static void validateResponse(HttpURLConnection conn,
     InputStream es = null;
     try {
       es = conn.getErrorStream();
-      ObjectMapper mapper = new ObjectMapper();
-      Map json = mapper.readValue(es, Map.class);
+      Map json = READER.readValue(es);
       json = (Map) json.get(ERROR_JSON);
       String exClass = (String) json.get(ERROR_CLASSNAME_JSON);
       String exMsg = (String) json.get(ERROR_MESSAGE_JSON);
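
A closing observation, not something the patch does: since ObjectReader and ObjectWriter are derived views, both of HttpExceptionUtils' static fields could equally be built from one shared ObjectMapper; derivation is cheap and the derived objects stay immutable. A sketch of that variation (SharedMapperDemo is a hypothetical name):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class SharedMapperDemo {
  // One mapper, two derived (immutable, thread-safe) views of it.
  private static final ObjectMapper MAPPER = new ObjectMapper();
  private static final ObjectReader READER = MAPPER.readerFor(Map.class);
  private static final ObjectWriter WRITER =
      MAPPER.writerWithDefaultPrettyPrinter();

  public static void main(String[] args) throws IOException {
    Map<String, Object> error = new LinkedHashMap<>();
    error.put("message", "Not Found");
    String json = WRITER.writeValueAsString(error);
    Map<?, ?> roundTrip = READER.readValue(json);
    System.out.println(roundTrip.get("message"));
  }
}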