HADOOP-14901. ReuseObjectMapper in Hadoop Common. Contributed by Hanisha Koneru.

Committed by Anu Engineer on 2017-09-25 13:44:51 -07:00
parent 09be0acae0
commit 17b17aed0f
6 changed files with 36 additions and 19 deletions
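Every file in this change applies the same fix: instead of constructing a new ObjectMapper for each call, the class builds one mapper up front and keeps the resulting ObjectReader or ObjectWriter in a static field. Those reader/writer objects are immutable, so one shared instance can serve all callers and the mapper setup cost is paid once per class rather than once per request. Below is a minimal, self-contained sketch of the before-and-after shape, assuming the same Jackson 1.x (org.codehaus.jackson) API the patch uses; the class and method names are illustrative, not part of the patch.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;

public class JsonCodec {

  // Before: every call built (and threw away) a whole ObjectMapper.
  static String toJsonSlow(Map<String, Object> data) throws IOException {
    return new ObjectMapper().writeValueAsString(data);
  }

  // After: mapper configured once; immutable reader/writer views are shared.
  private static final ObjectWriter WRITER = new ObjectMapper().writer();
  private static final ObjectReader READER = new ObjectMapper().reader(Map.class);

  static String toJson(Map<String, Object> data) throws IOException {
    return WRITER.writeValueAsString(data);
  }

  static Map fromJson(String json) throws IOException {
    return READER.readValue(
        new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)));
  }

  public static void main(String[] args) throws IOException {
    Map<String, Object> data = new LinkedHashMap<String, Object>();
    data.put("user", "alice");
    data.put("calls", 3);
    String json = toJson(data);
    System.out.println(json);            // {"user":"alice","calls":3}
    System.out.println(fromJson(json));  // {user=alice, calls=3}
  }
}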

KMSClientProvider.java

@@ -43,6 +43,7 @@ import org.apache.hadoop.util.HttpExceptionUtils;
import org.apache.hadoop.util.KMSUtil;
import org.apache.http.client.utils.URIBuilder;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -127,6 +128,9 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
private final ValueQueue<EncryptedKeyVersion> encKeyVersionQueue;
private static final ObjectWriter WRITER =
new ObjectMapper().writerWithDefaultPrettyPrinter();
private class EncryptedQueueRefiller implements
ValueQueue.QueueRefiller<EncryptedKeyVersion> {
@@ -282,8 +286,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
private static void writeJson(Map map, OutputStream os) throws IOException {
Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
ObjectMapper jsonMapper = new ObjectMapper();
jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map);
WRITER.writeValue(writer, map);
}
/**
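For context, here is a self-contained sketch of the rewritten writeJson path: the pretty-printing ObjectWriter is created once and reused for every response written to an OutputStream. The wrapper class and the sample map are hypothetical; the Jackson calls are the ones shown in the hunk above.

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;

public class PrettyJsonWriter {

  // Created once; every writeJson call reuses the same pretty-printing writer.
  private static final ObjectWriter WRITER =
      new ObjectMapper().writerWithDefaultPrettyPrinter();

  static void writeJson(Map map, OutputStream os) throws IOException {
    Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
    WRITER.writeValue(writer, map);  // pretty-printed JSON, as in writeJson()
  }

  public static void main(String[] args) throws IOException {
    Map<String, Object> map = new LinkedHashMap<String, Object>();
    map.put("name", "key1");
    map.put("versions", 3);
    writeJson(map, System.out);
  }
}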

DecayRpcScheduler.java

@@ -50,6 +50,7 @@ import org.apache.hadoop.metrics2.util.Metrics2Util.NameValuePair;
import org.apache.hadoop.metrics2.util.Metrics2Util.TopN;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -128,6 +129,8 @@ public class DecayRpcScheduler implements RpcScheduler,
public static final Logger LOG =
LoggerFactory.getLogger(DecayRpcScheduler.class);
private static final ObjectWriter WRITER = new ObjectMapper().writer();
// Track the decayed and raw (no decay) number of calls for each schedulable
// identity from all previous decay windows: idx 0 for decayed call count and
// idx 1 for the raw call count
@@ -909,8 +912,7 @@ public class DecayRpcScheduler implements RpcScheduler,
return "{}";
} else {
try {
ObjectMapper om = new ObjectMapper();
return om.writeValueAsString(decisions);
return WRITER.writeValueAsString(decisions);
} catch (Exception e) {
return "Error: " + e.getMessage();
}
@@ -919,8 +921,7 @@ public class DecayRpcScheduler implements RpcScheduler,
public String getCallVolumeSummary() {
try {
ObjectMapper om = new ObjectMapper();
return om.writeValueAsString(getDecayedCallCounts());
return WRITER.writeValueAsString(getDecayedCallCounts());
} catch (Exception e) {
return "Error: " + e.getMessage();
}
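Both summary methods above now reduce to the same shape: serialize a small map with the shared plain ObjectWriter and fall back to an error string if that fails. A standalone sketch of that shape, with made-up call counts:

import java.util.HashMap;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;

public class SummaryJson {

  // One plain (non-pretty-printing) writer shared by all summary methods.
  private static final ObjectWriter WRITER = new ObjectMapper().writer();

  static String toJsonOrError(Map<Object, Object> counts) {
    if (counts == null || counts.isEmpty()) {
      return "{}";
    }
    try {
      return WRITER.writeValueAsString(counts);
    } catch (Exception e) {
      return "Error: " + e.getMessage();
    }
  }

  public static void main(String[] args) {
    Map<Object, Object> counts = new HashMap<Object, Object>();
    counts.put("alice", 5L);
    counts.put("bob", 2L);
    System.out.println(toJsonOrError(counts));  // e.g. {"bob":2,"alice":5}
  }
}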

Log4Json.java

@@ -28,6 +28,7 @@ import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.MappingJsonFactory;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.node.ContainerNode;
import java.io.IOException;
@@ -105,6 +106,7 @@ public class Log4Json extends Layout {
* configuration it must be done in a static intializer block.
*/
private static final JsonFactory factory = new MappingJsonFactory();
private static final ObjectReader READER = new ObjectMapper(factory).reader();
public static final String DATE = "date";
public static final String EXCEPTION_CLASS = "exceptionclass";
public static final String LEVEL = "level";
@@ -252,8 +254,7 @@ public class Log4Json extends Layout {
* @throws IOException on any parsing problems
*/
public static ContainerNode parse(String json) throws IOException {
ObjectMapper mapper = new ObjectMapper(factory);
JsonNode jsonNode = mapper.readTree(json);
JsonNode jsonNode = READER.readTree(json);
if (!(jsonNode instanceof ContainerNode)) {
throw new IOException("Wrong JSON data: " + json);
}
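A self-contained sketch of the parse path above: the ObjectReader is built once over a mapper backed by the same MappingJsonFactory, and readTree() plus the ContainerNode check behave as before. The wrapper class and the sample input are illustrative only.

import java.io.IOException;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.MappingJsonFactory;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.node.ContainerNode;

public class JsonTreeParser {

  private static final MappingJsonFactory FACTORY = new MappingJsonFactory();
  // Reader is built once from the factory-backed mapper and then reused.
  private static final ObjectReader READER = new ObjectMapper(FACTORY).reader();

  static ContainerNode parse(String json) throws IOException {
    JsonNode jsonNode = READER.readTree(json);
    if (!(jsonNode instanceof ContainerNode)) {
      throw new IOException("Wrong JSON data: " + json);
    }
    return (ContainerNode) jsonNode;
  }

  public static void main(String[] args) throws IOException {
    ContainerNode node = parse("{\"level\":\"INFO\",\"message\":\"started\"}");
    System.out.println(node.get("message"));  // "started"
  }
}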

MetricsJsonBuilder.java

@@ -18,17 +18,18 @@
package org.apache.hadoop.metrics2;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Build a JSON dump of the metrics.
*
@@ -44,6 +45,9 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
private final MetricsCollector parent;
private Map<String, Object> innerMetrics = new LinkedHashMap<>();
private static final ObjectWriter WRITER =
new ObjectMapper().writer();
/**
* Build an instance.
* @param parent parent collector. Unused in this instance; only used for
@@ -116,7 +120,7 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
@Override
public String toString() {
try {
return new ObjectMapper().writeValueAsString(innerMetrics);
return WRITER.writeValueAsString(innerMetrics);
} catch (IOException e) {
LOG.warn("Failed to dump to Json.", e);
return ExceptionUtils.getStackTrace(e);
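The toString() change above follows the same writer-reuse pattern; here is a sketch with a hypothetical metrics map. The real code logs the failure and returns the stack trace via commons-lang ExceptionUtils, simplified here to keep the example self-contained.

import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;

public class MetricsSnapshot {

  // Shared across all instances; the ObjectWriter is immutable and reusable.
  private static final ObjectWriter WRITER = new ObjectMapper().writer();

  private final Map<String, Object> innerMetrics = new LinkedHashMap<String, Object>();

  void put(String name, Object value) {
    innerMetrics.put(name, value);
  }

  @Override
  public String toString() {
    try {
      return WRITER.writeValueAsString(innerMetrics);
    } catch (IOException e) {
      return e.toString();  // real code: LOG.warn + ExceptionUtils.getStackTrace(e)
    }
  }

  public static void main(String[] args) {
    MetricsSnapshot snapshot = new MetricsSnapshot();
    snapshot.put("callCount", 42);
    snapshot.put("avgTime", 1.5);
    System.out.println(snapshot);  // {"callCount":42,"avgTime":1.5}
  }
}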

DelegationTokenAuthenticator.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdenti
import org.apache.hadoop.util.HttpExceptionUtils;
import org.apache.hadoop.util.StringUtils;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -54,6 +55,9 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
private static final String CONTENT_TYPE = "Content-Type";
private static final String APPLICATION_JSON_MIME = "application/json";
private static final ObjectReader READER =
new ObjectMapper().reader(Map.class);
private static final String HTTP_GET = "GET";
private static final String HTTP_PUT = "PUT";
@@ -316,8 +320,7 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
if (contentType != null &&
contentType.contains(APPLICATION_JSON_MIME)) {
try {
ObjectMapper mapper = new ObjectMapper();
ret = mapper.readValue(conn.getInputStream(), Map.class);
ret = READER.readValue(conn.getInputStream());
} catch (Exception ex) {
throw new AuthenticationException(String.format(
"'%s' did not handle the '%s' delegation token operation: %s",

HttpExceptionUtils.java

@@ -20,6 +20,8 @@ package org.apache.hadoop.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;
@@ -54,6 +56,11 @@ public class HttpExceptionUtils {
private static final String ENTER = System.getProperty("line.separator");
private static final ObjectReader READER =
new ObjectMapper().reader(Map.class);
private static final ObjectWriter WRITER =
new ObjectMapper().writerWithDefaultPrettyPrinter();
/**
* Creates a HTTP servlet response serializing the exception in it as JSON.
*
@@ -74,9 +81,8 @@ public class HttpExceptionUtils {
json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
jsonResponse.put(ERROR_JSON, json);
ObjectMapper jsonMapper = new ObjectMapper();
Writer writer = response.getWriter();
jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, jsonResponse);
WRITER.writeValue(writer, jsonResponse);
writer.flush();
}
@@ -144,8 +150,7 @@ public class HttpExceptionUtils {
InputStream es = null;
try {
es = conn.getErrorStream();
ObjectMapper mapper = new ObjectMapper();
Map json = mapper.readValue(es, Map.class);
Map json = READER.readValue(es);
json = (Map) json.get(ERROR_JSON);
String exClass = (String) json.get(ERROR_CLASSNAME_JSON);
String exMsg = (String) json.get(ERROR_MESSAGE_JSON);
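Taken together, the two HttpExceptionUtils hunks give one pretty-printing writer for producing the error response and one Map-bound reader for parsing it back on the client. A round-trip sketch under those assumptions; the JSON key names and the wrapper class are illustrative, not the actual constants.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;

public class ErrorJsonRoundTrip {

  // Server side: pretty-printed output. Client side: parse back into a Map.
  private static final ObjectWriter WRITER =
      new ObjectMapper().writerWithDefaultPrettyPrinter();
  private static final ObjectReader READER = new ObjectMapper().reader(Map.class);

  // Illustrative key names; the real constants live in HttpExceptionUtils.
  static String toErrorJson(Exception ex) throws IOException {
    Map<String, Object> json = new LinkedHashMap<String, Object>();
    json.put("message", ex.getMessage());
    json.put("exception", ex.getClass().getName());
    Map<String, Object> response = new LinkedHashMap<String, Object>();
    response.put("error", json);
    return WRITER.writeValueAsString(response);
  }

  static void printError(String body) throws IOException {
    Map json = READER.readValue(
        new ByteArrayInputStream(body.getBytes(StandardCharsets.UTF_8)));
    Map error = (Map) json.get("error");
    System.out.println(error.get("exception") + ": " + error.get("message"));
  }

  public static void main(String[] args) throws IOException {
    String body = toErrorJson(new IllegalStateException("token expired"));
    System.out.println(body);
    printError(body);  // java.lang.IllegalStateException: token expired
  }
}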