HDFS-12064. Reuse object mapper in HDFS. Contributed by Hanisha Koneru.

Anu Engineer 2017-09-23 10:11:00 -07:00
parent cda3378659
commit 36fb90c939
6 changed files with 39 additions and 25 deletions
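Every file in this patch makes the same change: instead of constructing a new ObjectMapper on each serialization or deserialization call, a Jackson ObjectReader or ObjectWriter is built once and kept in a static final field. ObjectMapper is comparatively expensive to create, while ObjectReader and ObjectWriter are immutable and safe to share across threads. A minimal before/after sketch of the pattern (the ReportSerializer class and its method names are illustrative, not part of the patch):

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;

public class ReportSerializer {
  // Before: a fresh ObjectMapper was built on every call.
  public String toJsonBefore(Object report) throws JsonProcessingException {
    return new ObjectMapper().writeValueAsString(report);
  }

  // After: the writer is built once; ObjectWriter is immutable and
  // thread-safe, so a single shared instance serves all callers.
  private static final ObjectWriter WRITER = new ObjectMapper().writer();

  public String toJsonAfter(Object report) throws JsonProcessingException {
    return WRITER.writeValueAsString(report);
  }
}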

SlowDiskTracker.java

@@ -22,6 +22,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
@@ -67,6 +68,11 @@ public class SlowDiskTracker {
*/
private final Timer timer;
/**
* ObjectWriter to convert JSON reports to String.
*/
private static final ObjectWriter WRITER = new ObjectMapper().writer();
/**
* Number of disks to include in JSON report per operation. We will return
* disks with the highest latency.
@@ -254,12 +260,11 @@ public int compare(DiskLatency o1, DiskLatency o2) {
* serialization failed.
*/
public String getSlowDiskReportAsJsonString() {
ObjectMapper objectMapper = new ObjectMapper();
try {
if (slowDisksReport.isEmpty()) {
return null;
}
return objectMapper.writeValueAsString(slowDisksReport);
return WRITER.writeValueAsString(slowDisksReport);
} catch (JsonProcessingException e) {
// Failed to serialize. Don't log the exception call stack.
LOG.debug("Failed to serialize statistics" + e);

SlowPeerTracker.java

@@ -21,6 +21,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.common.primitives.Ints;
@@ -69,6 +70,10 @@ public class SlowPeerTracker {
*/
private final Timer timer;
/**
* ObjectWriter to convert JSON reports to String.
*/
private static final ObjectWriter WRITER = new ObjectMapper().writer();
/**
* Number of nodes to include in JSON report. We will return nodes with
* the highest number of votes from peers.
@@ -188,9 +193,8 @@ private SortedSet<String> filterNodeReports(
public String getJson() {
Collection<ReportForJson> validReports = getJsonReports(
MAX_NODES_TO_REPORT);
ObjectMapper objectMapper = new ObjectMapper();
try {
return objectMapper.writeValueAsString(validReports);
return WRITER.writeValueAsString(validReports);
} catch (JsonProcessingException e) {
// Failed to serialize. Don't log the exception call stack.
LOG.debug("Failed to serialize statistics" + e);

NodePlan.java

@@ -18,8 +18,9 @@
package org.apache.hadoop.hdfs.server.diskbalancer.planner;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.base.Preconditions;
import java.io.IOException;
@@ -38,6 +39,10 @@ public class NodePlan {
private int port;
private long timeStamp;
private static final ObjectMapper MAPPER = new ObjectMapper();
private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class);
private static final ObjectWriter WRITER = MAPPER.writerFor(
MAPPER.constructType(NodePlan.class));
/**
* returns timestamp when this plan was created.
*
@@ -153,8 +158,7 @@ public void setPort(int port) {
* @throws IOException
*/
public static NodePlan parseJson(String json) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(json, NodePlan.class);
return READER.readValue(json);
}
/**
@@ -164,10 +168,7 @@ public static NodePlan parseJson(String json) throws IOException {
* @throws IOException
*/
public String toJson() throws IOException {
ObjectMapper mapper = new ObjectMapper();
JavaType planType = mapper.constructType(NodePlan.class);
return mapper.writerFor(planType)
.writeValueAsString(this);
return WRITER.writeValueAsString(this);
}
/**
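
In NodePlan the hoisted READER and WRITER are typed to the bean itself via readerFor and writerFor(constructType(...)), so parseJson and toJson both reuse the same shared instances. A small sketch of the same round-trip, using a hypothetical Plan bean rather than the real NodePlan:

import java.io.IOException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

public class PlanJson {
  private static final ObjectMapper MAPPER = new ObjectMapper();
  // Reader and writer are bound to the bean type once and shared; both are
  // immutable, so concurrent use from multiple threads is safe.
  private static final ObjectReader READER = MAPPER.readerFor(Plan.class);
  private static final ObjectWriter WRITER =
      MAPPER.writerFor(MAPPER.constructType(Plan.class));

  public static Plan fromJson(String json) throws IOException {
    return READER.readValue(json);
  }

  public static String toJson(Plan plan) throws IOException {
    return WRITER.writeValueAsString(plan);
  }

  // Hypothetical bean used only for this sketch.
  public static class Plan {
    public String nodeName;
    public int port;
  }
}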

TestSlowDiskTracker.java

@@ -25,6 +25,7 @@
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.conf.Configuration;
import static org.apache.hadoop.hdfs.DFSConfigKeys
@@ -76,6 +77,8 @@ public class TestSlowDiskTracker {
private FakeTimer timer;
private long reportValidityMs;
private static final long OUTLIERS_REPORT_INTERVAL = 1000;
private static final ObjectReader READER = new ObjectMapper().readerFor(
new TypeReference<ArrayList<DiskLatency>>() {});
static {
conf = new HdfsConfiguration();
@@ -416,8 +419,7 @@ private boolean isDiskInReports(ArrayList<DiskLatency> reports,
private ArrayList<DiskLatency> getAndDeserializeJson(
final String json) throws IOException {
return (new ObjectMapper()).readValue(json,
new TypeReference<ArrayList<DiskLatency>>() {});
return READER.readValue(json);
}
private void addSlowDiskForTesting(String dnID, String disk,
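
The test builds its shared ObjectReader from a TypeReference so the generic element type of the list survives erasure and Jackson binds directly to DiskLatency objects instead of maps. A standalone sketch of that technique with a hypothetical Item type:

import java.io.IOException;
import java.util.List;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

public class ListJson {
  // The anonymous TypeReference subclass captures the full generic type,
  // so the shared reader produces List<Item> rather than a list of maps.
  private static final ObjectReader LIST_READER =
      new ObjectMapper().readerFor(new TypeReference<List<Item>>() {});

  public static List<Item> parse(String json) throws IOException {
    return LIST_READER.readValue(json);
  }

  // Hypothetical element type used only for this sketch.
  public static class Item {
    public String name;
    public long latencyMs;
  }
}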

TestSlowPeerTracker.java

@@ -20,9 +20,11 @@
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.blockmanagement.SlowPeerTracker.ReportForJson;
import org.apache.hadoop.hdfs.server.blockmanagement.SlowPeerTracker
.ReportForJson;
import org.apache.hadoop.util.FakeTimer;
import org.junit.Before;
import org.junit.Rule;
@@ -39,7 +41,6 @@
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
* Tests for {@link SlowPeerTracker}.
*/
@@ -57,6 +58,8 @@ public class TestSlowPeerTracker {
private SlowPeerTracker tracker;
private FakeTimer timer;
private long reportValidityMs;
private static final ObjectReader READER =
new ObjectMapper().readerFor(new TypeReference<Set<ReportForJson>>() {});
@Before
public void setup() {
@@ -220,7 +223,6 @@ private Set<ReportForJson> getAndDeserializeJson()
throws IOException {
final String json = tracker.getJson();
LOG.info("Got JSON: {}", json);
return (new ObjectMapper()).readValue(
json, new TypeReference<Set<ReportForJson>>() {});
return READER.readValue(json);
}
}

TestJsonUtil.java

@@ -52,6 +52,10 @@
import com.google.common.collect.Lists;
public class TestJsonUtil {
private static final ObjectReader READER =
new ObjectMapper().readerFor(Map.class);
static FileStatus toFileStatus(HdfsFileStatus f, String parent) {
return new FileStatus(f.getLen(), f.isDirectory(), f.getReplication(),
f.getBlockSize(), f.getModificationTime(), f.getAccessTime(),
@@ -76,9 +80,8 @@ public void testHdfsFileStatus() throws IOException {
System.out.println("fstatus = " + fstatus);
final String json = JsonUtil.toJsonString(status, true);
System.out.println("json = " + json.replace(",", ",\n "));
ObjectReader reader = new ObjectMapper().readerFor(Map.class);
final HdfsFileStatus s2 =
JsonUtilClient.toFileStatus((Map<?, ?>) reader.readValue(json), true);
JsonUtilClient.toFileStatus((Map<?, ?>) READER.readValue(json), true);
final FileStatus fs2 = toFileStatus(s2, parent);
System.out.println("s2 = " + s2);
System.out.println("fs2 = " + fs2);
@@ -164,8 +167,7 @@ public void testToDatanodeInfoWithName() throws Exception {
public void testToAclStatus() throws IOException {
String jsonString =
"{\"AclStatus\":{\"entries\":[\"user::rwx\",\"user:user1:rw-\",\"group::rw-\",\"other::r-x\"],\"group\":\"supergroup\",\"owner\":\"testuser\",\"stickyBit\":false}}";
ObjectReader reader = new ObjectMapper().readerFor(Map.class);
Map<?, ?> json = reader.readValue(jsonString);
Map<?, ?> json = READER.readValue(jsonString);
List<AclEntry> aclSpec =
Lists.newArrayList(aclEntry(ACCESS, USER, ALL),
@@ -224,8 +226,7 @@ public void testToXAttrMap() throws IOException {
String jsonString =
"{\"XAttrs\":[{\"name\":\"user.a1\",\"value\":\"0x313233\"}," +
"{\"name\":\"user.a2\",\"value\":\"0x313131\"}]}";
ObjectReader reader = new ObjectMapper().readerFor(Map.class);
Map<?, ?> json = reader.readValue(jsonString);
Map<?, ?> json = READER.readValue(jsonString);
XAttr xAttr1 = (new XAttr.Builder()).setNameSpace(XAttr.NameSpace.USER).
setName("a1").setValue(XAttrCodec.decodeValue("0x313233")).build();
XAttr xAttr2 = (new XAttr.Builder()).setNameSpace(XAttr.NameSpace.USER).
@@ -250,8 +251,7 @@ public void testGetXAttrFromJson() throws IOException {
String jsonString =
"{\"XAttrs\":[{\"name\":\"user.a1\",\"value\":\"0x313233\"}," +
"{\"name\":\"user.a2\",\"value\":\"0x313131\"}]}";
ObjectReader reader = new ObjectMapper().readerFor(Map.class);
Map<?, ?> json = reader.readValue(jsonString);
Map<?, ?> json = READER.readValue(jsonString);
// Get xattr: user.a2
byte[] value = JsonUtilClient.getXAttr(json, "user.a2");
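
TestJsonUtil replaces the per-test ObjectMapper with one reader bound to Map.class and shared by every test method, binding arbitrary JSON objects to Map<?, ?>. A compact sketch of that usage (the JsonMaps class and parseObject helper are made up for illustration):

import java.io.IOException;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

public final class JsonMaps {
  // One shared, thread-safe reader that binds any JSON object to a Map.
  private static final ObjectReader MAP_READER =
      new ObjectMapper().readerFor(Map.class);

  public static Map<?, ?> parseObject(String json) throws IOException {
    return MAP_READER.readValue(json);
  }
}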