HDFS-12064. Reuse object mapper in HDFS. Contributed by Hanisha Koneru.

Anu Engineer 2017-09-23 10:11:00 -07:00
parent cda3378659
commit 36fb90c939
6 changed files with 39 additions and 25 deletions
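For context on why reuse matters: constructing a Jackson ObjectMapper is comparatively expensive, since the mapper builds and caches its (de)serialization state, while the ObjectReader and ObjectWriter objects derived from it are documented as immutable and thread-safe. Keeping one shared instance per class therefore removes a per-call allocation and setup cost on every report-serialization path. A minimal sketch of the before/after pattern follows; it is illustrative only, and the ReportSerializer and Report names are hypothetical, not Hadoop code.

// Sketch of the pattern this patch applies; Report is a hypothetical bean.
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;

public class ReportSerializer {

  // Before: a new ObjectMapper per call, paying construction cost each time.
  public static String toJsonSlow(Report report)
      throws JsonProcessingException {
    return new ObjectMapper().writeValueAsString(report);
  }

  // After: one shared writer. ObjectWriter is immutable and thread-safe,
  // so a static instance can be used concurrently without locking.
  private static final ObjectWriter WRITER = new ObjectMapper().writer();

  public static String toJsonFast(Report report)
      throws JsonProcessingException {
    return WRITER.writeValueAsString(report);
  }

  /** Hypothetical payload type. */
  public static class Report {
    public String node;
    public long latencyMs;
  }
}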

SlowDiskTracker.java

@@ -22,6 +22,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
@@ -67,6 +68,11 @@ public class SlowDiskTracker {
    */
   private final Timer timer;
 
+  /**
+   * ObjectWriter to convert JSON reports to String.
+   */
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+
   /**
    * Number of disks to include in JSON report per operation. We will return
    * disks with the highest latency.
@@ -254,12 +260,11 @@ public class SlowDiskTracker {
    * serialization failed.
    */
   public String getSlowDiskReportAsJsonString() {
-    ObjectMapper objectMapper = new ObjectMapper();
     try {
       if (slowDisksReport.isEmpty()) {
         return null;
       }
-      return objectMapper.writeValueAsString(slowDisksReport);
+      return WRITER.writeValueAsString(slowDisksReport);
     } catch (JsonProcessingException e) {
       // Failed to serialize. Don't log the exception call stack.
       LOG.debug("Failed to serialize statistics" + e);

SlowPeerTracker.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs.server.blockmanagement;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.primitives.Ints;
@@ -69,6 +70,10 @@ public class SlowPeerTracker {
    */
   private final Timer timer;
 
+  /**
+   * ObjectWriter to convert JSON reports to String.
+   */
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+
   /**
    * Number of nodes to include in JSON report. We will return nodes with
    * the highest number of votes from peers.
@@ -188,9 +193,8 @@ public class SlowPeerTracker {
   public String getJson() {
     Collection<ReportForJson> validReports = getJsonReports(
         MAX_NODES_TO_REPORT);
-    ObjectMapper objectMapper = new ObjectMapper();
     try {
-      return objectMapper.writeValueAsString(validReports);
+      return WRITER.writeValueAsString(validReports);
     } catch (JsonProcessingException e) {
       // Failed to serialize. Don't log the exception call stack.
       LOG.debug("Failed to serialize statistics" + e);

NodePlan.java

@@ -18,8 +18,9 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.planner;
 
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.fasterxml.jackson.databind.JavaType;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.base.Preconditions;
 
 import java.io.IOException;
@@ -38,6 +39,10 @@ public class NodePlan {
   private int port;
   private long timeStamp;
 
+  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class);
+  private static final ObjectWriter WRITER = MAPPER.writerFor(
+      MAPPER.constructType(NodePlan.class));
+
   /**
    * returns timestamp when this plan was created.
    *
@@ -153,8 +158,7 @@
    * @throws IOException
    */
   public static NodePlan parseJson(String json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.readValue(json, NodePlan.class);
+    return READER.readValue(json);
   }
 
   /**
@@ -164,10 +168,7 @@
    * @throws IOException
    */
   public String toJson() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JavaType planType = mapper.constructType(NodePlan.class);
-    return mapper.writerFor(planType)
-        .writeValueAsString(this);
+    return WRITER.writeValueAsString(this);
   }
 
   /**
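NodePlan goes one step further than the trackers: it binds the reader and writer to the target type once, via readerFor and writerFor(constructType(...)), so parseJson and toJson become one-liners. A self-contained round-trip in the same style, using a hypothetical Plan bean in place of NodePlan:

import java.io.IOException;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

public class PlanJson {
  private static final ObjectMapper MAPPER = new ObjectMapper();
  // readerFor/writerFor fix the target type up front; the resulting
  // ObjectReader/ObjectWriter are immutable and safe to share statically.
  private static final ObjectReader READER = MAPPER.readerFor(Plan.class);
  private static final ObjectWriter WRITER = MAPPER.writerFor(
      MAPPER.constructType(Plan.class));

  public static Plan parseJson(String json) throws IOException {
    return READER.readValue(json);
  }

  public static String toJson(Plan plan) throws IOException {
    return WRITER.writeValueAsString(plan);
  }

  /** Hypothetical bean standing in for NodePlan. */
  public static class Plan {
    public String nodeName;
    public int port;
  }

  public static void main(String[] args) throws IOException {
    Plan p = new Plan();
    p.nodeName = "dn1";
    p.port = 9867;
    String json = toJson(p);      // {"nodeName":"dn1","port":9867}
    Plan back = parseJson(json);  // round-trips through the shared READER
    System.out.println(json + " -> " + back.nodeName + ":" + back.port);
  }
}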

TestSlowDiskTracker.java

@@ -25,6 +25,7 @@ import static org.junit.Assert.assertTrue;
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.conf.Configuration;
 import static org.apache.hadoop.hdfs.DFSConfigKeys
@@ -76,6 +77,8 @@ public class TestSlowDiskTracker {
   private FakeTimer timer;
   private long reportValidityMs;
   private static final long OUTLIERS_REPORT_INTERVAL = 1000;
+  private static final ObjectReader READER = new ObjectMapper().readerFor(
+      new TypeReference<ArrayList<DiskLatency>>() {});
 
   static {
     conf = new HdfsConfiguration();
@@ -416,8 +419,7 @@
   private ArrayList<DiskLatency> getAndDeserializeJson(
       final String json) throws IOException {
-    return (new ObjectMapper()).readValue(json,
-        new TypeReference<ArrayList<DiskLatency>>() {});
+    return READER.readValue(json);
   }
 
   private void addSlowDiskForTesting(String dnID, String disk,
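The test reader targets ArrayList<DiskLatency>, a generic type that a plain Class literal cannot express because of erasure; the anonymous TypeReference subclass captures the element type for Jackson. A standalone sketch of the same technique, with a hypothetical Latency class in place of DiskLatency:

import java.io.IOException;
import java.util.ArrayList;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

public class LatencyJson {
  // The anonymous TypeReference subclass records ArrayList<Latency> in its
  // superclass type parameter, so the reader can build the full generic type.
  private static final ObjectReader READER = new ObjectMapper().readerFor(
      new TypeReference<ArrayList<Latency>>() {});

  public static ArrayList<Latency> parse(String json) throws IOException {
    return READER.readValue(json);
  }

  /** Hypothetical element type. */
  public static class Latency {
    public String disk;
    public double latencyMs;
  }

  public static void main(String[] args) throws IOException {
    ArrayList<Latency> list =
        parse("[{\"disk\":\"/data/1\",\"latencyMs\":12.5}]");
    System.out.println(list.get(0).disk + " " + list.get(0).latencyMs);
  }
}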

TestSlowPeerTracker.java

@@ -20,9 +20,11 @@ package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.hdfs.server.blockmanagement.SlowPeerTracker.ReportForJson;
+import org.apache.hadoop.hdfs.server.blockmanagement.SlowPeerTracker
+    .ReportForJson;
 import org.apache.hadoop.util.FakeTimer;
 import org.junit.Before;
 import org.junit.Rule;
@@ -39,7 +41,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 
-
 /**
  * Tests for {@link SlowPeerTracker}.
  */
@@ -57,6 +58,8 @@ public class TestSlowPeerTracker {
   private SlowPeerTracker tracker;
   private FakeTimer timer;
   private long reportValidityMs;
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(new TypeReference<Set<ReportForJson>>() {});
 
   @Before
   public void setup() {
@@ -220,7 +223,6 @@
       throws IOException {
     final String json = tracker.getJson();
     LOG.info("Got JSON: {}", json);
-    return (new ObjectMapper()).readValue(
-        json, new TypeReference<Set<ReportForJson>>() {});
+    return READER.readValue(json);
   }
 }

TestJsonUtil.java

@@ -52,6 +52,10 @@ import com.fasterxml.jackson.databind.ObjectReader;
 import com.google.common.collect.Lists;
 
 public class TestJsonUtil {
+
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(Map.class);
+
   static FileStatus toFileStatus(HdfsFileStatus f, String parent) {
     return new FileStatus(f.getLen(), f.isDirectory(), f.getReplication(),
         f.getBlockSize(), f.getModificationTime(), f.getAccessTime(),
@@ -76,9 +80,8 @@
     System.out.println("fstatus = " + fstatus);
     final String json = JsonUtil.toJsonString(status, true);
     System.out.println("json = " + json.replace(",", ",\n "));
-    ObjectReader reader = new ObjectMapper().readerFor(Map.class);
     final HdfsFileStatus s2 =
-        JsonUtilClient.toFileStatus((Map<?, ?>) reader.readValue(json), true);
+        JsonUtilClient.toFileStatus((Map<?, ?>) READER.readValue(json), true);
     final FileStatus fs2 = toFileStatus(s2, parent);
     System.out.println("s2 = " + s2);
     System.out.println("fs2 = " + fs2);
@@ -164,8 +167,7 @@
   public void testToAclStatus() throws IOException {
     String jsonString =
         "{\"AclStatus\":{\"entries\":[\"user::rwx\",\"user:user1:rw-\",\"group::rw-\",\"other::r-x\"],\"group\":\"supergroup\",\"owner\":\"testuser\",\"stickyBit\":false}}";
-    ObjectReader reader = new ObjectMapper().readerFor(Map.class);
-    Map<?, ?> json = reader.readValue(jsonString);
+    Map<?, ?> json = READER.readValue(jsonString);
 
     List<AclEntry> aclSpec =
         Lists.newArrayList(aclEntry(ACCESS, USER, ALL),
@@ -224,8 +226,7 @@
     String jsonString =
         "{\"XAttrs\":[{\"name\":\"user.a1\",\"value\":\"0x313233\"}," +
         "{\"name\":\"user.a2\",\"value\":\"0x313131\"}]}";
-    ObjectReader reader = new ObjectMapper().readerFor(Map.class);
-    Map<?, ?> json = reader.readValue(jsonString);
+    Map<?, ?> json = READER.readValue(jsonString);
     XAttr xAttr1 = (new XAttr.Builder()).setNameSpace(XAttr.NameSpace.USER).
         setName("a1").setValue(XAttrCodec.decodeValue("0x313233")).build();
     XAttr xAttr2 = (new XAttr.Builder()).setNameSpace(XAttr.NameSpace.USER).
@@ -250,8 +251,7 @@
     String jsonString =
         "{\"XAttrs\":[{\"name\":\"user.a1\",\"value\":\"0x313233\"}," +
         "{\"name\":\"user.a2\",\"value\":\"0x313131\"}]}";
-    ObjectReader reader = new ObjectMapper().readerFor(Map.class);
-    Map<?, ?> json = reader.readValue(jsonString);
+    Map<?, ?> json = READER.readValue(jsonString);
 
     // Get xattr: user.a2
     byte[] value = JsonUtilClient.getXAttr(json, "user.a2");
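TestJsonUtil replaces four identical local readers with one static reader targeting Map.class, which is enough for untyped JSON: values come back as Strings, Numbers, Booleans, Lists and nested Maps. A small usage sketch of the shared-reader idea (the JsonToMap class is hypothetical):

import java.io.IOException;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

public class JsonToMap {
  // One shared reader for untyped JSON objects.
  private static final ObjectReader READER =
      new ObjectMapper().readerFor(Map.class);

  public static void main(String[] args) throws IOException {
    Map<?, ?> json = READER.readValue(
        "{\"owner\":\"testuser\",\"stickyBit\":false}");
    System.out.println(json.get("owner"));      // testuser
    System.out.println(json.get("stickyBit"));  // false
  }
}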