From e6a7044b8530afded8f8e86ff309dd0e4d39238a Mon Sep 17 00:00:00 2001
From: Akira Ajisaka
Date: Sat, 13 Feb 2016 01:57:24 +0900
Subject: [PATCH] HDFS-9768. Reuse ObjectMapper instance in HDFS to improve
 the performance. Contributed by Lin Yiqun.

---
 ...ConfRefreshTokenBasedAccessTokenProvider.java |  8 ++++----
 .../CredentialBasedAccessTokenProvider.java      |  8 +++++---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt      |  3 +++
 .../datanode/fsdataset/impl/FsVolumeImpl.java    | 16 ++++++++++------
 .../hdfs/server/namenode/FSNamesystem.java       |  5 ++---
 .../tools/offlineImageViewer/FSImageLoader.java  | 10 +++-------
 .../org/apache/hadoop/hdfs/web/JsonUtil.java     |  4 ++++
 7 files changed, 31 insertions(+), 23 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
index 773eeaebf16..2d62d101141 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
@@ -54,6 +55,8 @@ import static org.apache.hadoop.hdfs.web.oauth2.Utils.notNull;
 @InterfaceStability.Evolving
 public class ConfRefreshTokenBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
   public static final String OAUTH_REFRESH_TOKEN_KEY
       = "dfs.webhdfs.oauth2.refresh.token";
 
@@ -126,10 +129,7 @@ public class ConfRefreshTokenBasedAccessTokenProvider
             + responseBody.code() + ", text = " + responseBody.toString());
       }
 
-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
-
+      Map<?, ?> response = READER.readValue(responseBody.body().string());
       String newExpiresIn = response.get(EXPIRES_IN).toString();
       accessTokenTimer.setExpiresIn(newExpiresIn);
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
index 15cda88f1af..0d9006e086a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
@@ -54,6 +55,9 @@ import static org.apache.hadoop.hdfs.web.oauth2.Utils.notNull;
 @InterfaceStability.Evolving
 public abstract class CredentialBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
+
   public static final String OAUTH_CREDENTIAL_KEY
       = "dfs.webhdfs.oauth2.credential";
 
@@ -119,9 +123,7 @@ public abstract class CredentialBasedAccessTokenProvider
             + responseBody.code() + ", text = " + responseBody.toString());
       }
 
-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
+      Map<?, ?> response = READER.readValue(responseBody.body().string());
 
       String newExpiresIn = response.get(EXPIRES_IN).toString();
       timer.setExpiresIn(newExpiresIn);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c0d7113b771..27c4c0f2191 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1978,6 +1978,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9686. Remove useless boxing/unboxing code.
     (Kousuke Saruta via aajisaka)
 
+    HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance.
+    (Lin Yiqun via aajisaka)
+
   BUG FIXES
 
     HDFS-7501. TransactionsSinceLastCheckpoint can be negative on SBNs.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
index 86f03c2d435..1533a323500 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
@@ -62,6 +62,8 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.codehaus.jackson.annotate.JsonProperty;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -80,6 +82,10 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(BlockIteratorState.class);
 
   private final FsDatasetImpl dataset;
   private final String storageID;
@@ -708,10 +714,9 @@ public class FsVolumeImpl implements FsVolumeSpi {
     public void save() throws IOException {
       state.lastSavedMs = Time.now();
       boolean success = false;
-      ObjectMapper mapper = new ObjectMapper();
       try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
           new FileOutputStream(getTempSaveFile(), false), "UTF-8"))) {
-        mapper.writerWithDefaultPrettyPrinter().writeValue(writer, state);
+        WRITER.writeValue(writer, state);
         success = true;
       } finally {
         if (!success) {
@@ -725,17 +730,16 @@ public class FsVolumeImpl implements FsVolumeSpi {
           StandardCopyOption.ATOMIC_MOVE);
       if (LOG.isTraceEnabled()) {
         LOG.trace("save({}, {}): saved {}", storageID, bpid,
-            mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+            WRITER.writeValueAsString(state));
       }
     }
 
     public void load() throws IOException {
-      ObjectMapper mapper = new ObjectMapper();
       File file = getSaveFile();
-      this.state = mapper.reader(BlockIteratorState.class).readValue(file);
+      this.state = READER.readValue(file);
       LOG.trace("load({}, {}): loaded iterator {} from {}: {}",
           storageID, bpid, name, file.getAbsoluteFile(),
-          mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+          WRITER.writeValueAsString(state));
     }
 
     File getSaveFile() {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 0387c329d44..c3a0058b4ec 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -248,6 +248,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.CallerContext;
@@ -275,7 +276,6 @@ import org.apache.hadoop.util.VersionInfo;
 import org.apache.log4j.Appender;
 import org.apache.log4j.AsyncAppender;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.mortbay.util.ajax.JSON;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -4557,9 +4557,8 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
     Map<String, Object> topMap = new TreeMap<String, Object>();
     topMap.put("windows", topWindows);
     topMap.put("timestamp", DFSUtil.dateToIso8601String(now));
-    ObjectMapper mapper = new ObjectMapper();
     try {
-      return mapper.writeValueAsString(topMap);
+      return JsonUtil.toJsonString(topMap);
     } catch (IOException e) {
       LOG.warn("Failed to fetch TopUser metrics", e);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
index 172f5992915..21e9d2e827a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
@@ -54,7 +54,6 @@ import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.LimitInputStream;
-import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -263,10 +262,9 @@ class FSImageLoader {
    * @throws IOException if failed to serialize fileStatus to JSON.
    */
   String getFileStatus(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     FsImageProto.INodeSection.INode inode = fromINodeId(lookup(path));
     return "{\"FileStatus\":\n"
-        + mapper.writeValueAsString(getFileStatus(inode, false)) + "\n}\n";
+        + JsonUtil.toJsonString(getFileStatus(inode, false)) + "\n}\n";
   }
 
   /**
@@ -277,7 +275,6 @@ class FSImageLoader {
    */
   String listStatus(String path) throws IOException {
     StringBuilder sb = new StringBuilder();
-    ObjectMapper mapper = new ObjectMapper();
     List<Map<String, Object>> fileStatusList = getFileStatusList(path);
     sb.append("{\"FileStatuses\":{\"FileStatus\":[\n");
     int i = 0;
@@ -285,7 +282,7 @@ class FSImageLoader {
       if (i++ != 0) {
         sb.append(',');
       }
-      sb.append(mapper.writeValueAsString(fileStatusMap));
+      sb.append(JsonUtil.toJsonString(fileStatusMap));
     }
     sb.append("\n]}}\n");
     return sb.toString();
   }
@@ -318,9 +315,8 @@ class FSImageLoader {
    * @throws IOException if failed to serialize ContentSummary to JSON.
    */
   String getContentSummary(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     return "{\"ContentSummary\":\n"
-        + mapper.writeValueAsString(getContentSummaryMap(path)) + "\n}\n";
+        + JsonUtil.toJsonString(getContentSummaryMap(path)) + "\n}\n";
   }
 
   private Map<String, Object> getContentSummaryMap(String path)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index 342f7198725..368e8790845 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -389,4 +389,8 @@ public class JsonUtil {
     return MAPPER.writeValueAsString(finalMap);
   }
 
+  public static String toJsonString(Object obj) throws IOException {
+    return MAPPER.writeValueAsString(obj);
+  }
+
 }
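
Note on the idiom this patch applies: constructing an org.codehaus.jackson.map.ObjectMapper
is comparatively expensive, while the ObjectReader and ObjectWriter instances derived from
it are immutable and thread-safe, so they can be built once and held in static final fields
instead of being re-created on every call. The following self-contained sketch shows the
same Jackson 1.x pattern in isolation; the class and method names (JsonCodecExample, parse,
toJson) are illustrative only and do not appear in the patch.

import java.io.IOException;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;

public class JsonCodecExample {
  // Built once at class-load time. ObjectReader and ObjectWriter are
  // immutable, so sharing them across threads is safe; the ObjectMapper
  // construction cost is paid only once per process.
  private static final ObjectReader READER =
      new ObjectMapper().reader(Map.class);
  private static final ObjectWriter WRITER =
      new ObjectMapper().writerWithDefaultPrettyPrinter();

  // Deserialize a JSON document into a Map using the shared reader.
  static Map<?, ?> parse(String json) throws IOException {
    return READER.readValue(json);
  }

  // Serialize any object to pretty-printed JSON using the shared writer.
  static String toJson(Object obj) throws IOException {
    return WRITER.writeValueAsString(obj);
  }

  public static void main(String[] args) throws IOException {
    Map<?, ?> response = parse("{\"expires_in\":\"3600\"}");
    System.out.println(response.get("expires_in")); // prints 3600
    System.out.println(toJson(response));           // pretty-printed JSON
  }
}

The same trade-off shows up in the patch itself: FsVolumeImpl keeps separate READER and
WRITER fields because it both loads and saves BlockIteratorState, while JsonUtil exposes
a shared toJsonString(Object) helper so callers such as FSNamesystem and FSImageLoader no
longer need mappers of their own.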