HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance. Contributed by Lin Yiqun.

(cherry picked from commit e6a7044b85)
Akira Ajisaka 2016-02-13 01:57:24 +09:00
parent 2ea1913cad
commit a714d04f5d
7 changed files with 32 additions and 23 deletions
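The change follows one pattern across all of the touched classes: constructing a Jackson ObjectMapper on every call is relatively expensive, while ObjectReader and ObjectWriter instances are immutable and safe to share across threads, so each class now builds them once in a static field and reuses them. Below is a minimal sketch of that pattern; the ResponseParser class and the sample JSON are illustrative only and not part of the patch.

    import java.io.IOException;
    import java.util.Map;

    import org.codehaus.jackson.map.ObjectMapper;
    import org.codehaus.jackson.map.ObjectReader;

    // Illustrative class (not from the patch): shows the before/after pattern.
    public class ResponseParser {

      // Before: a new ObjectMapper was built on every call, which is costly.
      static Map<?, ?> parseOld(String json) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        return mapper.reader(Map.class).readValue(json);
      }

      // After: ObjectReader is immutable and thread-safe, so one shared
      // instance is created once and reused across calls and threads.
      private static final ObjectReader READER =
          new ObjectMapper().reader(Map.class);

      static Map<?, ?> parseNew(String json) throws IOException {
        return READER.readValue(json);
      }

      public static void main(String[] args) throws IOException {
        System.out.println(parseNew("{\"expires_in\":\"3600\"}"));
      }
    }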

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
@@ -54,6 +55,8 @@ import static org.apache.hadoop.hdfs.web.oauth2.Utils.notNull;
 @InterfaceStability.Evolving
 public class ConfRefreshTokenBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
 
   public static final String OAUTH_REFRESH_TOKEN_KEY
       = "dfs.webhdfs.oauth2.refresh.token";
@@ -126,10 +129,7 @@ public class ConfRefreshTokenBasedAccessTokenProvider
             + responseBody.code() + ", text = " + responseBody.toString());
       }
 
-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
+      Map<?, ?> response = READER.readValue(responseBody.body().string());
 
       String newExpiresIn = response.get(EXPIRES_IN).toString();
       accessTokenTimer.setExpiresIn(newExpiresIn);

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
@@ -54,6 +55,9 @@ import static org.apache.hadoop.hdfs.web.oauth2.Utils.notNull;
 @InterfaceStability.Evolving
 public abstract class CredentialBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
+
   public static final String OAUTH_CREDENTIAL_KEY
       = "dfs.webhdfs.oauth2.credential";
@@ -119,9 +123,7 @@ public abstract class CredentialBasedAccessTokenProvider
             + responseBody.code() + ", text = " + responseBody.toString());
       }
 
-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
+      Map<?, ?> response = READER.readValue(responseBody.body().string());
 
       String newExpiresIn = response.get(EXPIRES_IN).toString();
       timer.setExpiresIn(newExpiresIn);

View File

@@ -1053,6 +1053,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9686. Remove useless boxing/unboxing code.
     (Kousuke Saruta via aajisaka)
 
+    HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance.
+    (Lin Yiqun via aajisaka)
+
   BUG FIXES
 
     HDFS-8091: ACLStatus and XAttributes should be presented to

View File

@@ -62,6 +62,8 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.codehaus.jackson.annotate.JsonProperty;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -80,6 +82,10 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(BlockIteratorState.class);
 
   private final FsDatasetImpl dataset;
   private final String storageID;
@@ -708,10 +714,9 @@ public class FsVolumeImpl implements FsVolumeSpi {
     public void save() throws IOException {
       state.lastSavedMs = Time.now();
       boolean success = false;
-      ObjectMapper mapper = new ObjectMapper();
       try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
           new FileOutputStream(getTempSaveFile(), false), "UTF-8"))) {
-        mapper.writerWithDefaultPrettyPrinter().writeValue(writer, state);
+        WRITER.writeValue(writer, state);
         success = true;
       } finally {
         if (!success) {
@@ -725,17 +730,16 @@ public class FsVolumeImpl implements FsVolumeSpi {
           StandardCopyOption.ATOMIC_MOVE);
       if (LOG.isTraceEnabled()) {
         LOG.trace("save({}, {}): saved {}", storageID, bpid,
-            mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+            WRITER.writeValueAsString(state));
       }
     }
 
     public void load() throws IOException {
-      ObjectMapper mapper = new ObjectMapper();
       File file = getSaveFile();
-      this.state = mapper.reader(BlockIteratorState.class).readValue(file);
+      this.state = READER.readValue(file);
       LOG.trace("load({}, {}): loaded iterator {} from {}: {}", storageID,
           bpid, name, file.getAbsoluteFile(),
-          mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+          WRITER.writeValueAsString(state));
     }
 
     File getSaveFile() {
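FsVolumeImpl applies the same idea to the block-iterator state it persists: one static pretty-printing ObjectWriter for save() and one static ObjectReader bound to BlockIteratorState for load(). Below is a rough, self-contained sketch of that save/load round trip; the StateStore class and its State POJO are hypothetical stand-ins, not the actual FsVolumeImpl code.

    import java.io.BufferedWriter;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;

    import org.codehaus.jackson.annotate.JsonProperty;
    import org.codehaus.jackson.map.ObjectMapper;
    import org.codehaus.jackson.map.ObjectReader;
    import org.codehaus.jackson.map.ObjectWriter;

    // Hypothetical stand-in for the FsVolumeImpl pattern: shared, statically
    // held ObjectWriter/ObjectReader instead of a new ObjectMapper per call.
    public class StateStore {

      // Hypothetical state POJO; FsVolumeImpl persists BlockIteratorState.
      public static class State {
        @JsonProperty public long lastSavedMs;
        @JsonProperty public String name;
      }

      private static final ObjectWriter WRITER =
          new ObjectMapper().writerWithDefaultPrettyPrinter();
      private static final ObjectReader READER =
          new ObjectMapper().reader(State.class);

      static void save(State state, File file) throws IOException {
        try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
            new FileOutputStream(file, false), "UTF-8"))) {
          // Reuses the shared writer; no ObjectMapper is built per save.
          WRITER.writeValue(writer, state);
        }
      }

      static State load(File file) throws IOException {
        // Reuses the shared reader; no mapper.reader(State.class) per load.
        return READER.readValue(file);
      }
    }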

View File

@@ -250,6 +250,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.CallerContext;
@@ -277,7 +278,6 @@ import org.apache.hadoop.util.VersionInfo;
 import org.apache.log4j.Appender;
 import org.apache.log4j.AsyncAppender;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.mortbay.util.ajax.JSON;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -4518,9 +4518,8 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
     Map<String, Object> topMap = new TreeMap<String, Object>();
     topMap.put("windows", topWindows);
     topMap.put("timestamp", DFSUtil.dateToIso8601String(now));
-    ObjectMapper mapper = new ObjectMapper();
     try {
-      return mapper.writeValueAsString(topMap);
+      return JsonUtil.toJsonString(topMap);
     } catch (IOException e) {
       LOG.warn("Failed to fetch TopUser metrics", e);
     }

View File

@@ -54,7 +54,6 @@ import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.LimitInputStream;
-import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -263,10 +262,9 @@ class FSImageLoader {
    * @throws IOException if failed to serialize fileStatus to JSON.
    */
   String getFileStatus(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     FsImageProto.INodeSection.INode inode = fromINodeId(lookup(path));
     return "{\"FileStatus\":\n"
-        + mapper.writeValueAsString(getFileStatus(inode, false)) + "\n}\n";
+        + JsonUtil.toJsonString(getFileStatus(inode, false)) + "\n}\n";
   }
 
   /**
@@ -277,7 +275,6 @@ class FSImageLoader {
    */
   String listStatus(String path) throws IOException {
     StringBuilder sb = new StringBuilder();
-    ObjectMapper mapper = new ObjectMapper();
     List<Map<String, Object>> fileStatusList = getFileStatusList(path);
     sb.append("{\"FileStatuses\":{\"FileStatus\":[\n");
     int i = 0;
@@ -285,7 +282,7 @@ class FSImageLoader {
       if (i++ != 0) {
         sb.append(',');
       }
-      sb.append(mapper.writeValueAsString(fileStatusMap));
+      sb.append(JsonUtil.toJsonString(fileStatusMap));
     }
     sb.append("\n]}}\n");
     return sb.toString();
@@ -318,9 +315,8 @@ class FSImageLoader {
    * @throws IOException if failed to serialize ContentSummary to JSON.
    */
   String getContentSummary(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     return "{\"ContentSummary\":\n"
-        + mapper.writeValueAsString(getContentSummaryMap(path)) + "\n}\n";
+        + JsonUtil.toJsonString(getContentSummaryMap(path)) + "\n}\n";
   }
 
   private Map<String, Object> getContentSummaryMap(String path)

View File

@@ -388,4 +388,9 @@ public class JsonUtil {
     finalMap.put("XAttrNames", ret);
     return MAPPER.writeValueAsString(finalMap);
   }
+
+  public static String toJsonString(Object obj) throws IOException {
+    return MAPPER.writeValueAsString(obj);
+  }
 }
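The new JsonUtil.toJsonString(Object) helper lets callers such as FSNamesystem and FSImageLoader serialize a Map through JsonUtil's single shared ObjectMapper instead of building their own. A small, hypothetical usage example (the map contents are made up for illustration):

    import java.io.IOException;
    import java.util.Map;
    import java.util.TreeMap;

    import org.apache.hadoop.hdfs.web.JsonUtil;

    // Illustrative caller, not part of the patch: serializes a map the same
    // way the updated FSNamesystem/FSImageLoader code does.
    public class TopMetricsExample {
      public static void main(String[] args) throws IOException {
        Map<String, Object> topMap = new TreeMap<String, Object>();
        topMap.put("timestamp", "2016-02-13T01:57:24+0900");
        topMap.put("windows", 3);
        // Uses JsonUtil's statically held ObjectMapper under the hood.
        System.out.println(JsonUtil.toJsonString(topMap));
      }
    }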