HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance. Contributed by Lin Yiqun.
This commit is contained in:
parent 51fc7f5427
commit e6a7044b85
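The whole patch follows a single pattern: hoist the relatively expensive ObjectMapper construction out of the per-call path and keep a static ObjectReader/ObjectWriter instead (both are immutable and thread-safe in Jackson, so one shared instance is safe). A minimal sketch of that pattern, assuming Jackson 1.x (org.codehaus.jackson) as used by the files below; the class and method names here are illustrative only, not part of the patch:

import java.io.IOException;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;

// Hypothetical helper showing the reuse pattern: build the mapper once,
// keep the derived reader/writer in static fields, reuse them on every call.
public class JsonCodecSketch {
  private static final ObjectReader READER =
      new ObjectMapper().reader(Map.class);
  private static final ObjectWriter WRITER =
      new ObjectMapper().writerWithDefaultPrettyPrinter();

  // Before the patch, callers did: new ObjectMapper().reader(Map.class).readValue(json)
  static Map<?, ?> parse(String json) throws IOException {
    return READER.readValue(json);
  }

  static String render(Object value) throws IOException {
    return WRITER.writeValueAsString(value);
  }
}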
ConfRefreshTokenBasedAccessTokenProvider.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
@@ -54,6 +55,8 @@ import static org.apache.hadoop.hdfs.web.oauth2.Utils.notNull;
 @InterfaceStability.Evolving
 public class ConfRefreshTokenBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
 
   public static final String OAUTH_REFRESH_TOKEN_KEY
       = "dfs.webhdfs.oauth2.refresh.token";
@@ -126,10 +129,7 @@ public class ConfRefreshTokenBasedAccessTokenProvider
             + responseBody.code() + ", text = " + responseBody.toString());
       }
 
-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
-
+      Map<?, ?> response = READER.readValue(responseBody.body().string());
 
       String newExpiresIn = response.get(EXPIRES_IN).toString();
       accessTokenTimer.setExpiresIn(newExpiresIn);

CredentialBasedAccessTokenProvider.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
@@ -54,6 +55,9 @@ import static org.apache.hadoop.hdfs.web.oauth2.Utils.notNull;
 @InterfaceStability.Evolving
 public abstract class CredentialBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
+
   public static final String OAUTH_CREDENTIAL_KEY
       = "dfs.webhdfs.oauth2.credential";
 
@@ -119,9 +123,7 @@ public abstract class CredentialBasedAccessTokenProvider
             + responseBody.code() + ", text = " + responseBody.toString());
       }
 
-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
+      Map<?, ?> response = READER.readValue(responseBody.body().string());
 
       String newExpiresIn = response.get(EXPIRES_IN).toString();
       timer.setExpiresIn(newExpiresIn);

CHANGES.txt

@@ -1978,6 +1978,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9686. Remove useless boxing/unboxing code.
     (Kousuke Saruta via aajisaka)
 
+    HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance.
+    (Lin Yiqun via aajisaka)
+
   BUG FIXES
 
     HDFS-7501. TransactionsSinceLastCheckpoint can be negative on SBNs.

FsVolumeImpl.java

@@ -62,6 +62,8 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.codehaus.jackson.annotate.JsonProperty;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -80,6 +82,10 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(BlockIteratorState.class);
 
   private final FsDatasetImpl dataset;
   private final String storageID;
@@ -708,10 +714,9 @@ public class FsVolumeImpl implements FsVolumeSpi {
     public void save() throws IOException {
       state.lastSavedMs = Time.now();
       boolean success = false;
-      ObjectMapper mapper = new ObjectMapper();
       try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
           new FileOutputStream(getTempSaveFile(), false), "UTF-8"))) {
-        mapper.writerWithDefaultPrettyPrinter().writeValue(writer, state);
+        WRITER.writeValue(writer, state);
         success = true;
       } finally {
         if (!success) {
@@ -725,17 +730,16 @@ public class FsVolumeImpl implements FsVolumeSpi {
           StandardCopyOption.ATOMIC_MOVE);
       if (LOG.isTraceEnabled()) {
         LOG.trace("save({}, {}): saved {}", storageID, bpid,
-            mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+            WRITER.writeValueAsString(state));
       }
     }
 
     public void load() throws IOException {
-      ObjectMapper mapper = new ObjectMapper();
       File file = getSaveFile();
-      this.state = mapper.reader(BlockIteratorState.class).readValue(file);
+      this.state = READER.readValue(file);
       LOG.trace("load({}, {}): loaded iterator {} from {}: {}", storageID,
           bpid, name, file.getAbsoluteFile(),
-          mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+          WRITER.writeValueAsString(state));
     }
 
     File getSaveFile() {

FSNamesystem.java

@@ -248,6 +248,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.CallerContext;
@@ -275,7 +276,6 @@ import org.apache.hadoop.util.VersionInfo;
 import org.apache.log4j.Appender;
 import org.apache.log4j.AsyncAppender;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.mortbay.util.ajax.JSON;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -4557,9 +4557,8 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
     Map<String, Object> topMap = new TreeMap<String, Object>();
     topMap.put("windows", topWindows);
     topMap.put("timestamp", DFSUtil.dateToIso8601String(now));
-    ObjectMapper mapper = new ObjectMapper();
     try {
-      return mapper.writeValueAsString(topMap);
+      return JsonUtil.toJsonString(topMap);
     } catch (IOException e) {
       LOG.warn("Failed to fetch TopUser metrics", e);
     }

FSImageLoader.java

@@ -54,7 +54,6 @@ import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.LimitInputStream;
-import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -263,10 +262,9 @@ class FSImageLoader {
    * @throws IOException if failed to serialize fileStatus to JSON.
    */
   String getFileStatus(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     FsImageProto.INodeSection.INode inode = fromINodeId(lookup(path));
     return "{\"FileStatus\":\n"
-        + mapper.writeValueAsString(getFileStatus(inode, false)) + "\n}\n";
+        + JsonUtil.toJsonString(getFileStatus(inode, false)) + "\n}\n";
   }
 
   /**
@@ -277,7 +275,6 @@ class FSImageLoader {
    */
   String listStatus(String path) throws IOException {
     StringBuilder sb = new StringBuilder();
-    ObjectMapper mapper = new ObjectMapper();
     List<Map<String, Object>> fileStatusList = getFileStatusList(path);
     sb.append("{\"FileStatuses\":{\"FileStatus\":[\n");
     int i = 0;
@@ -285,7 +282,7 @@ class FSImageLoader {
       if (i++ != 0) {
         sb.append(',');
       }
-      sb.append(mapper.writeValueAsString(fileStatusMap));
+      sb.append(JsonUtil.toJsonString(fileStatusMap));
     }
     sb.append("\n]}}\n");
     return sb.toString();
@@ -318,9 +315,8 @@ class FSImageLoader {
    * @throws IOException if failed to serialize ContentSummary to JSON.
    */
   String getContentSummary(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     return "{\"ContentSummary\":\n"
-        + mapper.writeValueAsString(getContentSummaryMap(path)) + "\n}\n";
+        + JsonUtil.toJsonString(getContentSummaryMap(path)) + "\n}\n";
   }
 
   private Map<String, Object> getContentSummaryMap(String path)

JsonUtil.java

@@ -389,4 +389,8 @@ public class JsonUtil {
     return MAPPER.writeValueAsString(finalMap);
   }
 
+  public static String toJsonString(Object obj) throws IOException {
+    return MAPPER.writeValueAsString(obj);
+  }
+
 }
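The new JsonUtil.toJsonString(Object) is what lets FSNamesystem and FSImageLoader drop their per-call mappers above: serialization is delegated to JsonUtil's shared MAPPER. A small usage sketch; the surrounding class and the map contents are illustrative, not from the patch:

import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.hdfs.web.JsonUtil;

// Illustrative caller mirroring the FSNamesystem change: build the result map,
// then delegate JSON rendering to JsonUtil's shared ObjectMapper.
public class JsonUtilCallerSketch {
  static String asJson(Map<String, Object> topMap) {
    try {
      return JsonUtil.toJsonString(topMap);
    } catch (IOException e) {
      // the real callers log the failure and fall through
      return null;
    }
  }

  public static void main(String[] args) {
    Map<String, Object> topMap = new TreeMap<String, Object>();
    topMap.put("timestamp", 0L);
    System.out.println(asJson(topMap));
  }
}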