HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance. Contributed by Lin Yiqun.
(cherry picked from commit e6a7044b8530afded8f8e86ff309dd0e4d39238a)
(cherry picked from commit a714d04f5d8eaba34a24ee9454ceaec68ab00a15)
parent 706351afb8
commit df23f9c49d
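In substance, every touched call site stops building a fresh org.codehaus.jackson ObjectMapper per request and instead reuses a statically initialized ObjectReader/ObjectWriter (or, for the namenode and offline image viewer, the shared MAPPER behind JsonUtil). The sketch below is not code from the patch; the class name JsonCodecSketch and its methods are invented purely to illustrate the before/after pattern the hunks apply.

    // Illustrative sketch (not part of the patch) of the reuse pattern.
    // "Before": a new ObjectMapper is constructed on every call, which is
    // comparatively expensive. "After": a shared ObjectReader/ObjectWriter is
    // built once and reused; these objects are safe to share once configured.
    import java.io.IOException;
    import java.util.Map;

    import org.codehaus.jackson.map.ObjectMapper;
    import org.codehaus.jackson.map.ObjectReader;
    import org.codehaus.jackson.map.ObjectWriter;

    public class JsonCodecSketch {
      // Created once per class load and shared by all callers.
      private static final ObjectReader READER =
          new ObjectMapper().reader(Map.class);
      private static final ObjectWriter WRITER =
          new ObjectMapper().writerWithDefaultPrettyPrinter();

      // Before the patch: allocate and configure a mapper on every invocation.
      static Map<?, ?> parseSlow(String json) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        return mapper.reader(Map.class).readValue(json);
      }

      // After the patch: reuse the shared reader.
      static Map<?, ?> parseFast(String json) throws IOException {
        return READER.readValue(json);
      }

      // Serialization goes through the shared pretty-printing writer.
      static String render(Object value) throws IOException {
        return WRITER.writeValueAsString(value);
      }
    }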
ConfRefreshTokenBasedAccessTokenProvider.java:

@@ -29,6 +29,7 @@
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;

 import java.io.IOException;
 import java.util.Map;

@@ -54,6 +55,8 @@
 @InterfaceStability.Evolving
 public class ConfRefreshTokenBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);

   public static final String OAUTH_REFRESH_TOKEN_KEY
       = "dfs.webhdfs.oauth2.refresh.token";

@@ -126,10 +129,7 @@ void refresh() throws IOException {
             + responseBody.code() + ", text = " + responseBody.toString());
       }

-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
-
+      Map<?, ?> response = READER.readValue(responseBody.body().string());

       String newExpiresIn = response.get(EXPIRES_IN).toString();
       accessTokenTimer.setExpiresIn(newExpiresIn);
CredentialBasedAccessTokenProvider.java:

@@ -29,6 +29,7 @@
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;

 import java.io.IOException;
 import java.util.Map;

@@ -54,6 +55,9 @@
 @InterfaceStability.Evolving
 public abstract class CredentialBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
+
   public static final String OAUTH_CREDENTIAL_KEY
       = "dfs.webhdfs.oauth2.credential";

@@ -119,9 +123,7 @@ void refresh() throws IOException {
             + responseBody.code() + ", text = " + responseBody.toString());
       }

-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
+      Map<?, ?> response = READER.readValue(responseBody.body().string());

       String newExpiresIn = response.get(EXPIRES_IN).toString();
       timer.setExpiresIn(newExpiresIn);
CHANGES.txt:

@@ -975,6 +975,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9686. Remove useless boxing/unboxing code.
     (Kousuke Saruta via aajisaka)

+    HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance.
+    (Lin Yiqun via aajisaka)
+
   BUG FIXES

     HDFS-8091: ACLStatus and XAttributes should be presented to
FsVolumeImpl.java:

@@ -61,6 +61,8 @@
 import org.apache.hadoop.util.Time;
 import org.codehaus.jackson.annotate.JsonProperty;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -79,6 +81,10 @@
 public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(BlockIteratorState.class);

   private final FsDatasetImpl dataset;
   private final String storageID;

@@ -707,10 +713,9 @@ public void rewind() {
     public void save() throws IOException {
       state.lastSavedMs = Time.now();
       boolean success = false;
-      ObjectMapper mapper = new ObjectMapper();
       try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
           new FileOutputStream(getTempSaveFile(), false), "UTF-8"))) {
-        mapper.writerWithDefaultPrettyPrinter().writeValue(writer, state);
+        WRITER.writeValue(writer, state);
         success = true;
       } finally {
         if (!success) {

@@ -724,17 +729,16 @@ public void save() throws IOException {
           StandardCopyOption.ATOMIC_MOVE);
       if (LOG.isTraceEnabled()) {
         LOG.trace("save({}, {}): saved {}", storageID, bpid,
-            mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+            WRITER.writeValueAsString(state));
       }
     }

     public void load() throws IOException {
-      ObjectMapper mapper = new ObjectMapper();
       File file = getSaveFile();
-      this.state = mapper.reader(BlockIteratorState.class).readValue(file);
+      this.state = READER.readValue(file);
       LOG.trace("load({}, {}): loaded iterator {} from {}: {}", storageID,
           bpid, name, file.getAbsoluteFile(),
-          mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+          WRITER.writeValueAsString(state));
     }

     File getSaveFile() {
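The motivation behind these hunks is that constructing an ObjectMapper per call dominates the cost of small reads and writes such as the block-iterator state above or an OAuth token response. The timing loop below is a rough, unscientific sketch (not part of the patch, and ReuseTimingSketch is an invented name); absolute numbers depend on the JVM and Jackson version, only the ratio is meant to be illustrative.

    // Rough timing sketch: mapper-per-call vs. a shared, statically built reader.
    import java.util.Map;

    import org.codehaus.jackson.map.ObjectMapper;
    import org.codehaus.jackson.map.ObjectReader;

    public class ReuseTimingSketch {
      private static final ObjectReader READER =
          new ObjectMapper().reader(Map.class);

      public static void main(String[] args) throws Exception {
        String json = "{\"expires_in\":\"3600\",\"access_token\":\"abc\"}";
        int iterations = 100000;
        long sink = 0;  // keep results live so the loops are not optimized away

        long t0 = System.nanoTime();
        for (int i = 0; i < iterations; i++) {
          Map<?, ?> m = new ObjectMapper().reader(Map.class).readValue(json);
          sink += m.size();
        }
        long perCall = System.nanoTime() - t0;

        long t1 = System.nanoTime();
        for (int i = 0; i < iterations; i++) {
          Map<?, ?> m = READER.readValue(json);
          sink += m.size();
        }
        long shared = System.nanoTime() - t1;

        System.out.printf("new mapper per call: %d ms, shared reader: %d ms (sink=%d)%n",
            perCall / 1000000, shared / 1000000, sink);
      }
    }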
FSNamesystem.java:

@@ -258,6 +258,7 @@
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.CallerContext;

@@ -286,7 +287,6 @@
 import org.apache.log4j.Appender;
 import org.apache.log4j.AsyncAppender;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.mortbay.util.ajax.JSON;

 import com.google.common.annotations.VisibleForTesting;

@@ -5253,9 +5253,8 @@ public String getTopUserOpCounts() {
     Map<String, Object> topMap = new TreeMap<String, Object>();
     topMap.put("windows", topWindows);
     topMap.put("timestamp", DFSUtil.dateToIso8601String(now));
-    ObjectMapper mapper = new ObjectMapper();
     try {
-      return mapper.writeValueAsString(topMap);
+      return JsonUtil.toJsonString(topMap);
     } catch (IOException e) {
       LOG.warn("Failed to fetch TopUser metrics", e);
     }
FSImageLoader.java:

@@ -54,7 +54,6 @@
 import org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.LimitInputStream;
-import org.codehaus.jackson.map.ObjectMapper;

 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;

@@ -263,10 +262,9 @@ static String[] loadStringTable(InputStream in) throws
    * @throws IOException if failed to serialize fileStatus to JSON.
    */
   String getFileStatus(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     FsImageProto.INodeSection.INode inode = fromINodeId(lookup(path));
     return "{\"FileStatus\":\n"
-        + mapper.writeValueAsString(getFileStatus(inode, false)) + "\n}\n";
+        + JsonUtil.toJsonString(getFileStatus(inode, false)) + "\n}\n";
   }

   /**

@@ -277,7 +275,6 @@ String getFileStatus(String path) throws IOException {
    */
   String listStatus(String path) throws IOException {
     StringBuilder sb = new StringBuilder();
-    ObjectMapper mapper = new ObjectMapper();
     List<Map<String, Object>> fileStatusList = getFileStatusList(path);
     sb.append("{\"FileStatuses\":{\"FileStatus\":[\n");
     int i = 0;

@@ -285,7 +282,7 @@ String listStatus(String path) throws IOException {
       if (i++ != 0) {
         sb.append(',');
       }
-      sb.append(mapper.writeValueAsString(fileStatusMap));
+      sb.append(JsonUtil.toJsonString(fileStatusMap));
     }
     sb.append("\n]}}\n");
     return sb.toString();

@@ -318,9 +315,8 @@ private List<Map<String, Object>> getFileStatusList(String path)
    * @throws IOException if failed to serialize ContentSummary to JSON.
    */
   String getContentSummary(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     return "{\"ContentSummary\":\n"
-        + mapper.writeValueAsString(getContentSummaryMap(path)) + "\n}\n";
+        + JsonUtil.toJsonString(getContentSummaryMap(path)) + "\n}\n";
   }

   private Map<String, Object> getContentSummaryMap(String path)
JsonUtil.java:

@@ -388,4 +388,9 @@ public static String toJsonString(final List<XAttr> xAttrs)
     finalMap.put("XAttrNames", ret);
     return MAPPER.writeValueAsString(finalMap);
   }
+
+  public static String toJsonString(Object obj) throws IOException {
+    return MAPPER.writeValueAsString(obj);
+  }
+
 }
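The new JsonUtil.toJsonString(Object) overload is what the getTopUserOpCounts() and offline-image-viewer call sites above now delegate to, so they all share JsonUtil's single MAPPER. A small usage sketch follows; the map contents and the class name JsonUtilUsageSketch are invented for illustration.

    // Hypothetical caller of the helper added above.
    import java.io.IOException;
    import java.util.Map;
    import java.util.TreeMap;

    import org.apache.hadoop.hdfs.web.JsonUtil;

    public class JsonUtilUsageSketch {
      public static void main(String[] args) throws IOException {
        Map<String, Object> topMap = new TreeMap<String, Object>();
        topMap.put("timestamp", "2016-02-08T00:00:00Z");
        topMap.put("windows", new int[] {60, 300, 1500});
        // Serializes via JsonUtil's shared mapper; no per-call ObjectMapper.
        System.out.println(JsonUtil.toJsonString(topMap));
      }
    }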