HDFS-10568. Reuse ObjectMapper instance in CombinedHostsFileReader and CombinedHostsFileWriter. Contributed by Yiqun Lin.

This commit is contained in:
Akira Ajisaka 2016-06-29 10:10:19 -07:00
parent 96fa0f848b
commit 991c946593
2 changed files with 8 additions and 7 deletions

View File

@@ -22,7 +22,6 @@ import java.io.FileInputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.IOException; import java.io.IOException;
import java.io.Reader; import java.io.Reader;
import java.util.Iterator; import java.util.Iterator;
import java.util.Set; import java.util.Set;
import java.util.HashSet; import java.util.HashSet;
@@ -31,7 +30,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties; import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
/** /**
@@ -48,6 +47,10 @@ import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
@InterfaceAudience.LimitedPrivate({"HDFS"}) @InterfaceAudience.LimitedPrivate({"HDFS"})
@InterfaceStability.Unstable @InterfaceStability.Unstable
public final class CombinedHostsFileReader { public final class CombinedHostsFileReader {
private static final ObjectReader READER =
new ObjectMapper().reader(DatanodeAdminProperties.class);
private static final JsonFactory JSON_FACTORY = new JsonFactory();
private CombinedHostsFileReader() { private CombinedHostsFileReader() {
} }
@@ -60,12 +63,10 @@ public final class CombinedHostsFileReader {
public static Set<DatanodeAdminProperties> public static Set<DatanodeAdminProperties>
readFile(final String hostsFile) throws IOException { readFile(final String hostsFile) throws IOException {
HashSet<DatanodeAdminProperties> allDNs = new HashSet<>(); HashSet<DatanodeAdminProperties> allDNs = new HashSet<>();
ObjectMapper mapper = new ObjectMapper();
try (Reader input = try (Reader input =
new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) { new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
Iterator<DatanodeAdminProperties> iterator = Iterator<DatanodeAdminProperties> iterator =
mapper.readValues(new JsonFactory().createJsonParser(input), READER.readValues(JSON_FACTORY.createJsonParser(input));
DatanodeAdminProperties.class);
while (iterator.hasNext()) { while (iterator.hasNext()) {
DatanodeAdminProperties properties = iterator.next(); DatanodeAdminProperties properties = iterator.next();
allDNs.add(properties); allDNs.add(properties);

View File

@@ -45,6 +45,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
@InterfaceAudience.LimitedPrivate({"HDFS"}) @InterfaceAudience.LimitedPrivate({"HDFS"})
@InterfaceStability.Unstable @InterfaceStability.Unstable
public final class CombinedHostsFileWriter { public final class CombinedHostsFileWriter {
private static final ObjectMapper MAPPER = new ObjectMapper();
private CombinedHostsFileWriter() { private CombinedHostsFileWriter() {
} }
@@ -60,8 +61,7 @@ public final class CombinedHostsFileWriter {
try (Writer output = try (Writer output =
new OutputStreamWriter(new FileOutputStream(hostsFile), "UTF-8")) { new OutputStreamWriter(new FileOutputStream(hostsFile), "UTF-8")) {
for (DatanodeAdminProperties datanodeAdminProperties: allDNs) { for (DatanodeAdminProperties datanodeAdminProperties: allDNs) {
ObjectMapper mapper = new ObjectMapper(); configs.append(MAPPER.writeValueAsString(datanodeAdminProperties));
configs.append(mapper.writeValueAsString(datanodeAdminProperties));
} }
output.write(configs.toString()); output.write(configs.toString());
} }