HADOOP-16282. Avoid FileStream to improve performance. Contributed by Ayush Saxena.

Committed by Giovanni Matteo Fumarola on 2019-05-02 12:58:42 -07:00
parent b094b94d43
commit 7a3188d054
42 changed files with 148 additions and 126 deletions
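
The change is mechanical and repeated across all 42 files: streams over local files are obtained from java.nio.file.Files instead of constructing java.io.FileInputStream/FileOutputStream directly. On the JDK 8 era runtimes Hadoop targeted at the time, those two classes define finalize(), so every instance adds finalization work for the garbage collector; the Files factory methods return channel-backed streams with no finalizer. A minimal sketch of the idiom, not taken from the patch (file name and class are hypothetical):

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class StreamFactoryDemo {
  public static void main(String[] args) throws IOException {
    Path path = Paths.get("example.txt"); // hypothetical file

    // Before: new FileOutputStream(file) -- finalizer-burdened on old JDKs.
    // After: the NIO factory method, closed via try-with-resources.
    try (OutputStream out = Files.newOutputStream(path)) {
      out.write("hello".getBytes(StandardCharsets.UTF_8));
    }

    // For a java.io.File use file.toPath(); for a String path, Paths.get(name).
    try (InputStream in = Files.newInputStream(path)) {
      byte[] buf = new byte[16];
      int n = in.read(buf);
      System.out.println(new String(buf, 0, n, StandardCharsets.UTF_8));
    }
  }
}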

View File

@@ -30,7 +30,6 @@ import java.io.BufferedInputStream;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@@ -43,6 +42,7 @@ import java.net.InetSocketAddress;
import java.net.JarURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -3075,7 +3075,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
LOG.debug("parsing File " + file);
}
reader = (XMLStreamReader2)parse(new BufferedInputStream(
new FileInputStream(file)), ((Path)resource).toString(),
Files.newInputStream(file.toPath())), ((Path) resource).toString(),
isRestricted);
}
} else if (resource instanceof InputStream) {

View File

@@ -18,9 +18,10 @@
package org.apache.hadoop.crypto.random;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Random;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -50,7 +51,7 @@ public class OsSecureRandom extends Random implements Closeable, Configurable {
private String randomDevPath;
private transient FileInputStream stream;
private transient InputStream stream;
private final byte[] reservoir = new byte[RESERVOIR_LENGTH];
@@ -60,7 +61,7 @@ public class OsSecureRandom extends Random implements Closeable, Configurable {
if (pos >= reservoir.length - min) {
try {
if (stream == null) {
stream = new FileInputStream(new File(randomDevPath));
stream = Files.newInputStream(Paths.get(randomDevPath));
}
IOUtils.readFully(stream, reservoir, 0, reservoir.length);
} catch (IOException e) {
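
Note the declaration change above: Files.newInputStream returns an InputStream of an unspecified concrete class, so fields and locals typed FileInputStream are widened to InputStream throughout the patch. A short sketch of the idiom modeled on OsSecureRandom (class and method names here are illustrative, not the real API):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

class RandomDeviceReader {
  // Widened from FileInputStream: the factory method only promises
  // an InputStream, and callers only need that contract anyway.
  private InputStream stream;

  byte[] read(int n) throws IOException {
    if (stream == null) {
      // /dev/urandom is OsSecureRandom's default device path.
      stream = Files.newInputStream(Paths.get("/dev/urandom"));
    }
    byte[] buf = new byte[n];
    int off = 0;
    while (off < n) {
      int r = stream.read(buf, off, n - off);
      if (r < 0) {
        throw new IOException("unexpected EOF on random device");
      }
      off += r;
    }
    return buf;
  }
}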

View File

@@ -22,9 +22,7 @@ import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@@ -447,7 +445,7 @@ public class FileUtil {
InputStream in = null;
OutputStream out =null;
try {
in = new FileInputStream(src);
in = Files.newInputStream(src.toPath());
out = dstFS.create(dst);
IOUtils.copyBytes(in, out, conf);
} catch (IOException e) {
@@ -495,7 +493,7 @@ public class FileUtil {
}
} else {
InputStream in = srcFS.open(src);
IOUtils.copyBytes(in, new FileOutputStream(dst), conf);
IOUtils.copyBytes(in, Files.newOutputStream(dst.toPath()), conf);
}
if (deleteSource) {
return srcFS.delete(src, true);
@@ -639,7 +637,7 @@ public class FileUtil {
throw new IOException("Mkdirs failed to create " +
parent.getAbsolutePath());
}
try (OutputStream out = new FileOutputStream(file)) {
try (OutputStream out = Files.newOutputStream(file.toPath())) {
IOUtils.copyBytes(zip, out, BUFFER_SIZE);
}
if (!file.setLastModified(entry.getTime())) {
@@ -684,7 +682,7 @@ public class FileUtil {
file.getParentFile().toString());
}
}
OutputStream out = new FileOutputStream(file);
OutputStream out = Files.newOutputStream(file.toPath());
try {
byte[] buffer = new byte[8192];
int i;
@@ -918,9 +916,10 @@ public class FileUtil {
TarArchiveInputStream tis = null;
try {
if (gzipped) {
inputStream = new GZIPInputStream(new FileInputStream(inFile));
inputStream =
new GZIPInputStream(Files.newInputStream(inFile.toPath()));
} else {
inputStream = new FileInputStream(inFile);
inputStream = Files.newInputStream(inFile.toPath());
}
inputStream = new BufferedInputStream(inputStream);
@@ -1544,7 +1543,7 @@ public class FileUtil {
// Write the manifest to output JAR file
File classPathJar = File.createTempFile("classpath-", ".jar", workingDir);
try (FileOutputStream fos = new FileOutputStream(classPathJar);
try (OutputStream fos = Files.newOutputStream(classPathJar.toPath());
BufferedOutputStream bos = new BufferedOutputStream(fos)) {
JarOutputStream jos = new JarOutputStream(bos, jarManifest);
jos.close();
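
Because the factory methods hand back plain streams, the existing decorator layering (buffering, gzip, jar/tar handling) is unchanged; only the innermost constructor call moves to Files. A self-contained sketch of the un-gzip composition used above (the file name is hypothetical):

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.zip.GZIPInputStream;

public class CountGzipBytes {
  public static void main(String[] args) throws IOException {
    String name = args.length > 0 ? args[0] : "archive.tar.gz"; // hypothetical
    InputStream in = Files.newInputStream(Paths.get(name));
    if (name.endsWith(".gz")) {
      in = new GZIPInputStream(in); // decompress on the fly, as in unTar
    }
    in = new BufferedInputStream(in); // buffer either way, as FileUtil does
    try {
      long total = 0;
      byte[] buf = new byte[8192];
      for (int n; (n = in.read(buf)) != -1;) {
        total += n;
      }
      System.out.println(total + " uncompressed bytes");
    } finally {
      in.close();
    }
  }
}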

View File

@@ -18,10 +18,11 @@
package org.apache.hadoop.fs.shell;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
@@ -464,7 +465,7 @@ class CopyCommands {
dst.fs.create(dst.path, false).close();
}
FileInputStream is = null;
InputStream is = null;
try (FSDataOutputStream fos = dst.fs.append(dst.path)) {
if (readStdin) {
if (args.size() == 0) {
@@ -477,7 +478,7 @@
// Read in each input file and write to the target.
for (PathData source : args) {
is = new FileInputStream(source.toFile());
is = Files.newInputStream(source.toFile().toPath());
IOUtils.copyBytes(is, fos, DEFAULT_IO_LENGTH);
IOUtils.closeStream(is);
is = null;

View File

@@ -19,10 +19,10 @@
package org.apache.hadoop.metrics2.sink;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -47,7 +47,7 @@ public class FileSink implements MetricsSink, Closeable {
String filename = conf.getString(FILENAME_KEY);
try {
writer = filename == null ? System.out
: new PrintStream(new FileOutputStream(new File(filename)),
: new PrintStream(Files.newOutputStream(Paths.get(filename)),
true, "UTF-8");
} catch (Exception e) {
throw new MetricsException("Error creating "+ filename, e);
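
The same substitution works under character-oriented wrappers: the explicit encoding stays on the PrintStream or Writer, and only the byte-stream source changes. A hedged sketch of the FileSink-style layering (the output file name is made up):

import java.io.PrintStream;
import java.nio.file.Files;
import java.nio.file.Paths;

public class FileSinkDemo {
  public static void main(String[] args) throws Exception {
    // autoFlush=true and an explicit UTF-8 encoding, as in FileSink;
    // the underlying bytes now come from Files.newOutputStream.
    try (PrintStream writer = new PrintStream(
        Files.newOutputStream(Paths.get("metrics.out")), true, "UTF-8")) {
      writer.println("timestamp=0 metric=1");
    }
  }
}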

View File

@@ -20,9 +20,10 @@ package org.apache.hadoop.net;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -100,7 +101,8 @@ public class TableMapping extends CachedDNSToSwitchMapping {
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(
new FileInputStream(filename), StandardCharsets.UTF_8))) {
Files.newInputStream(Paths.get(filename)),
StandardCharsets.UTF_8))) {
String line = reader.readLine();
while (line != null) {
line = line.trim();

View File

@@ -26,9 +26,9 @@ import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@@ -243,7 +243,7 @@ public class Credentials implements Writable {
Credentials credentials = new Credentials();
try {
in = new DataInputStream(new BufferedInputStream(
new FileInputStream(filename)));
Files.newInputStream(filename.toPath())));
credentials.readTokenStorageStream(in);
return credentials;
} catch(IOException ioe) {

View File

@@ -41,12 +41,12 @@ import javax.crypto.Cipher;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.net.InetAddress;
import java.nio.file.Files;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -922,7 +922,7 @@ public class KDiag extends Configured implements Tool, Closeable {
* @throws IOException IO problems
*/
private void dump(File file) throws IOException {
try (FileInputStream in = new FileInputStream(file)) {
try (InputStream in = Files.newInputStream(file.toPath())) {
for (String line : IOUtils.readLines(in)) {
println("%s", line);
}

View File

@@ -17,11 +17,12 @@
*/
package org.apache.hadoop.security;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Hashtable;
@@ -836,7 +837,7 @@ public class LdapGroupsMapping
StringBuilder password = new StringBuilder();
try (Reader reader = new InputStreamReader(
new FileInputStream(pwFile), StandardCharsets.UTF_8)) {
Files.newInputStream(Paths.get(pwFile)), StandardCharsets.UTF_8)) {
int c = reader.read();
while (c > -1) {
password.append((char)c);

View File

@@ -19,11 +19,11 @@ package org.apache.hadoop.security;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
@@ -583,7 +583,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();
BufferedReader in = new BufferedReader(new InputStreamReader(
new FileInputStream(staticMapFile), StandardCharsets.UTF_8));
Files.newInputStream(staticMapFile.toPath()), StandardCharsets.UTF_8));
try {
String line = null;

View File

@@ -25,8 +25,6 @@ import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Shell;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -69,7 +67,7 @@ public final class LocalJavaKeyStoreProvider extends
if (LOG.isDebugEnabled()) {
LOG.debug("using '" + file + "' for output stream.");
}
FileOutputStream out = new FileOutputStream(file);
OutputStream out = Files.newOutputStream(file.toPath());
return out;
}
@@ -81,7 +79,7 @@ public final class LocalJavaKeyStoreProvider extends
@Override
protected InputStream getInputStreamForFile() throws IOException {
FileInputStream is = new FileInputStream(file);
InputStream is = Files.newInputStream(file.toPath());
return is;
}

View File

@@ -28,9 +28,10 @@ import org.slf4j.LoggerFactory;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManager;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.text.MessageFormat;
@@ -170,7 +171,7 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory {
LOG.debug(mode.toString() + " KeyStore: " + keystoreLocation);
}
InputStream is = new FileInputStream(keystoreLocation);
InputStream is = Files.newInputStream(Paths.get(keystoreLocation));
try {
keystore.load(is, keystorePassword.toCharArray());
} finally {

View File

@@ -29,8 +29,9 @@ import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.cert.CertificateException;
@@ -166,7 +167,7 @@ public final class ReloadingX509TrustManager
throws IOException, GeneralSecurityException {
X509TrustManager trustManager = null;
KeyStore ks = KeyStore.getInstance(type);
FileInputStream in = new FileInputStream(file);
InputStream in = Files.newInputStream(file.toPath());
try {
ks.load(in, (password == null) ? null : password.toCharArray());
lastLoaded = file.lastModified();

View File

@@ -20,9 +20,9 @@ package org.apache.hadoop.util;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -282,7 +282,7 @@ public final class ConfTest {
boolean ok = true;
for (File file : files) {
String path = file.getAbsolutePath();
List<String> errors = checkConf(new FileInputStream(file));
List<String> errors = checkConf(Files.newInputStream(file.toPath()));
if (errors.isEmpty()) {
System.out.println(path + ": valid");
} else {

View File

@@ -22,11 +22,11 @@ import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
@@ -90,7 +90,7 @@ public class FileBasedIPList implements IPList {
if (file.exists()) {
try (
Reader fileReader = new InputStreamReader(
new FileInputStream(file), StandardCharsets.UTF_8);
Files.newInputStream(file.toPath()), StandardCharsets.UTF_8);
BufferedReader bufferedReader = new BufferedReader(fileReader)) {
List<String> lines = new ArrayList<String>();
String line = null;

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.util;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Collections;
import java.util.Set;
import java.util.HashMap;
@@ -74,7 +75,7 @@ public class HostsFileReader {
public static void readFileToSet(String type,
String filename, Set<String> set) throws IOException {
File file = new File(filename);
FileInputStream fis = new FileInputStream(file);
InputStream fis = Files.newInputStream(file.toPath());
readFileToSetWithFileInputStream(type, filename, fis, set);
}
@@ -120,7 +121,7 @@ public class HostsFileReader {
public static void readFileToMap(String type,
String filename, Map<String, Integer> map) throws IOException {
File file = new File(filename);
FileInputStream fis = new FileInputStream(file);
InputStream fis = Files.newInputStream(file.toPath());
readFileToMapWithFileInputStream(type, filename, fis, map);
}

View File

@@ -21,10 +21,10 @@ package org.apache.hadoop.util;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.Map;
import com.fasterxml.jackson.core.JsonParseException;
@@ -190,7 +190,7 @@ public class JsonSerialization<T> {
*/
public void save(File file, T instance) throws
IOException {
writeJsonAsBytes(instance, new FileOutputStream(file));
writeJsonAsBytes(instance, Files.newOutputStream(file.toPath()));
}
/**

View File

@@ -19,7 +19,6 @@
package org.apache.hadoop.util;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -28,6 +27,7 @@ import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
@@ -129,7 +129,7 @@ public class RunJar {
+ " would create file outside of " + toDir);
}
ensureDirectory(file.getParentFile());
try (OutputStream out = new FileOutputStream(file)) {
try (OutputStream out = Files.newOutputStream(file.toPath())) {
IOUtils.copyBytes(jar, out, BUFFER_SIZE);
}
if (!file.setLastModified(entry.getTime())) {
@@ -166,7 +166,7 @@
throws IOException{
File file = new File(toDir, name);
ensureDirectory(toDir);
try (OutputStream jar = new FileOutputStream(file);
try (OutputStream jar = Files.newOutputStream(file.toPath());
TeeInputStream teeInputStream = new TeeInputStream(inputStream, jar)) {
unJar(teeInputStream, toDir, unpackRegex);
}
@@ -200,7 +200,7 @@
+ " would create file outside of " + toDir);
}
ensureDirectory(file.getParentFile());
try (OutputStream out = new FileOutputStream(file)) {
try (OutputStream out = Files.newOutputStream(file.toPath())) {
IOUtils.copyBytes(in, out, BUFFER_SIZE);
}
if (!file.setLastModified(entry.getTime())) {

View File

@@ -19,12 +19,12 @@
package org.apache.hadoop.util;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.HashSet;
import java.util.regex.Matcher;
@@ -246,9 +246,10 @@ public class SysInfoLinux extends SysInfo {
InputStreamReader fReader;
try {
fReader = new InputStreamReader(
new FileInputStream(procfsMemFile), Charset.forName("UTF-8"));
Files.newInputStream(Paths.get(procfsMemFile)),
Charset.forName("UTF-8"));
in = new BufferedReader(fReader);
} catch (FileNotFoundException f) {
} catch (IOException f) {
// shouldn't happen....
LOG.warn("Couldn't read " + procfsMemFile
+ "; can't determine memory settings");
@@ -316,10 +317,11 @@
BufferedReader in;
InputStreamReader fReader;
try {
fReader = new InputStreamReader(
new FileInputStream(procfsCpuFile), Charset.forName("UTF-8"));
fReader =
new InputStreamReader(Files.newInputStream(Paths.get(procfsCpuFile)),
Charset.forName("UTF-8"));
in = new BufferedReader(fReader);
} catch (FileNotFoundException f) {
} catch (IOException f) {
// shouldn't happen....
LOG.warn("Couldn't read " + procfsCpuFile + "; can't determine cpu info");
return;
@@ -377,9 +379,10 @@
InputStreamReader fReader;
try {
fReader = new InputStreamReader(
new FileInputStream(procfsStatFile), Charset.forName("UTF-8"));
Files.newInputStream(Paths.get(procfsStatFile)),
Charset.forName("UTF-8"));
in = new BufferedReader(fReader);
} catch (FileNotFoundException f) {
} catch (IOException f) {
// shouldn't happen....
return;
}
@@ -431,9 +434,10 @@
InputStreamReader fReader;
try {
fReader = new InputStreamReader(
new FileInputStream(procfsNetFile), Charset.forName("UTF-8"));
Files.newInputStream(Paths.get(procfsNetFile)),
Charset.forName("UTF-8"));
in = new BufferedReader(fReader);
} catch (FileNotFoundException f) {
} catch (IOException f) {
return;
}
@@ -485,8 +489,9 @@
BufferedReader in;
try {
in = new BufferedReader(new InputStreamReader(
new FileInputStream(procfsDisksFile), Charset.forName("UTF-8")));
} catch (FileNotFoundException f) {
Files.newInputStream(Paths.get(procfsDisksFile)),
Charset.forName("UTF-8")));
} catch (IOException f) {
return;
}
@@ -552,9 +557,9 @@
BufferedReader in;
try {
in = new BufferedReader(new InputStreamReader(
new FileInputStream(procfsDiskSectorFile),
Files.newInputStream(Paths.get(procfsDiskSectorFile)),
Charset.forName("UTF-8")));
} catch (FileNotFoundException f) {
} catch (IOException f) {
return defSector;
}

View File

@@ -18,8 +18,10 @@
package org.apache.hadoop.util.hash;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -252,7 +254,7 @@ public class JenkinsHash extends Hash {
System.err.println("Usage: JenkinsHash filename");
System.exit(-1);
}
try (FileInputStream in = new FileInputStream(args[0])) {
try (InputStream in = Files.newInputStream(Paths.get(args[0]))) {
byte[] bytes = new byte[512];
int value = 0;
JenkinsHash hash = new JenkinsHash();

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.nio.file.NoSuchFileException;
import java.util.Map;
import org.apache.hadoop.test.GenericTestUtils;
@@ -135,8 +135,8 @@ public class TestHostsFileReader {
new HostsFileReader(
HOSTS_TEST_DIR + "/doesnt-exist",
HOSTS_TEST_DIR + "/doesnt-exist");
Assert.fail("Should throw FileNotFoundException");
} catch (FileNotFoundException ex) {
Assert.fail("Should throw NoSuchFileException");
} catch (NoSuchFileException ex) {
// Exception as expected
}
}
@@ -157,8 +157,8 @@
assertTrue(INCLUDES_FILE.delete());
try {
hfp.refresh();
Assert.fail("Should throw FileNotFoundException");
} catch (FileNotFoundException ex) {
Assert.fail("Should throw NoSuchFileException");
} catch (NoSuchFileException ex) {
// Exception as expected
}
}
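
The test changes above point at the one behavioral difference in this substitution: for a missing file, Files.newInputStream throws java.nio.file.NoSuchFileException, while FileInputStream throws java.io.FileNotFoundException. Both extend IOException, but neither is a subtype of the other, which is why the catch blocks in SysInfoLinux and Command are retargeted as well. A small demonstration (the path is deliberately nonexistent):

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Paths;

public class MissingFileDemo {
  public static void main(String[] args) throws IOException {
    String path = "/no/such/file";
    try {
      new FileInputStream(path).close();
    } catch (FileNotFoundException e) {
      System.out.println("java.io: " + e.getClass().getName());
    }
    try {
      Files.newInputStream(Paths.get(path)).close();
    } catch (NoSuchFileException e) {
      // A catch (FileNotFoundException) clause would NOT match this.
      System.out.println("java.nio: " + e.getClass().getName());
    }
  }
}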

View File

@@ -24,10 +24,11 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -82,8 +83,8 @@ public final class CombinedHostsFileReader {
if (hostFile.length() > 0) {
try (Reader input =
new InputStreamReader(new FileInputStream(hostFile),
"UTF-8")) {
new InputStreamReader(
Files.newInputStream(hostFile.toPath()), "UTF-8")) {
allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
} catch (JsonMappingException jme) {
// The old format doesn't have json top-level token to enclose
@@ -101,7 +102,7 @@
JsonFactory jsonFactory = new JsonFactory();
List<DatanodeAdminProperties> all = new ArrayList<>();
try (Reader input =
new InputStreamReader(new FileInputStream(hostsFilePath),
new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
"UTF-8")) {
Iterator<DatanodeAdminProperties> iterator =
objectReader.readValues(jsonFactory.createParser(input));

View File

@@ -18,11 +18,11 @@
package org.apache.hadoop.hdfs.util;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Set;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -61,7 +61,8 @@ public final class CombinedHostsFileWriter {
final ObjectMapper objectMapper = new ObjectMapper();
try (Writer output =
new OutputStreamWriter(new FileOutputStream(hostsFile), "UTF-8")) {
new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
"UTF-8")) {
objectMapper.writeValue(output, allDNs);
}
}

View File

@@ -27,11 +27,12 @@ import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAu
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Map;
import java.util.Properties;
@@ -82,8 +83,8 @@ public class HttpFSAuthenticationFilter
try {
StringBuilder secret = new StringBuilder();
Reader reader = new InputStreamReader(new FileInputStream(
signatureSecretFile), StandardCharsets.UTF_8);
Reader reader = new InputStreamReader(Files.newInputStream(Paths.get(
signatureSecretFile)), StandardCharsets.UTF_8);
int c = reader.read();
while (c > -1) {
secret.append((char)c);

View File

@@ -30,9 +30,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
@@ -470,7 +470,7 @@ public class Server {
}
try {
log.debug("Loading site configuration from [{}]", siteFile);
inputStream = new FileInputStream(siteFile);
inputStream = Files.newInputStream(siteFile.toPath());
siteConf = new Configuration(false);
ConfigurationUtils.load(siteConf, inputStream);
} catch (IOException ex) {

View File

@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.qjournal.server;
import com.google.protobuf.ByteString;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
@@ -1058,7 +1057,7 @@ public class Journal implements Closeable {
return null;
}
InputStream in = new FileInputStream(f);
InputStream in = Files.newInputStream(f.toPath());
try {
PersistedRecoveryPaxosData ret = PersistedRecoveryPaxosData.parseDelimitedFrom(in);
Preconditions.checkState(ret != null &&

View File

@@ -53,9 +53,9 @@ import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Map;
import java.util.Optional;
@@ -324,7 +324,8 @@ public class InMemoryAliasMap implements InMemoryAliasMapProtocol,
GzipCompressorOutputStream gzOut = null;
TarArchiveOutputStream tOut = null;
try {
bOut = new BufferedOutputStream(new FileOutputStream(outCompressedFile));
bOut = new BufferedOutputStream(
Files.newOutputStream(outCompressedFile.toPath()));
gzOut = new GzipCompressorOutputStream(bOut);
tOut = new TarArchiveOutputStream(gzOut);
addFileToTarGzRecursively(tOut, aliasMapDir, "", new Configuration());

View File

@@ -28,6 +28,7 @@ import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.RandomAccessFile;
import java.io.Writer;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -311,7 +312,7 @@ class BlockPoolSlice {
try {
long used = getDfsUsed();
try (Writer out = new OutputStreamWriter(
new FileOutputStream(outFile), "UTF-8")) {
Files.newOutputStream(outFile.toPath()), "UTF-8")) {
// mtime is written last, so that truncated writes won't be valid.
out.write(Long.toString(used) + " " + Long.toString(timer.now()));
// This is only called as part of the volume shutdown.

View File

@@ -28,6 +28,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@@ -1149,7 +1150,7 @@ class FsDatasetImpl implements FsDatasetSpi<FsVolumeImpl> {
}
}
metaOut = new DataOutputStream(new BufferedOutputStream(
new FileOutputStream(dstMeta), smallBufferSize));
Files.newOutputStream(dstMeta.toPath()), smallBufferSize));
BlockMetadataHeader.writeHeader(metaOut, checksum);
int offset = 0;

View File

@@ -21,7 +21,6 @@ import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
@@ -193,7 +192,7 @@ public class FsDatasetUtil {
@Override
public InputStream getDataInputStream(long seekOffset)
throws IOException {
return new FileInputStream(blockFile);
return Files.newInputStream(blockFile.toPath());
}
};

View File

@@ -54,12 +54,12 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URL;
import java.nio.file.NoSuchFileException;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.Collections;
@@ -274,7 +274,7 @@ public abstract class Command extends Configured implements Closeable {
try {
HostsFileReader.readFileToSet("include",
Paths.get(listURL.getPath()).toString(), resultSet);
} catch (FileNotFoundException e) {
} catch (NoSuchFileException e) {
String warnMsg = String
.format("The input host file path '%s' is not a valid path. "
+ "Please make sure the host file exists.", listArg);

View File

@@ -23,11 +23,11 @@ import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import org.slf4j.Logger;
@@ -430,7 +430,7 @@ public class EditLogFileInputStream extends EditLogInputStream {
@Override
public InputStream getInputStream() throws IOException {
return new FileInputStream(file);
return Files.newInputStream(file.toPath());
}
@Override

View File

@@ -23,9 +23,10 @@ import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.security.DigestInputStream;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
@@ -215,9 +216,9 @@
throws IOException {
Preconditions.checkState(impl == null, "Image already loaded!");
FileInputStream is = null;
InputStream is = null;
try {
is = new FileInputStream(file);
is = Files.newInputStream(file.toPath());
byte[] magic = new byte[FSImageUtil.MAGIC_HEADER.length];
IOUtils.readFully(is, magic, 0, magic.length);
if (Arrays.equals(magic, FSImageUtil.MAGIC_HEADER)) {
@@ -318,7 +319,7 @@ public class FSImageFormat {
//
MessageDigest digester = MD5Hash.getDigester();
DigestInputStream fin = new DigestInputStream(
new FileInputStream(curFile), digester);
Files.newInputStream(curFile.toPath()), digester);
DataInputStream in = new DataInputStream(fin);
try {

View File

@@ -20,8 +20,8 @@ package org.apache.hadoop.hdfs.server.namenode;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
@@ -130,7 +130,8 @@ class FSImagePreTransactionalStorageInspector extends FSImageStorageInspector {
File timeFile = NNStorage.getStorageFile(sd, NameNodeFile.TIME);
long timeStamp = 0L;
if (timeFile.exists() && FileUtil.canRead(timeFile)) {
DataInputStream in = new DataInputStream(new FileInputStream(timeFile));
DataInputStream in = new DataInputStream(
Files.newInputStream(timeFile.toPath()));
try {
timeStamp = in.readLong();
in.close();

View File

@@ -117,7 +117,6 @@ import java.io.DataInput;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
@@ -126,6 +125,7 @@ import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -1774,7 +1774,8 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
checkOperation(OperationCategory.READ);
File file = new File(System.getProperty("hadoop.log.dir"), filename);
PrintWriter out = new PrintWriter(new BufferedWriter(
new OutputStreamWriter(new FileOutputStream(file), Charsets.UTF_8)));
new OutputStreamWriter(Files.newOutputStream(file.toPath()),
Charsets.UTF_8)));
metaSave(out);
out.flush();
out.close();

View File

@@ -21,7 +21,6 @@ import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -29,6 +28,7 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeUnit;
@@ -276,7 +276,7 @@ public class DebugAdmin extends Configured implements Tool {
final int smallBufferSize = DFSUtilClient.getSmallBufferSize(conf);
metaOut = new DataOutputStream(
new BufferedOutputStream(new FileOutputStream(srcMeta),
new BufferedOutputStream(Files.newOutputStream(srcMeta.toPath()),
smallBufferSize));
BlockMetadataHeader.writeHeader(metaOut, checksum);
metaOut.close();

View File

@@ -17,9 +17,10 @@
*/
package org.apache.hadoop.hdfs.tools.offlineEditsViewer;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -48,7 +49,7 @@ public class OfflineEditsVisitorFactory {
return new BinaryEditsVisitor(filename);
}
OfflineEditsVisitor vis;
OutputStream fout = new FileOutputStream(filename);
OutputStream fout = Files.newOutputStream(Paths.get(filename));
OutputStream out = null;
try {
if (!printToScreen) {

View File

@@ -31,10 +31,10 @@ import static org.apache.hadoop.hdfs.tools.offlineImageViewer.PBImageXmlWriter.*
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.file.Files;
@@ -1821,16 +1821,16 @@ class OfflineImageReconstructor {
public static void run(String inputPath, String outputPath)
throws Exception {
MessageDigest digester = MD5Hash.getDigester();
FileOutputStream fout = null;
OutputStream fout = null;
File foutHash = new File(outputPath + ".md5");
Files.deleteIfExists(foutHash.toPath()); // delete any .md5 file that exists
CountingOutputStream out = null;
FileInputStream fis = null;
InputStream fis = null;
InputStreamReader reader = null;
try {
Files.deleteIfExists(Paths.get(outputPath));
fout = new FileOutputStream(outputPath);
fis = new FileInputStream(inputPath);
fout = Files.newOutputStream(Paths.get(outputPath));
fis = Files.newInputStream(Paths.get(inputPath));
reader = new InputStreamReader(fis, Charset.forName("UTF-8"));
out = new CountingOutputStream(
new DigestOutputStream(

View File

@@ -20,9 +20,9 @@ package org.apache.hadoop.hdfs.tools.offlineImageViewer;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -126,7 +126,7 @@ public class OfflineImageViewer {
boolean done = false;
try {
tracker = new PositionTrackingInputStream(new BufferedInputStream(
new FileInputStream(new File(inputFile))));
Files.newInputStream(Paths.get(inputFile))));
in = new DataInputStream(tracker);
int imageVersionFile = findImageVersion(in);

View File

@@ -17,9 +17,10 @@
*/
package org.apache.hadoop.hdfs.tools.offlineImageViewer;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.file.Files;
import java.nio.file.Paths;
import com.google.common.base.Charsets;
@@ -59,7 +60,8 @@ abstract class TextWriterImageVisitor extends ImageVisitor {
throws IOException {
super();
this.printToScreen = printToScreen;
fw = new OutputStreamWriter(new FileOutputStream(filename), Charsets.UTF_8);
fw = new OutputStreamWriter(Files.newOutputStream(Paths.get(filename)),
Charsets.UTF_8);
okToWrite = true;
}

View File

@@ -19,11 +19,11 @@ package org.apache.hadoop.hdfs.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.util.regex.Matcher;
@@ -74,8 +74,8 @@ public abstract class MD5FileUtils {
*/
private static Matcher readStoredMd5(File md5File) throws IOException {
BufferedReader reader =
new BufferedReader(new InputStreamReader(new FileInputStream(
md5File), Charsets.UTF_8));
new BufferedReader(new InputStreamReader(
Files.newInputStream(md5File.toPath()), Charsets.UTF_8));
String md5Line;
try {
md5Line = reader.readLine();
@@ -125,7 +125,7 @@
* Read dataFile and compute its MD5 checksum.
*/
public static MD5Hash computeMd5ForFile(File dataFile) throws IOException {
InputStream in = new FileInputStream(dataFile);
InputStream in = Files.newInputStream(dataFile.toPath());
try {
MessageDigest digester = MD5Hash.getDigester();
DigestInputStream dis = new DigestInputStream(in, digester);

View File

@@ -25,12 +25,12 @@ import org.apache.hadoop.io.IOUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
@@ -63,7 +63,7 @@ public class RegexCopyFilter extends CopyFilter {
public void initialize() {
BufferedReader reader = null;
try {
InputStream is = new FileInputStream(filtersFile);
InputStream is = Files.newInputStream(filtersFile.toPath());
reader = new BufferedReader(new InputStreamReader(is,
Charset.forName("UTF-8")));
String line;