HADOOP-13427. Eliminate needless uses of FileSystem#{exists(), isFile(), isDirectory()}. Contributed by Steve Loughran and Mingliang Liu

(cherry picked from commit 5af572b644)
Mingliang Liu 2016-11-09 14:48:56 -08:00
parent 260f3a9dc9
commit 4e0fcff8ab
37 changed files with 171 additions and 184 deletions
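The pattern applied throughout this change: a FileSystem#exists() (or isFile()/isDirectory()) probe immediately followed by another filesystem call doubles the round trips to the store and leaves a window for the path to change in between. The hunks below either drop the guard entirely (delete() and mkdirs() already tolerate a missing or existing path) or call getFileStatus() once and treat FileNotFoundException as the "does not exist" branch. A minimal illustrative sketch of the two idioms, not code taken from the patch itself; the fs and path variables are placeholders:

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ExistsEliminationSketch {

  // Idiom 1: exists()+delete() collapses to a bare delete();
  // FileSystem#delete() simply returns false when the path is absent.
  static void deleteIfPresent(FileSystem fs, Path path) throws IOException {
    fs.delete(path, true);
  }

  // Idiom 2: exists()+getFileStatus() collapses to one getFileStatus() call
  // whose FileNotFoundException is the "does not exist" branch.
  static boolean isDirectory(FileSystem fs, Path path) throws IOException {
    try {
      FileStatus status = fs.getFileStatus(path);
      return status.isDirectory();
    } catch (FileNotFoundException e) {
      return false;
    }
  }
}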

View File

@@ -36,6 +36,7 @@ import com.google.common.annotations.VisibleForTesting;
 import javax.crypto.spec.SecretKeySpec;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
@@ -167,9 +168,9 @@ public class JavaKeyStoreProvider extends KeyProvider {
       // rewrite the keystore in flush()
       permissions = perm;
     } catch (KeyStoreException e) {
-      throw new IOException("Can't create keystore", e);
+      throw new IOException("Can't create keystore: " + e, e);
     } catch (GeneralSecurityException e) {
-      throw new IOException("Can't load keystore " + path, e);
+      throw new IOException("Can't load keystore " + path + " : " + e , e);
     }
   }
@@ -190,9 +191,7 @@ public class JavaKeyStoreProvider extends KeyProvider {
     try {
       perm = loadFromPath(path, password);
       // Remove _OLD if exists
-      if (fs.exists(backupPath)) {
-        fs.delete(backupPath, true);
-      }
+      fs.delete(backupPath, true);
       LOG.debug("KeyStore loaded successfully !!");
     } catch (IOException ioe) {
       // If file is corrupted for some reason other than
@@ -260,9 +259,7 @@ public class JavaKeyStoreProvider extends KeyProvider {
         LOG.debug(String.format("KeyStore loaded successfully from '%s'!!",
            pathToLoad));
       }
-      if (fs.exists(pathToDelete)) {
-        fs.delete(pathToDelete, true);
-      }
+      fs.delete(pathToDelete, true);
     } catch (IOException e) {
       // Check for password issue : don't want to trash file due
       // to wrong password
@@ -539,13 +536,15 @@ public class JavaKeyStoreProvider extends KeyProvider {
       return;
     }
     // Might exist if a backup has been restored etc.
-    if (fs.exists(newPath)) {
+    try {
       renameOrFail(newPath, new Path(newPath.toString()
           + "_ORPHANED_" + System.currentTimeMillis()));
+    } catch (FileNotFoundException ignored) {
     }
-    if (fs.exists(oldPath)) {
+    try {
       renameOrFail(oldPath, new Path(oldPath.toString()
          + "_ORPHANED_" + System.currentTimeMillis()));
+    } catch (FileNotFoundException ignored) {
     }
     // put all of the updates into the keystore
     for(Map.Entry<String, Metadata> entry: cache.entrySet()) {
@@ -601,9 +600,7 @@ public class JavaKeyStoreProvider extends KeyProvider {
     // Rename _NEW to CURRENT
     renameOrFail(newPath, path);
     // Delete _OLD
-    if (fs.exists(oldPath)) {
-      fs.delete(oldPath, true);
-    }
+    fs.delete(oldPath, true);
   }

   protected void writeToNew(Path newPath) throws IOException {
@@ -623,12 +620,12 @@ public class JavaKeyStoreProvider extends KeyProvider {
   protected boolean backupToOld(Path oldPath)
       throws IOException {
-    boolean fileExisted = false;
-    if (fs.exists(path)) {
+    try {
       renameOrFail(path, oldPath);
-      fileExisted = true;
+      return true;
+    } catch (FileNotFoundException e) {
+      return false;
     }
-    return fileExisted;
   }

   private void revertFromOld(Path oldPath, boolean fileExisted)

View File

@@ -22,6 +22,7 @@ import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -326,14 +327,15 @@ public class FileUtil {
       return copy(srcFS, srcs[0], dstFS, dst, deleteSource, overwrite, conf);

     // Check if dest is directory
-    if (!dstFS.exists(dst)) {
-      throw new IOException("`" + dst +"': specified destination directory " +
-                            "does not exist");
-    } else {
+    try {
       FileStatus sdst = dstFS.getFileStatus(dst);
       if (!sdst.isDirectory())
         throw new IOException("copying multiple files, but last argument `" +
                               dst + "' is not a directory");
+    } catch (FileNotFoundException e) {
+      throw new IOException(
+          "`" + dst + "': specified destination directory " +
+              "does not exist", e);
     }

     for (Path src : srcs) {
@@ -518,8 +520,13 @@ public class FileUtil {
   private static Path checkDest(String srcName, FileSystem dstFS, Path dst,
       boolean overwrite) throws IOException {
-    if (dstFS.exists(dst)) {
-      FileStatus sdst = dstFS.getFileStatus(dst);
+    FileStatus sdst;
+    try {
+      sdst = dstFS.getFileStatus(dst);
+    } catch (FileNotFoundException e) {
+      sdst = null;
+    }
+    if (null != sdst) {
       if (sdst.isDirectory()) {
         if (null == srcName) {
           throw new IOException("Target " + dst + " is a directory");

View File

@@ -208,9 +208,7 @@ public class RawLocalFileSystem extends FileSystem {
   @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
-    if (!exists(f)) {
-      throw new FileNotFoundException(f.toString());
-    }
+    getFileStatus(f);
     return new FSDataInputStream(new BufferedFSInputStream(
         new LocalFSFileInputStream(f), bufferSize));
   }
@@ -274,9 +272,6 @@ public class RawLocalFileSystem extends FileSystem {
   @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
-    if (!exists(f)) {
-      throw new FileNotFoundException("File " + f + " not found");
-    }
     FileStatus status = getFileStatus(f);
     if (status.isDirectory()) {
       throw new IOException("Cannot append to a diretory (=" + f + " )");
@@ -383,17 +378,18 @@ public class RawLocalFileSystem extends FileSystem {
     // platforms (notably Windows) do not provide this behavior, so the Java API
     // call renameTo(dstFile) fails. Delete destination and attempt rename
     // again.
-    if (this.exists(dst)) {
+    try {
       FileStatus sdst = this.getFileStatus(dst);
       if (sdst.isDirectory() && dstFile.list().length == 0) {
         if (LOG.isDebugEnabled()) {
           LOG.debug("Deleting empty destination and renaming " + src + " to " +
               dst);
         }
         if (this.delete(dst, false) && srcFile.renameTo(dstFile)) {
           return true;
         }
       }
+    } catch (FileNotFoundException ignored) {
     }
     return false;
   }

View File

@@ -121,9 +121,8 @@ public class TrashPolicyDefault extends TrashPolicy {
     if (!path.isAbsolute())                       // make path absolute
       path = new Path(fs.getWorkingDirectory(), path);

-    if (!fs.exists(path))                         // check that path exists
-      throw new FileNotFoundException(path.toString());
+    // check that path exists
+    fs.getFileStatus(path);
     String qpath = fs.makeQualified(path).toString();

     Path trashRoot = fs.getTrashRoot(path);

View File

@@ -41,6 +41,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -328,9 +329,7 @@ public class GenericOptionsParser {
       // check if the local file exists
       FileSystem localFs = FileSystem.getLocal(conf);
       Path p = localFs.makeQualified(new Path(fileName));
-      if (!localFs.exists(p)) {
-        throw new FileNotFoundException("File "+fileName+" does not exist.");
-      }
+      localFs.getFileStatus(p);
       if(LOG.isDebugEnabled()) {
         LOG.debug("setting conf tokensFile: " + fileName);
       }
@@ -437,9 +436,7 @@ public class GenericOptionsParser {
       if (pathURI.getScheme() == null) {
         //default to the local file system
         //check if the file exists or not first
-        if (!localFs.exists(path)) {
-          throw new FileNotFoundException("File " + tmp + " does not exist.");
-        }
+        localFs.getFileStatus(path);
         if (isWildcard) {
           expandWildcard(finalPaths, path, localFs);
         } else {
@@ -452,9 +449,8 @@ public class GenericOptionsParser {
         // these files to the file system ResourceManager is running
         // on.
         FileSystem fs = path.getFileSystem(conf);
-        if (!fs.exists(path)) {
-          throw new FileNotFoundException("File " + tmp + " does not exist.");
-        }
+        // existence check
+        fs.getFileStatus(path);
         if (isWildcard) {
           expandWildcard(finalPaths, path, fs);
         } else {
@@ -476,7 +472,8 @@ public class GenericOptionsParser {
   private void expandWildcard(List<String> finalPaths, Path path, FileSystem fs)
       throws IOException {
-    if (!fs.isDirectory(path)) {
+    FileStatus status = fs.getFileStatus(path);
+    if (!status.isDirectory()) {
       throw new FileNotFoundException(path + " is not a directory.");
     }
     // get all the jars in the directory

View File

@@ -2526,8 +2526,9 @@
         } else {
           Path userTrash = new Path(ezTrashRoot, System.getProperty(
               "user.name"));
-          if (exists(userTrash)) {
+          try {
             ret.add(getFileStatus(userTrash));
+          } catch (FileNotFoundException ignored) {
           }
         }
       }

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockStoragePolicySpi;
 import org.apache.hadoop.fs.CacheFlag;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileEncryptionInfo;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -476,10 +477,10 @@ public class HdfsAdmin {
     Path trashPath = new Path(ez.getPath(), FileSystem.TRASH_PREFIX);

-    if (dfs.exists(trashPath)) {
+    try {
+      FileStatus trashFileStatus = dfs.getFileStatus(trashPath);
       String errMessage = "Will not provision new trash directory for " +
           "encryption zone " + ez.getPath() + ". Path already exists.";
-      FileStatus trashFileStatus = dfs.getFileStatus(trashPath);
       if (!trashFileStatus.isDirectory()) {
         errMessage += "\r\n" +
             "Warning: " + trashPath.toString() + " is not a directory";
@@ -489,7 +490,9 @@ public class HdfsAdmin {
             "Warning: the permission of " +
                 trashPath.toString() + " is not " + TRASH_PERMISSION;
       }
-      throw new IOException(errMessage);
+      throw new FileAlreadyExistsException(errMessage);
+    } catch (FileNotFoundException ignored) {
+      // no trash path
     }

     // Update the permission bits

View File

@@ -1374,9 +1374,7 @@ public class JobHistoryEventHandler extends AbstractService
     if (stagingDirFS.exists(fromPath)) {
       LOG.info("Copying " + fromPath.toString() + " to " + toPath.toString());
       // TODO temporarily removing the existing dst
-      if (doneDirFS.exists(toPath)) {
-        doneDirFS.delete(toPath, true);
-      }
+      doneDirFS.delete(toPath, true);
       boolean copied = FileUtil.copy(stagingDirFS, fromPath, doneDirFS, toPath,
           false, getConfig());
@@ -1389,10 +1387,6 @@
     }
   }

-  boolean pathExists(FileSystem fileSys, Path path) throws IOException {
-    return fileSys.exists(path);
-  }
-
   private String getTempFileName(String srcFile) {
     return srcFile + "_tmp";
   }

View File

@@ -435,9 +435,11 @@ class JobResourceUploader {
     LOG.debug("default FileSystem: " + jtFs.getUri());
     FsPermission mapredSysPerms =
         new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
-    if (!jtFs.exists(submitJobDir)) {
+    try {
+      jtFs.getFileStatus(submitJobDir);
+    } catch (FileNotFoundException e) {
      throw new IOException("Cannot find job submission directory! "
-          + "It should just be created, so something wrong here.");
+          + "It should just be created, so something wrong here.", e);
     }

     Path fileDir = JobSubmissionFiles.getJobLog4jFile(submitJobDir);
@@ -488,9 +490,7 @@ class JobResourceUploader {
       if (pathURI.getScheme() == null) {
         // default to the local file system
         // check if the file exists or not first
-        if (!localFs.exists(path)) {
-          throw new FileNotFoundException("File " + file + " does not exist.");
-        }
+        localFs.getFileStatus(path);
         finalPath =
             path.makeQualified(localFs.getUri(), localFs.getWorkingDirectory())
                 .toString();
@@ -500,9 +500,7 @@ class JobResourceUploader {
         // these files to the file system ResourceManager is running
         // on.
         FileSystem fs = path.getFileSystem(conf);
-        if (!fs.exists(path)) {
-          throw new FileNotFoundException("File " + file + " does not exist.");
-        }
+        fs.getFileStatus(path);
         finalPath =
             path.makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString();
       }

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapreduce;

+import java.io.FileNotFoundException;
 import java.io.IOException;

 import com.google.common.annotations.VisibleForTesting;
@@ -130,7 +131,7 @@ public class JobSubmissionFiles {
     Path stagingArea = cluster.getStagingAreaDir();
     FileSystem fs = stagingArea.getFileSystem(conf);
     UserGroupInformation currentUser = realUser.getCurrentUser();
-    if (fs.exists(stagingArea)) {
+    try {
       FileStatus fsStatus = fs.getFileStatus(stagingArea);
       String fileOwner = fsStatus.getOwner();
       if (!(fileOwner.equals(currentUser.getShortUserName()) || fileOwner
@@ -156,7 +157,7 @@ public class JobSubmissionFiles {
             "to correct value " + JOB_DIR_PERMISSION);
         fs.setPermission(stagingArea, JOB_DIR_PERMISSION);
       }
-    } else {
+    } catch (FileNotFoundException e) {
       fs.mkdirs(stagingArea, new FsPermission(JOB_DIR_PERMISSION));
     }
     return stagingArea;

View File

@@ -674,10 +674,9 @@ public class FileOutputCommitter extends OutputCommitter {
     if (algorithmVersion == 1) {
       if (fs.exists(previousCommittedTaskPath)) {
         Path committedTaskPath = getCommittedTaskPath(context);
-        if (fs.exists(committedTaskPath)) {
-          if (!fs.delete(committedTaskPath, true)) {
-            throw new IOException("Could not delete "+committedTaskPath);
-          }
+        if (!fs.delete(committedTaskPath, true) &&
+            fs.exists(committedTaskPath)) {
+          throw new IOException("Could not delete " + committedTaskPath);
         }
         //Rename can fail if the parent directory does not yet exist.
         Path committedParent = committedTaskPath.getParent();
@@ -693,11 +692,12 @@ public class FileOutputCommitter extends OutputCommitter {
           // essentially a no-op, but for backwards compatibility
           // after upgrade to the new fileOutputCommitter,
          // check if there are any output left in committedTaskPath
-          if (fs.exists(previousCommittedTaskPath)) {
+          try {
+            FileStatus from = fs.getFileStatus(previousCommittedTaskPath);
             LOG.info("Recovering task for upgrading scenario, moving files from "
                 + previousCommittedTaskPath + " to " + outputPath);
-            FileStatus from = fs.getFileStatus(previousCommittedTaskPath);
             mergePaths(fs, from, outputPath);
+          } catch (FileNotFoundException ignored) {
           }
           LOG.info("Done recovering task " + attemptId);
         }

View File

@@ -97,7 +97,7 @@ public class PartialFileOutputCommitter
     for (int i = 0; i < taid.getId(); ++i) {
       TaskAttemptID oldId = new TaskAttemptID(tid, i);
       Path pTask = new Path(pCommit, oldId.toString());
-      if (fs.exists(pTask) && !fs.delete(pTask, true)) {
+      if (!fs.delete(pTask, true) && fs.exists(pTask)) {
        throw new IOException("Failed to delete " + pTask);
       }
     }
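In the two committer hunks above the call order is deliberately flipped: delete() is issued unconditionally, and exists() is only consulted when delete() reports failure, so the common case costs a single call. A hedged sketch of that idiom with illustrative names, not code lifted from the patch:

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteThenCheckSketch {

  // delete() returning false either means "nothing there to delete" (fine) or a
  // real failure; the trailing exists() distinguishes the two without a prior probe.
  static void deleteOrFail(FileSystem fs, Path path) throws IOException {
    if (!fs.delete(path, true) && fs.exists(path)) {
      throw new IOException("Could not delete " + path);
    }
  }
}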

View File

@@ -322,10 +322,8 @@ public class InputSampler<K,V> extends Configured implements Tool {
     Arrays.sort(samples, comparator);
     Path dst = new Path(TotalOrderPartitioner.getPartitionFile(conf));
     FileSystem fs = dst.getFileSystem(conf);
-    if (fs.exists(dst)) {
-      fs.delete(dst, false);
-    }
+    fs.delete(dst, false);
     SequenceFile.Writer writer = SequenceFile.createWriter(fs,
       conf, dst, job.getMapOutputKeyClass(), NullWritable.class);
     NullWritable nullValue = NullWritable.get();
     float stepSize = samples.length / (float) numPartitions;

View File

@@ -80,7 +80,7 @@ public class TestPreemptableFileOutputCommitter {
     foc.cleanUpPartialOutputForTask(context);
     verify(fs).delete(eq(p0), eq(true));
     verify(fs).delete(eq(p1), eq(true));
-    verify(fs, never()).delete(eq(p3), eq(true));
+    verify(fs, times(1)).delete(eq(p3), eq(true));
     verify(fs, never()).delete(eq(p2), eq(true));
   }

View File

@@ -182,7 +182,7 @@ public class HistoryServerFileSystemStateStoreService
     Path keyPath = new Path(tokenKeysStatePath,
         TOKEN_MASTER_KEY_FILE_PREFIX + key.getKeyId());
     if (fs.exists(keyPath)) {
-      throw new IOException(keyPath + " already exists");
+      throw new FileAlreadyExistsException(keyPath + " already exists");
     }

     ByteArrayOutputStream memStream = new ByteArrayOutputStream();

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.examples.terasort;

+import java.io.FileNotFoundException;
 import java.io.IOException;

 import org.apache.hadoop.conf.Configuration;
@@ -100,7 +101,7 @@ public class TeraOutputFormat extends FileOutputFormat<Text,Text> {
     final FileSystem fs = outDir.getFileSystem(jobConf);

-    if (fs.exists(outDir)) {
+    try {
       // existing output dir is considered empty iff its only content is the
       // partition file.
       //
@@ -116,6 +117,7 @@ public class TeraOutputFormat extends FileOutputFormat<Text,Text> {
         throw new FileAlreadyExistsException("Output directory " + outDir
             + " already exists");
       }
+    } catch (FileNotFoundException ignored) {
     }
   }

View File

@@ -178,9 +178,7 @@ public class HadoopArchiveLogs implements Tool {
     } finally {
       if (fs != null) {
         // Cleanup working directory
-        if (fs.exists(workingDir)) {
-          fs.delete(workingDir, true);
-        }
+        fs.delete(workingDir, true);
         fs.close();
       }
     }

View File

@@ -20,7 +20,6 @@ package org.apache.hadoop.tools;
 import java.io.DataInput;
 import java.io.DataOutput;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;
@@ -149,9 +148,7 @@
       IOException {
     for (Path p : paths) {
       FileSystem fs = p.getFileSystem(conf);
-      if (!fs.exists(p)) {
-        throw new FileNotFoundException("Source " + p + " does not exist.");
-      }
+      fs.getFileStatus(p);
     }
   }
@@ -619,9 +616,7 @@
       try {
         destFs = tmpOutput.getFileSystem(conf);
         //this was a stale copy
-        if (destFs.exists(tmpOutput)) {
-          destFs.delete(tmpOutput, false);
-        }
+        destFs.delete(tmpOutput, false);
         partStream = destFs.create(tmpOutput, false, conf.getInt("io.file.buffer.size", 4096),
             destFs.getDefaultReplication(tmpOutput), blockSize);
       } catch(IOException ie) {
@@ -747,12 +742,8 @@
       replication = conf.getInt(HAR_REPLICATION_LABEL, 3);
       try {
         fs = masterIndex.getFileSystem(conf);
-        if (fs.exists(masterIndex)) {
-          fs.delete(masterIndex, false);
-        }
-        if (fs.exists(index)) {
-          fs.delete(index, false);
-        }
+        fs.delete(masterIndex, false);
+        fs.delete(index, false);
         indexStream = fs.create(index);
         outStream = fs.create(masterIndex);
         String version = VERSION + " \n";

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs.azure;

+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
@@ -139,12 +140,15 @@ public class WasbFsck extends Configured implements Tool {
     if (p == null) {
       return true;
     }
-    if (!fs.exists(p)) {
+    FileStatus status;
+    try {
+      status = fs.getFileStatus(p);
+    } catch (FileNotFoundException e) {
       System.out.println("Path " + p + " does not exist!");
       return true;
     }
-    if (fs.isFile(p)) {
+    if (status.isFile()) {
       if (containsColon(p)) {
         System.out.println("Warning: file " + p + " has a colon in its name.");
         return false;

View File

@@ -328,9 +328,7 @@
     Arrays.sort(diffs, DiffInfo.targetComparator);
     for (DiffInfo diff : diffs) {
       if (diff.getTarget() != null) {
-        if (!targetFs.exists(diff.getTarget().getParent())) {
-          targetFs.mkdirs(diff.getTarget().getParent());
-        }
+        targetFs.mkdirs(diff.getTarget().getParent());
         targetFs.rename(diff.getTmp(), diff.getTarget());
       }
     }

View File

@@ -126,7 +126,13 @@ public class SimpleCopyListing extends CopyListing {
     Path targetPath = options.getTargetPath();
     FileSystem targetFS = targetPath.getFileSystem(getConf());
-    boolean targetIsFile = targetFS.isFile(targetPath);
+    boolean targetExists = false;
+    boolean targetIsFile = false;
+    try {
+      targetIsFile = targetFS.getFileStatus(targetPath).isFile();
+      targetExists = true;
+    } catch (FileNotFoundException ignored) {
+    }
     targetPath = targetFS.makeQualified(targetPath);
     final boolean targetIsReservedRaw =
         Path.getPathWithoutSchemeAndAuthority(targetPath).toString().
@@ -147,7 +153,7 @@ public class SimpleCopyListing extends CopyListing {
       }
     }

-    if (options.shouldAtomicCommit() && targetFS.exists(targetPath)) {
+    if (options.shouldAtomicCommit() && targetExists) {
       throw new InvalidInputException("Target path for atomic-commit already exists: " +
           targetPath + ". Cannot atomic-commit to pre-existing target-path.");
     }
@@ -448,7 +454,7 @@ public class SimpleCopyListing extends CopyListing {
         && !sourceStatus.isDirectory();

     if (solitaryFile) {
-      if (targetFS.isFile(target) || !targetPathExists) {
+      if (!targetPathExists || targetFS.isFile(target)) {
         return sourceStatus.getPath();
       } else {
         return sourceStatus.getPath().getParent();
@@ -495,9 +501,7 @@ public class SimpleCopyListing extends CopyListing {
   private SequenceFile.Writer getWriter(Path pathToListFile) throws IOException {
     FileSystem fs = pathToListFile.getFileSystem(getConf());
-    if (fs.exists(pathToListFile)) {
-      fs.delete(pathToListFile, false);
-    }
+    fs.delete(pathToListFile, false);
     return SequenceFile.createWriter(getConf(),
         SequenceFile.Writer.file(pathToListFile),
         SequenceFile.Writer.keyClass(Text.class),

View File

@@ -279,8 +279,8 @@ public class CopyCommitter extends FileOutputCommitter {
       if (srcAvailable && trgtRelPath.equals(srcRelPath)) continue;

       // Target doesn't exist at source. Delete.
-      boolean result = (!targetFS.exists(trgtFileStatus.getPath()) ||
-          targetFS.delete(trgtFileStatus.getPath(), true));
+      boolean result = targetFS.delete(trgtFileStatus.getPath(), true)
+          || !targetFS.exists(trgtFileStatus.getPath());
       if (result) {
         LOG.info("Deleted " + trgtFileStatus.getPath() + " - Missing at source");
         deletedEntries++;

View File

@@ -113,8 +113,9 @@ public class CopyMapper extends Mapper<Text, CopyListingFileStatus, Text, Text>
         DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH));
     targetFS = targetFinalPath.getFileSystem(conf);

-    if (targetFS.exists(targetFinalPath) && targetFS.isFile(targetFinalPath)) {
-      overWrite = true; // When target is an existing file, overwrite it.
+    try {
+      overWrite = overWrite || targetFS.getFileStatus(targetFinalPath).isFile();
+    } catch (FileNotFoundException ignored) {
     }

     if (conf.get(DistCpConstants.CONF_LABEL_SSL_CONF) != null) {

View File

@@ -140,7 +140,7 @@ public class RetriableFileCopyCommand extends RetriableCommand {
       // note that for append case, it is possible that we append partial data
       // and then fail. In that case, for the next retry, we either reuse the
       // partial appended data if it is good or we overwrite the whole file
-      if (!toAppend && targetFS.exists(targetPath)) {
+      if (!toAppend) {
         targetFS.delete(targetPath, false);
       }
     }

View File

@@ -356,9 +356,7 @@ public class DistCpUtils {
         CopyListingFileStatus.class, conf);
     Path output = new Path(sourceListing.toString() + "_sorted");

-    if (fs.exists(output)) {
-      fs.delete(output, false);
-    }
+    fs.delete(output, false);

     sorter.sort(sourceListing, output);
     return output;

View File

@@ -20,7 +20,6 @@ package org.apache.hadoop.tools;
 import java.io.BufferedReader;
 import java.io.DataInput;
 import java.io.DataOutput;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.nio.charset.Charset;
@@ -68,11 +67,10 @@ abstract class DistTool implements org.apache.hadoop.util.Tool {
     List<IOException> ioes = new ArrayList<IOException>();
     for(Path p : srcs) {
       try {
-        if (!p.getFileSystem(conf).exists(p)) {
-          ioes.add(new FileNotFoundException("Source "+p+" does not exist."));
-        }
+        p.getFileSystem(conf).getFileStatus(p);
+      } catch(IOException e) {
+        ioes.add(e);
       }
-      catch(IOException e) {ioes.add(e);}
     }
     if (!ioes.isEmpty()) {
       throw new InvalidInputException(ioes);
@@ -113,4 +111,4 @@
     public static final int ERROR_CODE = -2;
     DuplicationException(String message) {super(message);}
   }
 }

View File

@@ -489,10 +489,13 @@ public class SwiftTestUtils extends org.junit.Assert {
    */
   public static void assertPathExists(FileSystem fileSystem, String message,
                                       Path path) throws IOException {
-    if (!fileSystem.exists(path)) {
+    try {
+      fileSystem.getFileStatus(path);
+    } catch (FileNotFoundException e) {
       //failure, report it
-      fail(message + ": not found " + path + " in " + path.getParent());
-      ls(fileSystem, path.getParent());
+      throw (IOException)new FileNotFoundException(message + ": not found "
+          + path + " in " + path.getParent() + ": " + e + " -- "
+          + ls(fileSystem, path.getParent())).initCause(e);
     }
   }

View File

@@ -21,6 +21,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.DataOutputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
@@ -186,27 +187,23 @@ public class StatePool {
     if (reload) {
       // Reload persisted entries
       Path stateFilename = new Path(persistDirPath, COMMIT_STATE_FILENAME);
-      FileSystem fs = stateFilename.getFileSystem(conf);
-      if (fs.exists(stateFilename)) {
-        reloadState(stateFilename, conf);
-      } else {
-        throw new RuntimeException("No latest state persist directory found!"
+      if (!reloadState(stateFilename, conf)) {
+        throw new RuntimeException("No latest state persist directory found!"
             + " Disable persistence and run.");
       }
     }
   }

-  private void reloadState(Path stateFile, Configuration conf)
+  private boolean reloadState(Path stateFile, Configuration configuration)
       throws Exception {
-    FileSystem fs = stateFile.getFileSystem(conf);
-    if (fs.exists(stateFile)) {
+    FileSystem fs = stateFile.getFileSystem(configuration);
+    try (FSDataInputStream in = fs.open(stateFile)) {
       System.out.println("Reading state from " + stateFile.toString());
-      FSDataInputStream in = fs.open(stateFile);
       read(in);
-      in.close();
-    } else {
+      return true;
+    } catch (FileNotFoundException e) {
       System.out.println("No state information found for " + stateFile);
+      return false;
     }
   }
@@ -342,4 +339,4 @@ public class StatePool {
     //TODO Should we do a clone?
     this.pool = states;
   }
 }

View File

@@ -74,9 +74,7 @@ public class FileSystemBasedConfigurationProvider
         new Path(bootstrapConf.get(YarnConfiguration.FS_BASED_RM_CONF_STORE,
             YarnConfiguration.DEFAULT_FS_BASED_RM_CONF_STORE));
     fs = configDir.getFileSystem(bootstrapConf);
-    if (!fs.exists(configDir)) {
-      fs.mkdirs(configDir);
-    }
+    fs.mkdirs(configDir);
   }

   @Override

View File

@@ -19,7 +19,6 @@
 package org.apache.hadoop.yarn.client.api.impl;

 import java.io.Closeable;
-import java.io.FileNotFoundException;
 import java.io.Flushable;
 import java.io.IOException;
 import java.net.URI;
@@ -121,10 +120,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
             .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_ACTIVE_DIR_DEFAULT));
     fs = FileSystem.newInstance(activePath.toUri(), fsConf);

-    if (!fs.exists(activePath)) {
-      throw new FileNotFoundException(activePath + " does not exist");
-    }
+    // raise FileNotFoundException if the path is not found
+    fs.getFileStatus(activePath);

     summaryEntityTypes = new HashSet<String>(
         conf.getStringCollection(YarnConfiguration
             .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SUMMARY_ENTITY_TYPES));
@@ -992,9 +989,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
     Path appDir = createApplicationDir(appAttemptId.getApplicationId());

     Path attemptDir = new Path(appDir, appAttemptId.toString());
-    if (!fs.exists(attemptDir)) {
-      FileSystem.mkdirs(fs, attemptDir, new FsPermission(
-          APP_LOG_DIR_PERMISSIONS));
+    if (FileSystem.mkdirs(fs, attemptDir,
+        new FsPermission(APP_LOG_DIR_PERMISSIONS))) {
       if (LOG.isDebugEnabled()) {
         LOG.debug("New attempt directory created - " + attemptDir);
       }
@@ -1005,9 +1001,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
   private Path createApplicationDir(ApplicationId appId) throws IOException {
     Path appDir =
         new Path(activePath, appId.toString());
-    if (!fs.exists(appDir)) {
-      FileSystem.mkdirs(fs, appDir,
-          new FsPermission(APP_LOG_DIR_PERMISSIONS));
+    if (FileSystem.mkdirs(fs, appDir,
+        new FsPermission(APP_LOG_DIR_PERMISSIONS))) {
       if (LOG.isDebugEnabled()) {
         LOG.debug("New app directory created - " + appDir);
       }

View File

@@ -19,6 +19,7 @@
 package org.apache.hadoop.yarn.nodelabels;

 import java.io.EOFException;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
@@ -83,9 +84,7 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
     setFileSystem(conf);

     // mkdir of root dir path
-    if (!fs.exists(fsWorkingPath)) {
-      fs.mkdirs(fsWorkingPath);
-    }
+    fs.mkdirs(fsWorkingPath);
   }

   @Override
@@ -160,12 +159,15 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
       throws IOException {
     // If mirror.new exists, read from mirror.new,
     FSDataInputStream is = null;
-    if (fs.exists(newMirrorPath)) {
+    try {
       is = fs.open(newMirrorPath);
-    } else if (fs.exists(oldMirrorPath)) {
-      is = fs.open(oldMirrorPath);
-    }
+    } catch (FileNotFoundException e) {
+      try {
+        is = fs.open(oldMirrorPath);
+      } catch (FileNotFoundException ignored) {
+      }
+    }

     if (null != is) {
       List<NodeLabel> labels = new AddToClusterNodeLabelsRequestPBImpl(
           AddToClusterNodeLabelsRequestProto.parseDelimitedFrom(is))
@@ -209,8 +211,13 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
     // Open and process editlog
     editLogPath = new Path(fsWorkingPath, EDITLOG_FILENAME);
-    if (fs.exists(editLogPath)) {
-      FSDataInputStream is = fs.open(editLogPath);
+    FSDataInputStream is;
+    try {
+      is = fs.open(editLogPath);
+    } catch (FileNotFoundException e) {
+      is = null;
+    }
+    if (null != is) {
       while (true) {
         try {
@@ -255,6 +262,7 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
           break;
         }
       }
+      is.close();
     }

     // Serialize current mirror to mirror.writing

View File

@@ -116,16 +116,12 @@ public class NonAppendableFSNodeLabelStore extends FileSystemNodeLabelsStore {
       // Rename mirror.new.tmp to mirror.new (will remove .new if it's existed)
       Path newPath = new Path(fsWorkingPath, MIRROR_FILENAME + ".new");
-      if (fs.exists(newPath)) {
-        fs.delete(newPath, false);
-      }
+      fs.delete(newPath, false);
       fs.rename(newTmpPath, newPath);

       // Remove existing mirror and rename mirror.new to mirror
       Path mirrorPath = new Path(fsWorkingPath, MIRROR_FILENAME);
-      if (fs.exists(mirrorPath)) {
-        fs.delete(mirrorPath, false);
-      }
+      fs.delete(mirrorPath, false);
       fs.rename(newPath, mirrorPath);
     } finally {
       readLock.unlock();

View File

@@ -348,10 +348,10 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
     };
     mockStore.setNodeLabelsManager(mgr);
     mockStore.fs = mockFs;
-    verifyMkdirsCount(mockStore, true, 0);
-    verifyMkdirsCount(mockStore, false, 1);
     verifyMkdirsCount(mockStore, true, 1);
     verifyMkdirsCount(mockStore, false, 2);
+    verifyMkdirsCount(mockStore, true, 3);
+    verifyMkdirsCount(mockStore, false, 4);
   }

   private void verifyMkdirsCount(FileSystemNodeLabelsStore store,

View File

@@ -22,6 +22,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.DataOutputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -123,12 +124,7 @@ public class FileSystemApplicationHistoryStore extends AbstractService
     rootDirPath = new Path(fsWorkingPath, ROOT_DIR_NAME);
     try {
       fs = getFileSystem(fsWorkingPath, conf);
-
-      if (!fs.isDirectory(rootDirPath)) {
-        fs.mkdirs(rootDirPath);
-        fs.setPermission(rootDirPath, ROOT_DIR_UMASK);
-      }
-
+      fs.mkdirs(rootDirPath, ROOT_DIR_UMASK);
     } catch (IOException e) {
       LOG.error("Error when initializing FileSystemHistoryStorage", e);
       throw e;
@@ -659,9 +655,11 @@ public class FileSystemApplicationHistoryStore extends AbstractService
   private HistoryFileReader getHistoryFileReader(ApplicationId appId)
       throws IOException {
     Path applicationHistoryFile = new Path(rootDirPath, appId.toString());
-    if (!fs.exists(applicationHistoryFile)) {
-      throw new IOException("History file for application " + appId
-          + " is not found");
+    try {
+      fs.getFileStatus(applicationHistoryFile);
+    } catch (FileNotFoundException e) {
+      throw (FileNotFoundException) new FileNotFoundException("History file for"
+          + " application " + appId + " is not found: " + e).initCause(e);
     }
     // The history file is still under writing
     if (outstandingWriters.containsKey(appId)) {

View File

@@ -27,6 +27,7 @@ import org.junit.Assert;
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -279,8 +280,8 @@ public class TestFileSystemApplicationHistoryStore extends
     }

     // Make sure that directory creation was not attempted
-    verify(fs, times(1)).isDirectory(any(Path.class));
-    verify(fs, times(0)).mkdirs(any(Path.class));
+    verify(fs, never()).isDirectory(any(Path.class));
+    verify(fs, times(1)).mkdirs(any(Path.class));
   }

   @Test
@@ -301,7 +302,7 @@
     }

     // Make sure that directory creation was attempted
-    verify(fs, times(1)).isDirectory(any(Path.class));
+    verify(fs, never()).isDirectory(any(Path.class));
     verify(fs, times(1)).mkdirs(any(Path.class));
   }
 }

View File

@@ -192,10 +192,12 @@ class SharedCacheUploader implements Callable<Boolean> {
   private void deleteTempFile(Path tempPath) {
     try {
-      if (tempPath != null && fs.exists(tempPath)) {
+      if (tempPath != null) {
         fs.delete(tempPath, false);
       }
-    } catch (IOException ignore) {}
+    } catch (IOException ioe) {
+      LOG.debug("Exception received while deleting temp files", ioe);
+    }
   }

   /**

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.yarn.server.sharedcachemanager.store;

+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -189,11 +190,14 @@ public class InMemorySCMStore extends SCMStore {
         conf.get(YarnConfiguration.SHARED_CACHE_ROOT,
             YarnConfiguration.DEFAULT_SHARED_CACHE_ROOT);
     Path root = new Path(location);
-    if (!fs.exists(root)) {
+    try {
+      fs.getFileStatus(root);
+    } catch (FileNotFoundException e) {
      String message =
          "The shared cache root directory " + location + " was not found";
       LOG.error(message);
-      throw new IOException(message);
+      throw (IOException)new FileNotFoundException(message)
+          .initCause(e);
     }

     int nestedLevel = SharedCacheUtil.getCacheDepth(conf);