MAPREDUCE-5177. Use common utils FileUtil#setReadable/Writable/Executable & FileUtil#canRead/Write/Execute. Contributed by Ivan Mitic.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1477403 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2013-04-29 23:00:39 +00:00
parent e2091275dc
commit 8f7ce62085
4 changed files with 8 additions and 3 deletions
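
The change is mechanical: direct java.io.File permission probes are replaced
with the shared org.apache.hadoop.fs.FileUtil helpers, so every caller goes
through one common code path. A minimal before/after sketch, assuming only the
FileUtil.canRead/canExecute helpers that appear in the hunks below:

    import java.io.File;

    import org.apache.hadoop.fs.FileUtil;

    public class PermissionCheckSketch {
      public static void main(String[] args) {
        File f = new File(args[0]);

        // Before this patch: ask the JVM directly for the permission bits.
        boolean before = f.canRead() && f.canExecute();

        // After this patch: route the same checks through the common
        // utility, so platform-specific handling lives in one place.
        boolean after = FileUtil.canRead(f) && FileUtil.canExecute(f);

        System.out.println(before + " / " + after);
      }
    }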

@@ -159,6 +159,9 @@ Trunk (Unreleased)
     HADOOP-9372. Fix bad timeout annotations on tests.
     (Arpit Agarwal via suresh)
 
+    MAPREDUCE-5177. Use common utils FileUtil#setReadable/Writable/Executable &
+    FileUtil#canRead/Write/Execute. (Ivan Mitic via suresh)
+
 Release 2.0.5-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES

@@ -113,7 +113,7 @@ class Application<K1 extends WritableComparable, V1 extends Writable,
       cmd.add(interpretor);
     }
     String executable = DistributedCache.getLocalCacheFiles(conf)[0].toString();
-    if (!new File(executable).canExecute()) {
+    if (!FileUtil.canExecute(new File(executable))) {
       // LinuxTaskController sets +x permissions on all distcache files already.
       // In case of DefaultTaskController, set permissions here.
       FileUtil.chmod(executable, "u+x");
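
Condensed, the guard above makes the distributed-cache binary executable
before it is launched if the task controller has not already done so. A sketch
of that flow as it reads after this commit (the ensureExecutable wrapper is
hypothetical, not part of the patch):

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.fs.FileUtil;

    class ExecPermissionSketch {
      // Hypothetical wrapper around the logic in the hunk above.
      static void ensureExecutable(String executable)
          throws IOException, InterruptedException {
        if (!FileUtil.canExecute(new File(executable))) {
          // LinuxTaskController already sets +x on distcache files;
          // under DefaultTaskController we set the bit ourselves.
          FileUtil.chmod(executable, "u+x");
        }
      }
    }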

@@ -21,6 +21,7 @@ package org.apache.hadoop.streaming;
 import java.io.*;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.fs.FileUtil;
 
 /**
  * Maps a relative pathname to an absolute pathname using the PATH environment.
@@ -79,7 +80,7 @@ public class PathFinder {
         f = new File(entry + fileSep + filename);
       }
       // see if the filename matches and we can read it
-      if (f.isFile() && f.canRead()) {
+      if (f.isFile() && FileUtil.canRead(f)) {
         return f;
       }
       classvalue = classvalue.substring(val + 1).trim();
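
The PathFinder change sits inside a loop that walks PATH-style entries looking
for a readable file. A self-contained sketch of that search pattern with the
rewritten guard (findOnPath and its parameters are illustrative, not the
class's actual members):

    import java.io.File;

    import org.apache.hadoop.fs.FileUtil;

    class PathSearchSketch {
      // Illustrative re-creation of the PATH walk around the changed guard.
      static File findOnPath(String pathEnv, String filename) {
        for (String entry : pathEnv.split(File.pathSeparator)) {
          File f = new File(entry, filename);
          // see if the filename matches and we can read it
          if (f.isFile() && FileUtil.canRead(f)) {
            return f;
          }
        }
        return null;
      }
    }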

@@ -46,6 +46,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -394,7 +395,7 @@
       throws IllegalArgumentException {
     for (String file : values) {
       File f = new File(file);
-      if (!f.canRead()) {
+      if (!FileUtil.canRead(f)) {
         fail("File: " + f.getAbsolutePath()
           + " does not exist, or is not readable.");
       }
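
All three code hunks follow the same pattern: permission checks move off
java.io.File and onto the FileUtil helpers named in the commit title. This
centralizes the checks behind one utility; java.io.File's permission methods
do not behave consistently on every platform (notably Windows), which is
presumably the reason the common helpers exist.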