MAPREDUCE-5177. Use common utils FileUtil#setReadable/Writable/Executable & FileUtil#canRead/Write/Execute. Contributed by Ivan Mitic.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1477403 13f79535-47bb-0310-9956-ffa450edef68
parent e2091275dc
commit 8f7ce62085
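Note: the substitution this commit applies is mechanical. Direct calls to java.io.File's permission probes (canRead/canWrite/canExecute, setReadable/setWritable/setExecutable) are routed through the static wrappers in org.apache.hadoop.fs.FileUtil, which were introduced to behave consistently across platforms, Windows included. A minimal before/after sketch of the mapping (the demo class and variable names are illustrative, not part of the patch):

import java.io.File;

import org.apache.hadoop.fs.FileUtil;

// Illustrative only: the old JDK probes next to the FileUtil
// wrappers that this commit standardizes on.
class PermissionProbeDemo {
  void demo(File f) {
    // Before: JDK probes, called directly on the File.
    boolean oldCanRead = f.canRead();
    boolean oldCanExecute = f.canExecute();

    // After: the cross-platform FileUtil wrappers named in the
    // commit message.
    boolean newCanRead = FileUtil.canRead(f);
    boolean newCanExecute = FileUtil.canExecute(f);
    boolean madeWritable = FileUtil.setWritable(f, true);
    boolean madeExecutable = FileUtil.setExecutable(f, true);
  }
}
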
@@ -159,6 +159,9 @@ Trunk (Unreleased)
     HADOOP-9372. Fix bad timeout annotations on tests.
     (Arpit Agarwal via suresh)
 
+    MAPREDUCE-5177. Use common utils FileUtil#setReadable/Writable/Executable &
+    FileUtil#canRead/Write/Execute. (Ivan Mitic via suresh)
+
 Release 2.0.5-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES

@@ -113,7 +113,7 @@ class Application<K1 extends WritableComparable, V1 extends Writable,
       cmd.add(interpretor);
     }
     String executable = DistributedCache.getLocalCacheFiles(conf)[0].toString();
-    if (!new File(executable).canExecute()) {
+    if (!FileUtil.canExecute(new File(executable))) {
       // LinuxTaskController sets +x permissions on all distcache files already.
       // In case of DefaultTaskController, set permissions here.
       FileUtil.chmod(executable, "u+x");

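The hunk above keeps the existing chmod fallback but probes the execute bit through FileUtil first. A standalone sketch of that pattern (the helper class and method are hypothetical, not from the patch):

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.FileUtil;

// Hypothetical helper mirroring the hunk above: probe the execute bit
// through FileUtil, and only fall back to chmod when it is missing.
class ExecutableGuard {
  static void ensureExecutable(String path)
      throws IOException, InterruptedException {
    if (!FileUtil.canExecute(new File(path))) {
      // FileUtil.chmod(filename, perm) applies a chmod-style permission
      // string; "u+x" adds the owner execute bit.
      FileUtil.chmod(path, "u+x");
    }
  }
}
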
@@ -21,6 +21,7 @@ package org.apache.hadoop.streaming;
 import java.io.*;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.fs.FileUtil;
 
 /**
  * Maps a relative pathname to an absolute pathname using the PATH environment.

@@ -79,7 +80,7 @@ public class PathFinder {
         f = new File(entry + fileSep + filename);
       }
       // see if the filename matches and we can read it
-      if (f.isFile() && f.canRead()) {
+      if (f.isFile() && FileUtil.canRead(f)) {
         return f;
       }
       classvalue = classvalue.substring(val + 1).trim();

@@ -46,6 +46,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.FileInputFormat;

@@ -394,7 +395,7 @@ public class StreamJob implements Tool {
       throws IllegalArgumentException {
     for (String file : values) {
       File f = new File(file);
-      if (!f.canRead()) {
+      if (!FileUtil.canRead(f)) {
        fail("File: " + f.getAbsolutePath()
            + " does not exist, or is not readable.");
      }

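Same pattern on the validation path: StreamJob now rejects unreadable user-supplied files through the FileUtil probe. A self-contained sketch (the class name and the exception choice are illustrative; StreamJob itself reports the failure through its own fail helper):

import java.io.File;

import org.apache.hadoop.fs.FileUtil;

// Illustrative validation loop mirroring the hunk above: fail fast on
// any supplied file that is missing or unreadable.
class ReadableFilesCheck {
  static void validate(String... paths) {
    for (String path : paths) {
      File f = new File(path);
      if (!FileUtil.canRead(f)) {
        throw new IllegalArgumentException("File: " + f.getAbsolutePath()
            + " does not exist, or is not readable.");
      }
    }
  }
}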