YARN-506. Move to common utils FileUtil#setReadable/Writable/Executable and FileUtil#canRead/Write/Execute. Contributed by Ivan Mitic.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1477408 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2013-04-29 23:09:28 +00:00
parent 8f7ce62085
commit 27d4592771
7 changed files with 24 additions and 16 deletions
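For context, a minimal sketch of the pattern this commit applies across the files below: direct java.io.File permission calls are replaced with the org.apache.hadoop.fs.FileUtil wrappers from Hadoop common, which are intended to behave consistently across platforms (the entry is filed under the HADOOP-8562 Windows-support breakdown in CHANGES.txt). The class name and script path in the sketch are illustrative only and do not appear in the commit.

    import java.io.File;
    import org.apache.hadoop.fs.FileUtil;

    // Illustrative sketch only; not part of the commit.
    public class FilePermissionSketch {
      public static void main(String[] args) {
        // Hypothetical script path, used purely for demonstration.
        File script = new File("/tmp/health-check.sh");

        // Old pattern: java.io.File permission checks and updates.
        boolean executable = script.exists() && script.canExecute();

        // New pattern used throughout this commit: the FileUtil equivalents.
        if (script.exists() && !FileUtil.canExecute(script)) {
          FileUtil.setExecutable(script, true);
        }

        System.out.println("File#canExecute:     " + executable);
        System.out.println("FileUtil#canExecute: " + FileUtil.canExecute(script));
      }
    }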

CHANGES.txt

@@ -41,7 +41,7 @@ Trunk - Unreleased
classpath with new process's environment variables and localized resources
(Chris Nauroth via bikas)
- BREAKDOWN OF HADOOP-8562 SUBTASKS
+ BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
YARN-158. Yarn creating package-info.java must not depend on sh.
(Chris Nauroth via suresh)
@@ -70,6 +70,10 @@ Trunk - Unreleased
YARN-359. Fixing commands for container signalling in Windows. (Chris Nauroth
via vinodkv)
+ YARN-506. Move to common utils FileUtil#setReadable/Writable/Executable and
+ FileUtil#canRead/Write/Execute. (Ivan Mitic via suresh)
Release 2.0.5-beta - UNRELEASED

NodeHealthScriptRunner.java

@@ -28,6 +28,7 @@ import java.util.TimerTask;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
import org.apache.hadoop.util.StringUtils;
@@ -321,7 +322,7 @@ public class NodeHealthScriptRunner extends AbstractService {
return false;
}
File f = new File(nodeHealthScript);
- return f.exists() && f.canExecute();
+ return f.exists() && FileUtil.canExecute(f);
}
private synchronized void setHealthStatus(boolean isHealthy, String output) {

CgroupsLCEResourcesHandler.java

@@ -35,6 +35,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -307,7 +308,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
if (controllerPath != null) {
File f = new File(controllerPath + "/" + this.cgroupPrefix);
- if (f.canWrite()) {
+ if (FileUtil.canWrite(f)) {
controllerPaths.put(CONTROLLER_CPU, controllerPath);
} else {
throw new IOException("Not able to enforce cpu weights; cannot write "

TestLinuxContainerExecutor.java

@@ -36,6 +36,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
+ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -104,9 +105,7 @@ public class TestLinuxContainerExecutor {
FileContext files = FileContext.getLocalFSFileContext();
Path workSpacePath = new Path(workSpace.getAbsolutePath());
files.mkdir(workSpacePath, null, true);
- workSpace.setReadable(true, false);
- workSpace.setExecutable(true, false);
- workSpace.setWritable(true, false);
+ FileUtil.chmod(workSpace.getAbsolutePath(), "777");
File localDir = new File(workSpace.getAbsoluteFile(), "localDir");
files.mkdir(new Path(localDir.getAbsolutePath()),
new FsPermission("777"), false);

TestLinuxContainerExecutorWithMocks.java

@@ -37,6 +37,7 @@ import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -78,8 +79,8 @@ public class TestLinuxContainerExecutorWithMocks {
@Before
public void setup() {
File f = new File("./src/test/resources/mock-container-executor");
- if(!f.canExecute()) {
- f.setExecutable(true);
+ if(!FileUtil.canExecute(f)) {
+ FileUtil.setExecutable(f, true);
}
String executorPath = f.getAbsolutePath();
Configuration conf = new Configuration();
@@ -140,8 +141,8 @@ public class TestLinuxContainerExecutorWithMocks {
// set the scheduler priority to make sure still works with nice -n prio
File f = new File("./src/test/resources/mock-container-executor");
- if (!f.canExecute()) {
- f.setExecutable(true);
+ if (!FileUtil.canExecute(f)) {
+ FileUtil.setExecutable(f, true);
}
String executorPath = f.getAbsolutePath();
Configuration conf = new Configuration();
@@ -204,8 +205,8 @@ public class TestLinuxContainerExecutorWithMocks {
// reinitialize executer
File f = new File("./src/test/resources/mock-container-executer-with-error");
- if (!f.canExecute()) {
- f.setExecutable(true);
+ if (!FileUtil.canExecute(f)) {
+ FileUtil.setExecutable(f, true);
}
String executorPath = f.getAbsolutePath();
Configuration conf = new Configuration();

TestNodeHealthService.java

@@ -28,6 +28,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
+ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -81,7 +82,7 @@ public class TestNodeHealthService {
pw.println(scriptStr);
pw.flush();
pw.close();
- nodeHealthscriptFile.setExecutable(setExecutable);
+ FileUtil.setExecutable(nodeHealthscriptFile, setExecutable);
}
@Test

TestContainerLaunch.java

@@ -34,6 +34,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
+ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.util.Shell;
@@ -99,7 +100,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
String timeoutCommand = Shell.WINDOWS ? "@echo \"hello\"" :
"echo \"hello\"";
PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
- shellFile.setExecutable(true);
+ FileUtil.setExecutable(shellFile, true);
writer.println(timeoutCommand);
writer.close();
@@ -123,7 +124,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
ContainerLaunch.writeLaunchEnv(fos, env, resources, commands);
fos.flush();
fos.close();
- tempFile.setExecutable(true);
+ FileUtil.setExecutable(tempFile, true);
Shell.ShellCommandExecutor shexc
= new Shell.ShellCommandExecutor(new String[]{tempFile.getAbsolutePath()}, tmpDir);
@@ -367,7 +368,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
writer.println("while true; do\nsleep 1s;\ndone");
}
writer.close();
- scriptFile.setExecutable(true);
+ FileUtil.setExecutable(scriptFile, true);
ContainerLaunchContext containerLaunchContext =
recordFactory.newRecordInstance(ContainerLaunchContext.class);