YARN-7226. Whitelisted variables do not support delayed variable expansion. Contributed by Jason Lowe
This commit is contained in:
parent
175ff0585d
commit
3ecbfff937
|
@ -26,10 +26,8 @@ import java.net.InetAddress;
|
|||
import java.net.UnknownHostException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
import java.util.concurrent.locks.ReentrantReadWriteLock;
|
||||
|
@ -44,7 +42,6 @@ import org.apache.hadoop.conf.Configurable;
|
|||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.fs.permission.FsPermission;
|
||||
import org.apache.hadoop.yarn.api.ApplicationConstants;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.Resource;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
|
@ -95,10 +92,15 @@ public abstract class ContainerExecutor implements Configurable {
|
|||
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
|
||||
private final ReadLock readLock = lock.readLock();
|
||||
private final WriteLock writeLock = lock.writeLock();
|
||||
private String[] whitelistVars;
|
||||
|
||||
@Override
|
||||
public void setConf(Configuration conf) {
|
||||
this.conf = conf;
|
||||
if (conf != null) {
|
||||
whitelistVars = conf.get(YarnConfiguration.NM_ENV_WHITELIST,
|
||||
YarnConfiguration.DEFAULT_NM_ENV_WHITELIST).split(",");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -323,6 +325,8 @@ public abstract class ContainerExecutor implements Configurable {
|
|||
public void writeLaunchEnv(OutputStream out, Map<String, String> environment,
|
||||
Map<Path, List<String>> resources, List<String> command, Path logDir,
|
||||
String user, String outFilename) throws IOException {
|
||||
updateEnvForWhitelistVars(environment);
|
||||
|
||||
ContainerLaunch.ShellScriptBuilder sb =
|
||||
ContainerLaunch.ShellScriptBuilder.create();
|
||||
|
||||
|
@ -333,23 +337,11 @@ public abstract class ContainerExecutor implements Configurable {
|
|||
sb.stdout(logDir, CONTAINER_PRE_LAUNCH_STDOUT);
|
||||
sb.stderr(logDir, CONTAINER_PRE_LAUNCH_STDERR);
|
||||
|
||||
Set<String> whitelist = new HashSet<>();
|
||||
|
||||
whitelist.add(YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME);
|
||||
whitelist.add(ApplicationConstants.Environment.HADOOP_YARN_HOME.name());
|
||||
whitelist.add(ApplicationConstants.Environment.HADOOP_COMMON_HOME.name());
|
||||
whitelist.add(ApplicationConstants.Environment.HADOOP_HDFS_HOME.name());
|
||||
whitelist.add(ApplicationConstants.Environment.HADOOP_CONF_DIR.name());
|
||||
whitelist.add(ApplicationConstants.Environment.JAVA_HOME.name());
|
||||
|
||||
if (environment != null) {
|
||||
sb.echo("Setting up env variables");
|
||||
for (Map.Entry<String, String> env : environment.entrySet()) {
|
||||
if (!whitelist.contains(env.getKey())) {
|
||||
sb.env(env.getKey(), env.getValue());
|
||||
} else {
|
||||
sb.whitelistedEnv(env.getKey(), env.getValue());
|
||||
}
|
||||
sb.env(env.getKey(), env.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -650,6 +642,28 @@ public abstract class ContainerExecutor implements Configurable {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Propagate variables from the nodemanager's environment into the
|
||||
* container's environment if unspecified by the container.
|
||||
* @param env the environment to update
|
||||
* @see org.apache.hadoop.yarn.conf.YarnConfiguration#NM_ENV_WHITELIST
|
||||
*/
|
||||
protected void updateEnvForWhitelistVars(Map<String, String> env) {
|
||||
for(String var : whitelistVars) {
|
||||
if (!env.containsKey(var)) {
|
||||
String val = getNMEnvVar(var);
|
||||
if (val != null) {
|
||||
env.put(var, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@VisibleForTesting
protected String getNMEnvVar(String varname) {
  // Thin wrapper around System.getenv so tests can override the
  // nodemanager's apparent environment (see TestContainerLaunch).
  return System.getenv(varname);
}
|
||||
|
||||
/**
|
||||
* Mark the container as active.
|
||||
*
|
||||
|
|
|
@ -419,6 +419,13 @@ public class LinuxContainerExecutor extends ContainerExecutor {
|
|||
localDirs);
|
||||
}
|
||||
|
||||
@Override
protected void updateEnvForWhitelistVars(Map<String, String> env) {
  // Delegate the propagation decision to the container runtime: e.g. the
  // Docker runtime declines so the variables are picked up from the
  // image rather than inherited from the nodemanager.
  if (linuxContainerRuntime.useWhitelistEnv(env)) {
    super.updateEnvForWhitelistVars(env);
  }
}
|
||||
|
||||
@Override
|
||||
public int launchContainer(ContainerStartContext ctx)
|
||||
throws IOException, ConfigurationException {
|
||||
|
|
|
@ -88,7 +88,6 @@ import org.apache.hadoop.yarn.util.AuxiliaryServiceHelper;
|
|||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.yarn.util.ConverterUtils;
|
||||
|
||||
public class ContainerLaunch implements Callable<Integer> {
|
||||
|
||||
|
@ -998,8 +997,6 @@ public class ContainerLaunch implements Callable<Integer> {
|
|||
|
||||
public abstract void command(List<String> command) throws IOException;
|
||||
|
||||
public abstract void whitelistedEnv(String key, String value) throws IOException;
|
||||
|
||||
protected static final String ENV_PRELAUNCH_STDOUT = "PRELAUNCH_OUT";
|
||||
protected static final String ENV_PRELAUNCH_STDERR = "PRELAUNCH_ERR";
|
||||
|
||||
|
@ -1138,11 +1135,6 @@ public class ContainerLaunch implements Callable<Integer> {
|
|||
line("exec /bin/bash -c \"", StringUtils.join(" ", command), "\"");
|
||||
}
|
||||
|
||||
@Override
public void whitelistedEnv(String key, String value) throws IOException {
  // Emit: export KEY=${KEY:-"value"}
  // The NM-supplied value is only a default — if the variable is already
  // set when the launch script runs, the existing value wins. Expansion
  // of the value is thereby delayed until script execution time.
  line("export ", key, "=${", key, ":-", "\"", value, "\"}");
}
|
||||
|
||||
@Override
|
||||
public void setStdOut(final Path stdout) throws IOException {
|
||||
line("export ", ENV_PRELAUNCH_STDOUT, "=\"", stdout.toString(), "\"");
|
||||
|
@ -1238,12 +1230,6 @@ public class ContainerLaunch implements Callable<Integer> {
|
|||
errorCheck();
|
||||
}
|
||||
|
||||
@Override
public void whitelistedEnv(String key, String value) throws IOException {
  // Windows batch has no ${VAR:-default} equivalent, so the variable is
  // set unconditionally, same as a regular env() entry.
  lineWithLenCheck("@set ", key, "=", value);
  errorCheck();
}
|
||||
|
||||
//Dummy implementation
|
||||
@Override
|
||||
protected void setStdOut(final Path stdout) throws IOException {
|
||||
|
@ -1365,17 +1351,6 @@ public class ContainerLaunch implements Callable<Integer> {
|
|||
environment.put("JVM_PID", "$$");
|
||||
}
|
||||
|
||||
/**
|
||||
* Modifiable environment variables
|
||||
*/
|
||||
|
||||
// allow containers to override these variables
|
||||
String[] whitelist = conf.get(YarnConfiguration.NM_ENV_WHITELIST, YarnConfiguration.DEFAULT_NM_ENV_WHITELIST).split(",");
|
||||
|
||||
for(String whitelistEnvVariable : whitelist) {
|
||||
putEnvIfAbsent(environment, whitelistEnvVariable.trim());
|
||||
}
|
||||
|
||||
// variables here will be forced in, even if the container has specified them.
|
||||
Apps.setEnvFromInputString(environment, conf.get(
|
||||
YarnConfiguration.NM_ADMIN_USER_ENV,
|
||||
|
|
|
@ -36,6 +36,7 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*;
|
||||
|
||||
|
@ -71,6 +72,11 @@ public class DefaultLinuxContainerRuntime implements LinuxContainerRuntime {
|
|||
this.conf = conf;
|
||||
}
|
||||
|
||||
@Override
public boolean useWhitelistEnv(Map<String, String> env) {
  // Containers launched directly on the node should inherit the
  // whitelisted nodemanager environment variables; env is ignored.
  return true;
}
|
||||
|
||||
@Override
|
||||
public void prepareContainer(ContainerRuntimeContext ctx)
|
||||
throws ContainerExecutionException {
|
||||
|
|
|
@ -84,6 +84,17 @@ public class DelegatingLinuxContainerRuntime implements LinuxContainerRuntime {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean useWhitelistEnv(Map<String, String> env) {
|
||||
try {
|
||||
LinuxContainerRuntime runtime = pickContainerRuntime(env);
|
||||
return runtime.useWhitelistEnv(env);
|
||||
} catch (ContainerExecutionException e) {
|
||||
LOG.debug("Unable to determine runtime");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
LinuxContainerRuntime pickContainerRuntime(
|
||||
Map<String, String> environment) throws ContainerExecutionException {
|
||||
|
|
|
@ -281,6 +281,13 @@ public class DockerLinuxContainerRuntime implements LinuxContainerRuntime {
|
|||
YarnConfiguration.DEFAULT_NM_DOCKER_USER_REMAPPING_GID_THRESHOLD);
|
||||
}
|
||||
|
||||
@Override
public boolean useWhitelistEnv(Map<String, String> env) {
  // Avoid propagating nodemanager environment variables into the container
  // so those variables can be picked up from the Docker image instead.
  // The env argument is ignored: the decision is unconditional for Docker.
  return false;
}
|
||||
|
||||
@Override
|
||||
public void prepareContainer(ContainerRuntimeContext ctx)
|
||||
throws ContainerExecutionException {
|
||||
|
|
|
@ -24,6 +24,8 @@ import org.apache.hadoop.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* An abstraction for various container runtime implementations. Examples
|
||||
* include Process Tree, Docker, Appc runtimes etc. These implementations
|
||||
|
@ -83,4 +85,13 @@ public interface ContainerRuntime {
|
|||
* and hostname
|
||||
*/
|
||||
String[] getIpAndHost(Container container) throws ContainerExecutionException;
|
||||
|
||||
/**
|
||||
* Whether to propagate the whitelist of environment variables from the
|
||||
* nodemanager environment into the container environment.
|
||||
* @param env the container's environment variables
|
||||
* @return true if whitelist variables should be propagated, false otherwise
|
||||
* @see org.apache.hadoop.yarn.conf.YarnConfiguration#NM_ENV_WHITELIST
|
||||
*/
|
||||
boolean useWhitelistEnv(Map<String, String> env);
|
||||
}
|
|
@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher;
|
|||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.fail;
|
||||
import static org.junit.Assume.assumeFalse;
|
||||
import static org.mockito.Mockito.any;
|
||||
import static org.mockito.Mockito.atLeastOnce;
|
||||
import static org.mockito.Mockito.mock;
|
||||
|
@ -37,6 +38,7 @@ import java.io.PrintStream;
|
|||
import java.io.PrintWriter;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
|
@ -94,6 +96,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
|
|||
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdater;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest;
|
||||
|
@ -103,6 +106,8 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Cont
|
|||
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEventType;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerExitEvent;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.ShellScriptBuilder;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationExecutor;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.DockerLinuxContainerRuntime;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerStartContext;
|
||||
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService;
|
||||
|
@ -112,7 +117,6 @@ import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
|
|||
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
|
||||
import org.apache.hadoop.yarn.util.Apps;
|
||||
import org.apache.hadoop.yarn.util.AuxiliaryServiceHelper;
|
||||
import org.apache.hadoop.yarn.util.ConverterUtils;
|
||||
import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
|
||||
import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
|
@ -120,7 +124,6 @@ import org.junit.Assert;
|
|||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.mockito.Mockito;
|
||||
|
||||
public class TestContainerLaunch extends BaseContainerManagerTest {
|
||||
|
||||
|
@ -186,10 +189,11 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
|
|||
commands.add("/bin/sh ./\\\"" + badSymlink + "\\\"");
|
||||
}
|
||||
|
||||
new DefaultContainerExecutor()
|
||||
.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user",
|
||||
tempFile.getName());
|
||||
DefaultContainerExecutor executor = new DefaultContainerExecutor();
|
||||
executor.setConf(conf);
|
||||
executor.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user",
|
||||
tempFile.getName());
|
||||
fos.flush();
|
||||
fos.close();
|
||||
FileUtil.setExecutable(tempFile, true);
|
||||
|
@ -260,9 +264,10 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
|
|||
} else {
|
||||
commands.add("/bin/sh ./\\\"" + symLink + "\\\"");
|
||||
}
|
||||
new DefaultContainerExecutor()
|
||||
.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user");
|
||||
DefaultContainerExecutor executor = new DefaultContainerExecutor();
|
||||
executor.setConf(conf);
|
||||
executor.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user");
|
||||
fos.flush();
|
||||
fos.close();
|
||||
FileUtil.setExecutable(tempFile, true);
|
||||
|
@ -297,6 +302,99 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
|
|||
}
|
||||
}
|
||||
|
||||
@Test(timeout = 20000)
|
||||
public void testWriteEnvExport() throws Exception {
|
||||
// Valid only for unix
|
||||
assumeFalse(Shell.WINDOWS);
|
||||
File shellFile = Shell.appendScriptExtension(tmpDir, "hello");
|
||||
Map<String, String> env = new HashMap<String, String>();
|
||||
env.put("HADOOP_COMMON_HOME", "/opt/hadoopcommon");
|
||||
env.put("HADOOP_MAPRED_HOME", "/opt/hadoopbuild");
|
||||
Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
|
||||
FileOutputStream fos = new FileOutputStream(shellFile);
|
||||
List<String> commands = new ArrayList<String>();
|
||||
final Map<String, String> nmEnv = new HashMap<>();
|
||||
nmEnv.put("HADOOP_COMMON_HOME", "nodemanager_common_home");
|
||||
nmEnv.put("HADOOP_HDFS_HOME", "nodemanager_hdfs_home");
|
||||
nmEnv.put("HADOOP_YARN_HOME", "nodemanager_yarn_home");
|
||||
nmEnv.put("HADOOP_MAPRED_HOME", "nodemanager_mapred_home");
|
||||
DefaultContainerExecutor defaultContainerExecutor =
|
||||
new DefaultContainerExecutor() {
|
||||
@Override
|
||||
protected String getNMEnvVar(String varname) {
|
||||
return nmEnv.get(varname);
|
||||
}
|
||||
};
|
||||
YarnConfiguration conf = new YarnConfiguration();
|
||||
conf.set(YarnConfiguration.NM_ENV_WHITELIST,
|
||||
"HADOOP_MAPRED_HOME,HADOOP_YARN_HOME");
|
||||
defaultContainerExecutor.setConf(conf);
|
||||
defaultContainerExecutor.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user");
|
||||
String shellContent =
|
||||
new String(Files.readAllBytes(Paths.get(shellFile.getAbsolutePath())),
|
||||
StandardCharsets.UTF_8);
|
||||
Assert.assertTrue(shellContent
|
||||
.contains("export HADOOP_COMMON_HOME=\"/opt/hadoopcommon\""));
|
||||
// Whitelisted variable overridden by container
|
||||
Assert.assertTrue(shellContent.contains(
|
||||
"export HADOOP_MAPRED_HOME=\"/opt/hadoopbuild\""));
|
||||
// Available in env but not in whitelist
|
||||
Assert.assertFalse(shellContent.contains("HADOOP_HDFS_HOME"));
|
||||
// Available in env and in whitelist
|
||||
Assert.assertTrue(shellContent.contains(
|
||||
"export HADOOP_YARN_HOME=\"nodemanager_yarn_home\""));
|
||||
fos.flush();
|
||||
fos.close();
|
||||
}
|
||||
|
||||
@Test(timeout = 20000)
|
||||
public void testWriteEnvExportDocker() throws Exception {
|
||||
// Valid only for unix
|
||||
assumeFalse(Shell.WINDOWS);
|
||||
File shellFile = Shell.appendScriptExtension(tmpDir, "hello");
|
||||
Map<String, String> env = new HashMap<String, String>();
|
||||
env.put("HADOOP_COMMON_HOME", "/opt/hadoopcommon");
|
||||
env.put("HADOOP_MAPRED_HOME", "/opt/hadoopbuild");
|
||||
Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
|
||||
FileOutputStream fos = new FileOutputStream(shellFile);
|
||||
List<String> commands = new ArrayList<String>();
|
||||
final Map<String, String> nmEnv = new HashMap<>();
|
||||
nmEnv.put("HADOOP_COMMON_HOME", "nodemanager_common_home");
|
||||
nmEnv.put("HADOOP_HDFS_HOME", "nodemanager_hdfs_home");
|
||||
nmEnv.put("HADOOP_YARN_HOME", "nodemanager_yarn_home");
|
||||
nmEnv.put("HADOOP_MAPRED_HOME", "nodemanager_mapred_home");
|
||||
DockerLinuxContainerRuntime dockerRuntime =
|
||||
new DockerLinuxContainerRuntime(
|
||||
mock(PrivilegedOperationExecutor.class));
|
||||
LinuxContainerExecutor lce =
|
||||
new LinuxContainerExecutor(dockerRuntime) {
|
||||
@Override
|
||||
protected String getNMEnvVar(String varname) {
|
||||
return nmEnv.get(varname);
|
||||
}
|
||||
};
|
||||
YarnConfiguration conf = new YarnConfiguration();
|
||||
conf.set(YarnConfiguration.NM_ENV_WHITELIST,
|
||||
"HADOOP_MAPRED_HOME,HADOOP_YARN_HOME");
|
||||
lce.setConf(conf);
|
||||
lce.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user");
|
||||
String shellContent =
|
||||
new String(Files.readAllBytes(Paths.get(shellFile.getAbsolutePath())),
|
||||
StandardCharsets.UTF_8);
|
||||
Assert.assertTrue(shellContent
|
||||
.contains("export HADOOP_COMMON_HOME=\"/opt/hadoopcommon\""));
|
||||
// Whitelisted variable overridden by container
|
||||
Assert.assertTrue(shellContent.contains(
|
||||
"export HADOOP_MAPRED_HOME=\"/opt/hadoopbuild\""));
|
||||
// Verify no whitelisted variables inherited from NM env
|
||||
Assert.assertFalse(shellContent.contains("HADOOP_HDFS_HOME"));
|
||||
Assert.assertFalse(shellContent.contains("HADOOP_YARN_HOME"));
|
||||
fos.flush();
|
||||
fos.close();
|
||||
}
|
||||
|
||||
@Test (timeout = 20000)
|
||||
public void testInvalidEnvSyntaxDiagnostics() throws IOException {
|
||||
|
||||
|
@ -315,9 +413,10 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
|
|||
"\"workflowName\":\"\n\ninsert table " +
|
||||
"\npartition (cd_education_status)\nselect cd_demo_sk, cd_gender, " );
|
||||
List<String> commands = new ArrayList<String>();
|
||||
new DefaultContainerExecutor()
|
||||
.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user");
|
||||
DefaultContainerExecutor executor = new DefaultContainerExecutor();
|
||||
executor.setConf(conf);
|
||||
executor.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user");
|
||||
fos.flush();
|
||||
fos.close();
|
||||
|
||||
|
@ -398,6 +497,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
|
|||
List<String> commands = new ArrayList<String>();
|
||||
commands.add(command);
|
||||
ContainerExecutor exec = new DefaultContainerExecutor();
|
||||
exec.setConf(conf);
|
||||
exec.writeLaunchEnv(fos, env, resources, commands,
|
||||
new Path(localLogDir.getAbsolutePath()), "user");
|
||||
fos.flush();
|
||||
|
|
Loading…
Reference in New Issue