MAPREDUCE-3044. Pipes jobs stuck without making progress. (mahadev) - Merging -r1173909 from trunk
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1173910 13f79535-47bb-0310-9956-ffa450edef68
parent 6f62a83a31
commit 9b93426833
@@ -1364,6 +1364,8 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-3066. Fixed default ResourceTracker address for the NodeManager.
     (Chris Riccomini via acmurthy)
 
+    MAPREDUCE-3044. Pipes jobs stuck without making progress. (mahadev)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -31,6 +31,8 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
 public class MapReduceChildJVM {
@@ -164,8 +166,8 @@ public class MapReduceChildJVM {
   private static void setupLog4jProperties(Vector<String> vargs,
       long logSize) {
     vargs.add("-Dlog4j.configuration=container-log4j.properties");
-    vargs.add("-Dhadoop.yarn.mr.containerLogDir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR);
-    vargs.add("-Dhadoop.yarn.mr.totalLogFileSize=" + logSize);
+    vargs.add("-D" + MRJobConfig.TASK_LOG_DIR + "=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR);
+    vargs.add("-D" + MRJobConfig.TASK_LOG_SIZE + "=" + logSize);
   }
 
   public static List<String> getVMCommand(
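The hunk above switches the child JVM's log4j flags from the hard-coded hadoop.yarn.mr.* names to the keys defined in MRJobConfig, so they line up with TaskLog and with the renamed properties in container-log4j.properties further down. A minimal sketch of how a process launched with those flags would observe the values; the class name ChildLogPropsProbe is illustrative, only the MRJobConfig constants come from this patch.

    // Sketch: reads the system properties that setupLog4jProperties above passes
    // as -D<key>=<value> on the child command line. ChildLogPropsProbe is a
    // made-up name for illustration.
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class ChildLogPropsProbe {
      public static void main(String[] args) {
        String logDir  = System.getProperty(MRJobConfig.TASK_LOG_DIR);
        String logSize = System.getProperty(MRJobConfig.TASK_LOG_SIZE);
        System.out.println("container log dir  = " + logDir);
        System.out.println("total log filesize = " + logSize);
      }
    }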
@@ -44,6 +44,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.util.ProcessTree;
 import org.apache.hadoop.util.Shell;
 import org.apache.log4j.Appender;
@@ -75,10 +76,18 @@ public class TaskLog {
       }
     }
   }
 
+  public static String getMRv2LogDir() {
+    return System.getProperty(MRJobConfig.TASK_LOG_DIR);
+  }
+
   public static File getTaskLogFile(TaskAttemptID taskid, boolean isCleanup,
       LogName filter) {
-    return new File(getAttemptDir(taskid, isCleanup), filter.toString());
+    if (getMRv2LogDir() != null) {
+      return new File(getMRv2LogDir(), filter.toString());
+    } else {
+      return new File(getAttemptDir(taskid, isCleanup), filter.toString());
+    }
   }
 
   static File getRealTaskLogFileLocation(TaskAttemptID taskid,
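With this hunk, TaskLog prefers the MRv2 container log directory whenever the MRJobConfig.TASK_LOG_DIR system property is set (which the -D flag from MapReduceChildJVM does) and only falls back to the classic per-attempt directory otherwise. A small usage sketch with made-up directory, hadoop.log.dir and attempt-id values, assuming the classes above are on the classpath:

    // Usage sketch for the new resolution order in getTaskLogFile().
    import java.io.File;
    import org.apache.hadoop.mapred.TaskAttemptID;
    import org.apache.hadoop.mapred.TaskLog;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class TaskLogResolutionDemo {
      public static void main(String[] args) {
        // TaskLog's classic code paths read hadoop.log.dir; set it so the
        // fallback branch is well-defined too (value is an example).
        System.setProperty("hadoop.log.dir", "/tmp/demo-hadoop-logs");
        // In an MRv2 container this arrives as a -D flag from MapReduceChildJVM.
        System.setProperty(MRJobConfig.TASK_LOG_DIR, "/tmp/demo-container-logs");

        TaskAttemptID attempt =
            TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
        File stdout = TaskLog.getTaskLogFile(attempt, false, TaskLog.LogName.STDOUT);

        // With the property set this resolves under /tmp/demo-container-logs;
        // without it, the classic per-attempt userlogs directory is used.
        System.out.println(stdout);
      }
    }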
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.mapred.pipes;
 
 import java.io.BufferedInputStream;
 import java.io.File;
 import java.io.IOException;
 import java.net.ServerSocket;
@@ -26,6 +27,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Random;
 import javax.crypto.SecretKey;
 
@@ -111,7 +113,6 @@ class Application<K1 extends WritableComparable, V1 extends Writable,
     if (interpretor != null) {
       cmd.add(interpretor);
     }
 
     String executable = DistributedCache.getLocalCacheFiles(conf)[0].toString();
     if (!new File(executable).canExecute()) {
       // LinuxTaskController sets +x permissions on all distcache files already.
@@ -129,7 +130,7 @@ class Application<K1 extends WritableComparable, V1 extends Writable,
     long logLength = TaskLog.getTaskLogLength(conf);
     cmd = TaskLog.captureOutAndError(null, cmd, stdout, stderr, logLength,
         false);
 
 
     process = runClient(cmd, env);
     clientSocket = serverSocket.accept();
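This hunk is from the pipes Application class: the wrapped command is started with runClient() and the parent then blocks in serverSocket.accept(), the handshake a pipes child must complete before the task can make progress. A generic, self-contained sketch of that launch-and-accept pattern, not the Hadoop implementation; the binary path, environment variable name and timeout are illustrative.

    // Generic launch-then-accept handshake used by pipes-style runners.
    import java.io.IOException;
    import java.net.ServerSocket;
    import java.net.Socket;
    import java.net.SocketTimeoutException;

    public class ChildHandshakeSketch {
      public static void main(String[] args) throws IOException {
        try (ServerSocket serverSocket = new ServerSocket(0)) { // pick a free port
          serverSocket.setSoTimeout(30_000);                    // do not block forever

          // Tell the child where to connect back, then start it.
          // "/path/to/child-binary" and CHILD_PORT are placeholders.
          ProcessBuilder pb = new ProcessBuilder("/path/to/child-binary");
          pb.environment().put("CHILD_PORT",
              String.valueOf(serverSocket.getLocalPort()));
          pb.inheritIO(); // or redirect stdout/stderr to log files
          Process child = pb.start();

          try (Socket clientSocket = serverSocket.accept()) {   // wait for the child
            System.out.println("child connected from "
                + clientSocket.getRemoteSocketAddress());
          } catch (SocketTimeoutException e) {
            child.destroy();                                    // child never connected
            System.err.println("child did not connect back; check its logs");
          }
        }
      }
    }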
@@ -458,7 +458,12 @@ public interface MRJobConfig {
   // For now, generated by the AppManagers and used by NodeManagers and the
   // Containers.
   public static final String APPLICATION_TOKENS_FILE = "appTokens";
 
+  /** The log directory for the containers */
+  public static final String TASK_LOG_DIR = MR_PREFIX + "container.log.dir";
+
+  public static final String TASK_LOG_SIZE = MR_PREFIX + "log.filesize";
+
   public static final String MAPREDUCE_V2_CHILD_CLASS =
       "org.apache.hadoop.mapred.YarnChild";
 
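The two new constants give the child-JVM flags from MapReduceChildJVM and the lookup in TaskLog.getMRv2LogDir() a single definition. A tiny sketch that just prints the expanded keys so they can be compared with the ${...} references in container-log4j.properties below; it assumes MRJobConfig is on the classpath and that MR_PREFIX, which is defined elsewhere in the interface and not part of this hunk, expands to the yarn.app.mapreduce. prefix seen in the properties file.

    // Print the expanded configuration keys defined in the hunk above.
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class LogConfigKeys {
      public static void main(String[] args) {
        System.out.println("TASK_LOG_DIR  = " + MRJobConfig.TASK_LOG_DIR);
        System.out.println("TASK_LOG_SIZE = " + MRJobConfig.TASK_LOG_SIZE);
      }
    }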
@@ -12,12 +12,12 @@ log4j.threshold=ALL
 #
 
 #Default values
-hadoop.yarn.mr.containerLogDir=null
-hadoop.yarn.mr.totalLogFileSize=100
+yarn.app.mapreduce.container.log.dir=null
+yarn.app.mapreduce.container.log.filesize=100
 
 log4j.appender.CLA=org.apache.hadoop.yarn.ContainerLogAppender
-log4j.appender.CLA.containerLogDir=${hadoop.yarn.mr.containerLogDir}
-log4j.appender.CLA.totalLogFileSize=${hadoop.yarn.mr.totalLogFileSize}
+log4j.appender.CLA.containerLogDir=${yarn.app.mapreduce.container.log.dir}
+log4j.appender.CLA.totalLogFileSize=${yarn.app.mapreduce.container.log.filesize}
 
 log4j.appender.CLA.layout=org.apache.log4j.PatternLayout
 log4j.appender.CLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
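The renamed keys work because log4j 1.2 resolves ${...} references against JVM system properties first, so the -D flags added in MapReduceChildJVM decide where the container appender writes. A self-contained sketch of that substitution mechanism; it uses a plain FileAppender and a made-up directory instead of ContainerLogAppender so it runs without YARN on the classpath.

    // Demonstrates log4j 1.2 ${...} substitution from system properties, the
    // mechanism the renamed keys above rely on. Paths are example values.
    import java.io.File;
    import java.util.Properties;
    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    public class Log4jSubstitutionDemo {
      public static void main(String[] args) {
        // Normally supplied on the child JVM command line as a -D flag.
        String dir = "/tmp/demo-container-logs";
        System.setProperty("yarn.app.mapreduce.container.log.dir", dir);
        new File(dir).mkdirs(); // FileAppender needs the parent directory to exist

        Properties p = new Properties();
        p.setProperty("log4j.rootLogger", "INFO, FA");
        p.setProperty("log4j.appender.FA", "org.apache.log4j.FileAppender");
        p.setProperty("log4j.appender.FA.File",
            "${yarn.app.mapreduce.container.log.dir}/syslog");
        p.setProperty("log4j.appender.FA.layout", "org.apache.log4j.PatternLayout");
        p.setProperty("log4j.appender.FA.layout.ConversionPattern",
            "%d{ISO8601} %p %c: %m%n");
        PropertyConfigurator.configure(p);

        // The message should land in /tmp/demo-container-logs/syslog.
        Logger.getLogger(Log4jSubstitutionDemo.class).info("hello from the demo");
      }
    }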