MAPREDUCE-4871. AM uses mapreduce.jobtracker.split.metainfo.maxsize but mapred-default has mapreduce.job.split.metainfo.maxsize (Jason Lowe via jeagles)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1451318 13f79535-47bb-0310-9956-ffa450edef68
parent 6401d5d2ec
commit 892846dc04
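The fix makes the ApplicationMaster read the same key that mapred-default.xml documents, mapreduce.job.split.metainfo.maxsize. As a hedged illustration (the key string comes from this commit; the class and value below are hypothetical), a client that wants a larger limit would now set:

    import org.apache.hadoop.conf.Configuration;

    public class SplitMetaInfoLimitExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Raise the split meta-info size limit to roughly 50 MB using the key
        // documented in mapred-default.xml (and read by the AM after this fix).
        conf.setLong("mapreduce.job.split.metainfo.maxsize", 50000000L);
      }
    }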
@@ -747,6 +747,9 @@ Release 0.23.7 - UNRELEASED
     MAPREDUCE-5009. Killing the Task Attempt slated for commit does not clear
     the value from the Task commitAttempt member (Robert Parker via jeagles)
 
+    MAPREDUCE-4871. AM uses mapreduce.jobtracker.split.metainfo.maxsize but
+    mapred-default has mapreduce.job.split.metainfo.maxsize (Jason Lowe via
+    jeagles)
 
 Release 0.23.6 - UNRELEASED
 
@@ -63,6 +63,9 @@ public interface MRJobConfig {
 
   public static final String SPLIT_FILE = "mapreduce.job.splitfile";
 
+  public static final String SPLIT_METAINFO_MAXSIZE = "mapreduce.job.split.metainfo.maxsize";
+
+  public static final long DEFAULT_SPLIT_METAINFO_MAXSIZE = 10000000L;
 
   public static final String NUM_MAPS = "mapreduce.job.maps";
 
   public static final String MAX_TASK_FAILURES_PER_TRACKER = "mapreduce.job.maxtaskfailures.per.tracker";
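The new MRJobConfig constants give MapReduce code a symbolic name for the key and its 10000000-byte default. A minimal sketch of a per-job override using those constants (the class and job name below are illustrative, not part of this change):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class SubmitWithLargerSplitMetaInfo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "example");  // illustrative job name
        // Allow twice the default split meta-info size for this job only.
        job.getConfiguration().setLong(MRJobConfig.SPLIT_METAINFO_MAXSIZE,
            2 * MRJobConfig.DEFAULT_SPLIT_METAINFO_MAXSIZE);
      }
    }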
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.split;
 import java.io.IOException;
 import java.util.Arrays;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -29,9 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.JobSubmissionFiles;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 /**
  * A utility that reads the split meta info and creates
@@ -44,8 +44,8 @@ public class SplitMetaInfoReader {
   public static JobSplit.TaskSplitMetaInfo[] readSplitMetaInfo(
       JobID jobId, FileSystem fs, Configuration conf, Path jobSubmitDir)
   throws IOException {
-    long maxMetaInfoSize = conf.getLong(JTConfig.JT_MAX_JOB_SPLIT_METAINFO_SIZE,
-        10000000L);
+    long maxMetaInfoSize = conf.getLong(MRJobConfig.SPLIT_METAINFO_MAXSIZE,
+        MRJobConfig.DEFAULT_SPLIT_METAINFO_MAXSIZE);
     Path metaSplitFile = JobSubmissionFiles.getJobSplitMetaFile(jobSubmitDir);
     String jobSplitFile = JobSubmissionFiles.getJobSplitFile(jobSubmitDir).toString();
     FileStatus fStatus = fs.getFileStatus(metaSplitFile);
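The value read here bounds the size of the job's split meta-info file; the enforcement itself lies outside this hunk. A hedged sketch of the typical check, assuming a non-positive limit means "no limit" (the helper below is illustrative, not code from this commit):

    // Illustrative helper, not part of this diff: how such a limit is
    // typically enforced against the meta file length.
    private static void checkSplitMetaInfoSize(long maxMetaInfoSize,
        long metaFileLen, JobID jobId) throws IOException {
      // A non-positive limit disables the check.
      if (maxMetaInfoSize > 0 && metaFileLen > maxMetaInfoSize) {
        throw new IOException("Split metadata size exceeded "
            + maxMetaInfoSize + ". Aborting job " + jobId);
      }
    }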
@@ -521,6 +521,8 @@ public class ConfigUtil {
     });
     Configuration.addDeprecation("mapreduce.user.classpath.first",
       MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST);
+    Configuration.addDeprecation(JTConfig.JT_MAX_JOB_SPLIT_METAINFO_SIZE,
+      MRJobConfig.SPLIT_METAINFO_MAXSIZE);
   }
 
   public static void main(String[] args) {
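The deprecation mapping keeps configurations that still set the old jobtracker-era key working: values supplied under it resolve through the new key. A minimal sketch of that behavior, assuming the MapReduce deprecation table has been loaded (for example via ConfigUtil.loadResources()); the class name and numeric value are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRJobConfig;
    import org.apache.hadoop.mapreduce.util.ConfigUtil;

    public class DeprecatedSplitMetaInfoKeyExample {
      public static void main(String[] args) {
        ConfigUtil.loadResources();  // registers the deprecations added above
        Configuration conf = new Configuration();
        // A site file or client that still uses the old jobtracker key...
        conf.setLong("mapreduce.jobtracker.split.metainfo.maxsize", 50000000L);
        // ...is honored when the framework reads the new key.
        long max = conf.getLong(MRJobConfig.SPLIT_METAINFO_MAXSIZE,
            MRJobConfig.DEFAULT_SPLIT_METAINFO_MAXSIZE);
        System.out.println(max);  // expected: 50000000
      }
    }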