MAPREDUCE-5379. Include token tracking ids in jobconf. (kkambatl via tucu)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1523605 13f79535-47bb-0310-9956-ffa450edef68
parent 9fe18b50dd
commit e60b057c0a
@@ -190,6 +190,8 @@ Release 2.1.1-beta - UNRELEASED
     but just before ClientService to avoid race conditions during RM restart.
     (Jian He via vinodkv)
 
+    MAPREDUCE-5379. Include token tracking ids in jobconf. (kkambatl via tucu)
+
   OPTIMIZATIONS
 
     MAPREDUCE-5446. TestJobHistoryEvents and TestJobHistoryParsing have race
@@ -24,6 +24,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.UnknownHostException;
 import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
@@ -56,6 +57,7 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.map.JsonMappingException;
@@ -405,6 +407,19 @@ class JobSubmitter {
       // different job.
       TokenCache.cleanUpTokenReferral(conf);
 
+      if (conf.getBoolean(
+          MRJobConfig.JOB_TOKEN_TRACKING_IDS_ENABLED,
+          MRJobConfig.DEFAULT_JOB_TOKEN_TRACKING_IDS_ENABLED)) {
+        // Add HDFS tracking ids
+        ArrayList<String> trackingIds = new ArrayList<String>();
+        for (Token<? extends TokenIdentifier> t :
+            job.getCredentials().getAllTokens()) {
+          trackingIds.add(t.decodeIdentifier().getTrackingId());
+        }
+        conf.setStrings(MRJobConfig.JOB_TOKEN_TRACKING_IDS,
+            trackingIds.toArray(new String[trackingIds.size()]));
+      }
+
       // Write job file to submit dir
       writeConf(conf, submitJobFile);
 
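For orientation only, not part of this patch: once JobSubmitter has populated MRJobConfig.JOB_TOKEN_TRACKING_IDS as above, the ids ride along in the submitted job configuration and can be read back with the plain Configuration API. A minimal sketch, assuming a submitted job's job.xml is available locally (the class name, argument handling, and path are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;

// Sketch: print the tracking ids recorded by JobSubmitter above.
// args[0] is assumed to be a local path to a submitted job's job.xml.
public class PrintTrackingIds {
  public static void main(String[] args) {
    Configuration jobConf = new Configuration(false);
    jobConf.addResource(new Path(args[0]));

    // Written by JobSubmitter only when the feature flag is enabled.
    String[] trackingIds =
        jobConf.getStrings(MRJobConfig.JOB_TOKEN_TRACKING_IDS);
    if (trackingIds != null) {
      for (String id : trackingIds) {
        System.out.println("token tracking id: " + id);
      }
    }
  }
}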
@@ -313,6 +313,13 @@ public interface MRJobConfig {
   public static final String MAPREDUCE_JOB_CREDENTIALS_BINARY =
       "mapreduce.job.credentials.binary";
 
+  /* Configs for tracking ids of tokens used by a job */
+  public static final String JOB_TOKEN_TRACKING_IDS_ENABLED =
+      "mapreduce.job.token.tracking.ids.enabled";
+  public static final boolean DEFAULT_JOB_TOKEN_TRACKING_IDS_ENABLED = false;
+  public static final String JOB_TOKEN_TRACKING_IDS =
+      "mapreduce.job.token.tracking.ids";
+
   public static final String JOB_SUBMITHOST =
       "mapreduce.job.submithostname";
   public static final String JOB_SUBMITHOSTADDR =
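Also not part of the commit: a client that wants the tracking ids recorded could flip the new flag programmatically before submission, with the constants above keeping the key names in one place. A small sketch of a trivial driver (class name and job name are placeholders; the usual mapper/reducer/path setup is elided):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;

// Sketch: opt a job in to token-tracking-id capture before submitting it.
public class SubmitWithTrackingIds {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // New flag added by this commit; defaults to false.
    conf.setBoolean(MRJobConfig.JOB_TOKEN_TRACKING_IDS_ENABLED, true);

    Job job = Job.getInstance(conf, "token-tracking-demo");
    // ... mapper/reducer/input/output setup elided (illustrative only) ...
    job.submit();
  }
}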
@@ -748,6 +748,23 @@
   </description>
 </property>
 
+<property>
+  <name>mapreduce.job.token.tracking.ids.enabled</name>
+  <value>false</value>
+  <description>Whether to write tracking ids of tokens to
+    job-conf. When true, the configuration property
+    "mapreduce.job.token.tracking.ids" is set to the token-tracking-ids of
+    the job</description>
+</property>
+
+<property>
+  <name>mapreduce.job.token.tracking.ids</name>
+  <value></value>
+  <description>When mapreduce.job.token.tracking.ids.enabled is
+    set to true, this is set by the framework to the
+    token-tracking-ids used by the job.</description>
+</property>
+
 <property>
   <name>mapreduce.task.merge.progress.records</name>
   <value>10000</value>
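One more illustrative note, not from the commit itself: because mapreduce.job.token.tracking.ids.enabled is an ordinary job-conf key, a driver that goes through ToolRunner can also switch it on per run from the command line (for example with -D mapreduce.job.token.tracking.ids.enabled=true). The key name is the one documented above; everything else in this sketch is a placeholder:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// Sketch of a ToolRunner-based driver; run it as, for example:
//   hadoop jar my-app.jar TrackingIdsDriver \
//       -D mapreduce.job.token.tracking.ids.enabled=true <in> <out>
public class TrackingIdsDriver extends Configured implements Tool {
  @Override
  public int run(String[] args) throws Exception {
    // getConf() already contains any -D overrides parsed by ToolRunner.
    Job job = Job.getInstance(getConf(), "tracking-ids-driver");
    // ... mapper/reducer/input/output setup elided (illustrative only) ...
    return job.waitForCompletion(true) ? 0 : 1;
  }

  public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new TrackingIdsDriver(), args));
  }
}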