MAPREDUCE-4579. Split TestTaskAttempt into two so as to pass tests on jdk7. Contributed by Thomas Graves.

svn merge --ignore-ancestry -c 1377943 ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1377944 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2012-08-28 02:05:25 +00:00
parent aa92e2ab65
commit 14b1cf459b
3 changed files with 173 additions and 103 deletions
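Background: as the warning comment in the test notes, the credentials passed
into the first ContainerLaunchContext are cached statically and reused for
later ones. JDK7 stopped returning test methods in source order, so
TestTaskAttempt could no longer guarantee that testAttemptContainerRequest ran
before some other test populated that cache; moving the test into its own file
is the fix. A minimal sketch of the failure mode (class and field names below
are invented for illustration, not the actual TaskAttemptImpl code):

    // OrderSensitiveCacheDemo.java -- illustrative only, hypothetical names.
    // A once-populated static cache makes a caller's result depend on who
    // called first, which is why the order-sensitive test needs its own file.
    import java.util.Arrays;

    public class OrderSensitiveCacheDemo {
      // stand-in for the statically cached, serialized credentials
      private static byte[] cachedCredentials;

      static byte[] getOrCache(byte[] credentials) {
        if (cachedCredentials == null) {
          cachedCredentials = credentials;  // first caller wins, permanently
        }
        return cachedCredentials;           // later callers get the cached copy
      }

      public static void main(String[] args) {
        getOrCache("other-test-secret".getBytes()); // another test runs first
        byte[] seen = getOrCache("my-secret".getBytes());
        // prints false: the second caller never sees its own credentials
        System.out.println(Arrays.equals(seen, "my-secret".getBytes()));
      }
    }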

CHANGES.txt

@@ -99,6 +99,9 @@ Release 2.1.0-alpha - Unreleased
     MAPREDUCE-4580. Change MapReduce to use the yarn-client module.
     (Vinod Kumar Vavilapalli via sseth)
 
+    MAPREDUCE-4579. Split TestTaskAttempt into two so as to pass tests on
+    jdk7. (Thomas Graves via vinodkv)
+
   OPTIMIZATIONS
 
   BUG FIXES

org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java

@@ -28,7 +28,6 @@ import static org.mockito.Mockito.when;
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
@@ -36,23 +35,17 @@ import java.util.Map;
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.io.DataInputByteBuffer;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapTaskAttemptImpl;
-import org.apache.hadoop.mapred.WrappedJvmID;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
 import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion;
-import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
 import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -79,18 +72,14 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
 import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.ClusterInfo;
 import org.apache.hadoop.yarn.SystemClock;
-import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.event.Event;
@@ -101,82 +90,6 @@ import org.mockito.ArgumentCaptor;
 @SuppressWarnings({"unchecked", "rawtypes"})
 public class TestTaskAttempt{
-  @Test
-  public void testAttemptContainerRequest() throws Exception {
-    //WARNING: This test must run first. This is because there is an
-    // optimization where the credentials passed in are cached statically so
-    // they do not need to be recomputed when creating a new
-    // ContainerLaunchContext. if other tests run first this code will cache
-    // their credentials and this test will fail trying to look for the
-    // credentials it inserted in.
-    final Text SECRET_KEY_ALIAS = new Text("secretkeyalias");
-    final byte[] SECRET_KEY = ("secretkey").getBytes();
-    Map<ApplicationAccessType, String> acls =
-        new HashMap<ApplicationAccessType, String>(1);
-    acls.put(ApplicationAccessType.VIEW_APP, "otheruser");
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 1);
-    JobId jobId = MRBuilderUtils.newJobId(appId, 1);
-    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
-    Path jobFile = mock(Path.class);
-    EventHandler eventHandler = mock(EventHandler.class);
-    TaskAttemptListener taListener = mock(TaskAttemptListener.class);
-    when(taListener.getAddress()).thenReturn(new InetSocketAddress("localhost", 0));
-    JobConf jobConf = new JobConf();
-    jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
-    jobConf.setBoolean("fs.file.impl.disable.cache", true);
-    jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
-    // setup UGI for security so tokens and keys are preserved
-    jobConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    UserGroupInformation.setConfiguration(jobConf);
-    Credentials credentials = new Credentials();
-    credentials.addSecretKey(SECRET_KEY_ALIAS, SECRET_KEY);
-    Token<JobTokenIdentifier> jobToken = new Token<JobTokenIdentifier>(
-        ("tokenid").getBytes(), ("tokenpw").getBytes(),
-        new Text("tokenkind"), new Text("tokenservice"));
-    TaskAttemptImpl taImpl =
-        new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
-            mock(TaskSplitMetaInfo.class), jobConf, taListener,
-            mock(OutputCommitter.class), jobToken, credentials,
-            new SystemClock(), null);
-    jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, taImpl.getID().toString());
-    ContainerId containerId = BuilderUtils.newContainerId(1, 1, 1, 1);
-    ContainerLaunchContext launchCtx =
-        TaskAttemptImpl.createContainerLaunchContext(acls, containerId,
-            jobConf, jobToken, taImpl.createRemoteTask(),
-            TypeConverter.fromYarn(jobId), mock(Resource.class),
-            mock(WrappedJvmID.class), taListener,
-            credentials);
-    Assert.assertEquals("ACLs mismatch", acls, launchCtx.getApplicationACLs());
-    Credentials launchCredentials = new Credentials();
-    DataInputByteBuffer dibb = new DataInputByteBuffer();
-    dibb.reset(launchCtx.getContainerTokens());
-    launchCredentials.readTokenStorageStream(dibb);
-    // verify all tokens specified for the task attempt are in the launch context
-    for (Token<? extends TokenIdentifier> token : credentials.getAllTokens()) {
-      Token<? extends TokenIdentifier> launchToken =
-          launchCredentials.getToken(token.getService());
-      Assert.assertNotNull("Token " + token.getService() + " is missing",
-          launchToken);
-      Assert.assertEquals("Token " + token.getService() + " mismatch",
-          token, launchToken);
-    }
-    // verify the secret key is in the launch context
-    Assert.assertNotNull("Secret key missing",
-        launchCredentials.getSecretKey(SECRET_KEY_ALIAS));
-    Assert.assertTrue("Secret key mismatch", Arrays.equals(SECRET_KEY,
-        launchCredentials.getSecretKey(SECRET_KEY_ALIAS)));
-  }
   static public class StubbedFS extends RawLocalFileSystem {
     @Override

org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttemptContainerRequest.java (new file)

@@ -0,0 +1,154 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.app.job.impl;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import junit.framework.Assert;

import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapTaskAttemptImpl;
import org.apache.hadoop.mapred.WrappedJvmID;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.SystemClock;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test;

@SuppressWarnings({"rawtypes"})
public class TestTaskAttemptContainerRequest {

  //WARNING: This test must be the only test in this file. This is because
  // there is an optimization where the credentials passed in are cached
  // statically so they do not need to be recomputed when creating a new
  // ContainerLaunchContext. if other tests run first this code will cache
  // their credentials and this test will fail trying to look for the
  // credentials it inserted in.
  @Test
  public void testAttemptContainerRequest() throws Exception {
    final Text SECRET_KEY_ALIAS = new Text("secretkeyalias");
    final byte[] SECRET_KEY = ("secretkey").getBytes();
    Map<ApplicationAccessType, String> acls =
        new HashMap<ApplicationAccessType, String>(1);
    acls.put(ApplicationAccessType.VIEW_APP, "otheruser");
    ApplicationId appId = BuilderUtils.newApplicationId(1, 1);
    JobId jobId = MRBuilderUtils.newJobId(appId, 1);
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
    Path jobFile = mock(Path.class);

    EventHandler eventHandler = mock(EventHandler.class);
    TaskAttemptListener taListener = mock(TaskAttemptListener.class);
    when(taListener.getAddress()).thenReturn(new InetSocketAddress("localhost", 0));

    JobConf jobConf = new JobConf();
    jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
    jobConf.setBoolean("fs.file.impl.disable.cache", true);
    jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");

    // setup UGI for security so tokens and keys are preserved
    jobConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(jobConf);

    Credentials credentials = new Credentials();
    credentials.addSecretKey(SECRET_KEY_ALIAS, SECRET_KEY);
    Token<JobTokenIdentifier> jobToken = new Token<JobTokenIdentifier>(
        ("tokenid").getBytes(), ("tokenpw").getBytes(),
        new Text("tokenkind"), new Text("tokenservice"));

    TaskAttemptImpl taImpl =
        new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
            mock(TaskSplitMetaInfo.class), jobConf, taListener,
            mock(OutputCommitter.class), jobToken, credentials,
            new SystemClock(), null);

    jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, taImpl.getID().toString());
    ContainerId containerId = BuilderUtils.newContainerId(1, 1, 1, 1);

    ContainerLaunchContext launchCtx =
        TaskAttemptImpl.createContainerLaunchContext(acls, containerId,
            jobConf, jobToken, taImpl.createRemoteTask(),
            TypeConverter.fromYarn(jobId), mock(Resource.class),
            mock(WrappedJvmID.class), taListener,
            credentials);

    Assert.assertEquals("ACLs mismatch", acls, launchCtx.getApplicationACLs());
    Credentials launchCredentials = new Credentials();

    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.reset(launchCtx.getContainerTokens());
    launchCredentials.readTokenStorageStream(dibb);

    // verify all tokens specified for the task attempt are in the launch context
    for (Token<? extends TokenIdentifier> token : credentials.getAllTokens()) {
      Token<? extends TokenIdentifier> launchToken =
          launchCredentials.getToken(token.getService());
      Assert.assertNotNull("Token " + token.getService() + " is missing",
          launchToken);
      Assert.assertEquals("Token " + token.getService() + " mismatch",
          token, launchToken);
    }

    // verify the secret key is in the launch context
    Assert.assertNotNull("Secret key missing",
        launchCredentials.getSecretKey(SECRET_KEY_ALIAS));
    Assert.assertTrue("Secret key mismatch", Arrays.equals(SECRET_KEY,
        launchCredentials.getSecretKey(SECRET_KEY_ALIAS)));
  }

  static public class StubbedFS extends RawLocalFileSystem {
    @Override
    public FileStatus getFileStatus(Path f) throws IOException {
      return new FileStatus(1, false, 1, 1, 1, f);
    }
  }
}
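
For reference, the assertions in this test depend on Credentials surviving a
serialization round trip through the launch context's token buffer. A
standalone sketch of that round trip, using only Hadoop classes the test
already exercises (the write side is an assumption about what
createContainerLaunchContext does internally; the read side mirrors the test):

    // CredentialsRoundTripSketch.java -- illustrative sketch, not Hadoop code.
    import java.nio.ByteBuffer;

    import org.apache.hadoop.io.DataInputByteBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;

    public class CredentialsRoundTripSketch {
      public static void main(String[] args) throws Exception {
        Credentials creds = new Credentials();
        creds.addSecretKey(new Text("secretkeyalias"), "secretkey".getBytes());

        // write the credentials to a byte buffer, the form in which the
        // launch context carries them
        DataOutputBuffer dob = new DataOutputBuffer();
        creds.writeTokenStorageToStream(dob);
        ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

        // read them back exactly the way the test does
        DataInputByteBuffer dibb = new DataInputByteBuffer();
        dibb.reset(tokens);
        Credentials readBack = new Credentials();
        readBack.readTokenStorageStream(dibb);

        // the secret key added above survives the round trip
        System.out.println(
            new String(readBack.getSecretKey(new Text("secretkeyalias"))));
      }
    }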