MAPREDUCE-3462. Fix Gridmix JUnit testcase failures. (Ravi Prakash and Ravi Gummadi via amarrk)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1227051 13f79535-47bb-0310-9956-ffa450edef68
Amar Kamat committed 2012-01-04 05:38:17 +00:00
parent 7ee3e072b8
commit 4c0bac5670
5 changed files with 9 additions and 0 deletions
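
All five files get the same one-line fix: each Gridmix test configuration sets "mapreduce.job.hdfs-servers" to an empty string, presumably so that job submission does not try to contact (or obtain delegation tokens for) HDFS namenodes that are not running in the JUnit environment. Below is a minimal sketch of the recurring pattern; the class and helper names are hypothetical and the surrounding test harness is assumed, not part of this patch.

import org.apache.hadoop.conf.Configuration;

public class GridmixTestConfSketch {
  /** Builds a Configuration the way the patched Gridmix tests do (sketch only). */
  public static Configuration newTestConf() {
    Configuration conf = new Configuration();
    // The recurring fix in this commit: clear the HDFS-server list consulted at
    // job-submission time ("mapreduce.job.hdfs-servers", MRJobConfig.JOB_NAMENODES)
    // so the tests do not depend on an external namenode being reachable.
    conf.set("mapreduce.job.hdfs-servers", "");
    return conf;
  }
}

Each hunk below simply adds the same conf.set(...) line to an existing test configuration rather than introducing a helper like this.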


@@ -52,6 +52,9 @@ Trunk (unreleased changes)
    MAPREDUCE-2944. Improve checking of input for JobClient.displayTasks() (XieXianshan via harsh)

  BUG FIXES

    MAPREDUCE-3462. Fix Gridmix JUnit testcase failures.
    (Ravi Prakash and Ravi Gummadi via amarrk)

    MAPREDUCE-3349. Log rack-name in JobHistory for unsuccessful tasks.
    (Devaraj K and Amar Kamat via amarrk)


@@ -105,6 +105,7 @@ public void testRandomCompressedTextDataGenerator() throws Exception {
    conf.setInt(RandomTextDataGenerator.GRIDMIX_DATAGEN_RANDOMTEXT_WORDSIZE,
                wordSize);
    conf.setLong(GenerateData.GRIDMIX_GEN_BYTES, dataSize);
    conf.set("mapreduce.job.hdfs-servers", "");
    FileSystem lfs = FileSystem.getLocal(conf);
@@ -192,6 +193,7 @@ private void testCompressionRatioConfigure(float ratio)
    CompressionEmulationUtil.setInputCompressionEmulationEnabled(conf, true);
    conf.setLong(GenerateData.GRIDMIX_GEN_BYTES, dataSize);
    conf.set("mapreduce.job.hdfs-servers", "");
    float expectedRatio = CompressionEmulationUtil.DEFAULT_COMPRESSION_RATIO;
    if (ratio > 0) {


@@ -141,6 +141,7 @@ private long[] configureDummyDistCacheFiles(Configuration conf,
      boolean useOldProperties) throws IOException {
    String user = UserGroupInformation.getCurrentUser().getShortUserName();
    conf.set(MRJobConfig.USER_NAME, user);
    conf.set("mapreduce.job.hdfs-servers", "");
    // Set some dummy dist cache files in gridmix configuration so that they go
    // into the configuration of JobStory objects.
    String[] distCacheFiles = {"hdfs:///tmp/file1.txt",


@@ -521,6 +521,7 @@ private void doSubmission(boolean useDefaultQueue,
    DebugGridmix client = new DebugGridmix();
    conf = new Configuration();
    conf.setEnum(GridmixJobSubmissionPolicy.JOB_SUBMISSION_POLICY, policy);
    conf.set("mapreduce.job.hdfs-servers", "");
    if (useDefaultQueue) {
      conf.setBoolean(GridmixJob.GRIDMIX_USE_QUEUE_IN_TRACE, false);
      conf.set(GridmixJob.GRIDMIX_DEFAULT_QUEUE, "q1");


@@ -205,6 +205,7 @@ public void testMapTasksOnlySleepJobs()
      throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(SleepJob.SLEEPJOB_MAPTASK_ONLY, true);
    conf.set("mapreduce.job.hdfs-servers", "");
    DebugJobProducer jobProducer = new DebugJobProducer(5, conf);
    JobConf jconf = GridmixTestUtils.mrCluster.createJobConf(new JobConf(conf));
    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
@@ -253,6 +254,7 @@ private void doSubmission(String...optional) throws Exception {
    DebugGridmix client = new DebugGridmix();
    conf = new Configuration();
    conf.setEnum(GridmixJobSubmissionPolicy.JOB_SUBMISSION_POLICY, policy);
    conf.set("mapreduce.job.hdfs-servers", "");
    conf = GridmixTestUtils.mrCluster.createJobConf(new JobConf(conf));
    // allow synthetic users to create home directories
    GridmixTestUtils.dfs.mkdirs(root, new FsPermission((short) 0777));