merge MAPREDUCE-3610 (Sho Shimauchi via harsh)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1227098 13f79535-47bb-0310-9956-ffa450edef68
Harsh J 2012-01-04 10:18:24 +00:00
parent 9bebe5e0b7
commit aa4147c428
3 changed files with 6 additions and 3 deletions


@@ -90,6 +90,8 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3547. Added a bunch of unit tests for the the RM/NM webservices.
     (Thomas Graves via acmurthy)
+    MAPREDUCE-3610. Remove use of the 'dfs.block.size' config for default block
+    size fetching. Use FS#getDefaultBlocksize instead. (Sho Shimauchi via harsh)
   OPTIMIZATIONS
     MAPREDUCE-3567. Extraneous JobConf objects in AM heap. (Vinod Kumar
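
For context, the API shift recorded in this changelog entry boils down to the pattern below; this is a minimal, hedged sketch (the example class name is hypothetical and not part of the patch):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

// Hypothetical example class, not part of the patch.
public class DefaultBlockSizeExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // Old pattern removed by MAPREDUCE-3610: read the HDFS-specific
    // 'dfs.block.size' key and fall back to a hard-coded default.
    long fromConfig = conf.getLong("dfs.block.size", 64 * 1024 * 1024);

    // New pattern: let the FileSystem report its own default block size,
    // which works for any FileSystem implementation, not just HDFS.
    long fromFs = fs.getDefaultBlockSize();

    System.out.println("config: " + fromConfig + ", filesystem: " + fromFs);
  }
}

The config-key lookup only holds up when the default filesystem is HDFS and the client configuration happens to carry that key; asking the FileSystem itself avoids both assumptions.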


@@ -858,8 +858,9 @@ public class JobImpl implements org.apache.hadoop.mapreduce.v2.app.job.Job,
     int sysMaxReduces = 1;
     long sysMaxBytes = conf.getLong(MRJobConfig.JOB_UBERTASK_MAXBYTES,
-        conf.getLong("dfs.block.size", 64*1024*1024)); //FIXME: this is
-        // wrong; get FS from [File?]InputFormat and default block size from that
+        fs.getDefaultBlockSize()); // FIXME: this is wrong; get FS from
+        // [File?]InputFormat and default block size
+        // from that
     long sysMemSizeForUberSlot =
         conf.getInt(MRJobConfig.MR_AM_VMEM_MB,
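
The FIXME carried over into the new code notes a refinement this patch does not attempt: the block size should ideally come from the filesystem that actually backs the job's input, not the default filesystem. A hedged sketch of that idea, using hypothetical names (InputBlockSize, defaultBlockSizeFor, inputDir) that are not from the patch:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helper illustrating the FIXME above; not part of the patch.
public class InputBlockSize {
  /**
   * Resolve the FileSystem behind the given input path and ask it for its
   * default block size, instead of assuming the default FileSystem.
   */
  static long defaultBlockSizeFor(Configuration conf, Path inputDir)
      throws IOException {
    FileSystem inputFs = inputDir.getFileSystem(conf);
    return inputFs.getDefaultBlockSize();
  }
}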


@@ -1144,7 +1144,7 @@ public class TestCombineFileInputFormat extends TestCase {
     if (!(fs instanceof DistributedFileSystem)) {
       throw new IOException("Wrong file system: " + fs.getClass().getName());
     }
-    int blockSize = conf.getInt("dfs.block.size", 128 * 1024 * 1024);
+    long blockSize = fs.getDefaultBlockSize();
     DummyInputFormat inFormat = new DummyInputFormat();
     for (int i = 0; i < args.length; i++) {