MAPREDUCE-3610. Remove use of the 'dfs.block.size' config for default block size fetching. Use FS#getDefaultBlockSize instead. (Sho Shimauchi via harsh)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1227091 13f79535-47bb-0310-9956-ffa450edef68
Harsh J 2012-01-04 09:39:14 +00:00
parent 4c0bac5670
commit 2f19c59f1d
3 changed files with 6 additions and 3 deletions
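
At its core the change swaps a hard-coded HDFS configuration lookup for the generic FileSystem API, so the default block size comes from whichever file system actually backs the job rather than from 'dfs.block.size', an HDFS-specific key. A minimal sketch of the before/after pattern, assuming a standard Hadoop client setup (the 64 MB fallback below is illustrative, not taken from this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class DefaultBlockSizeExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // Old pattern removed by this commit: reads an HDFS-specific key and
        // silently falls back to a hard-coded constant on any other FS.
        long fromConfig = conf.getLong("dfs.block.size", 64 * 1024 * 1024);

        // New pattern: ask the file system itself for its default block size.
        long fromFs = fs.getDefaultBlockSize();

        System.out.println("config-based: " + fromConfig + ", FS-based: " + fromFs);
      }
    }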

View File

@@ -178,6 +178,8 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3547. Added a bunch of unit tests for the the RM/NM webservices.
     (Thomas Graves via acmurthy)
+    MAPREDUCE-3610. Remove use of the 'dfs.block.size' config for default block
+    size fetching. Use FS#getDefaultBlockSize instead. (Sho Shimauchi via harsh)
   OPTIMIZATIONS
     MAPREDUCE-3567. Extraneous JobConf objects in AM heap. (Vinod Kumar

View File

@@ -858,8 +858,9 @@ private void makeUberDecision(long dataInputLength) {
     int sysMaxReduces = 1;
     long sysMaxBytes = conf.getLong(MRJobConfig.JOB_UBERTASK_MAXBYTES,
-        conf.getLong("dfs.block.size", 64*1024*1024)); //FIXME: this is
-        // wrong; get FS from [File?]InputFormat and default block size from that
+        fs.getDefaultBlockSize()); // FIXME: this is wrong; get FS from
+                                   // [File?]InputFormat and default block size
+                                   // from that
     long sysMemSizeForUberSlot =
         conf.getInt(MRJobConfig.MR_AM_VMEM_MB,
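
The FIXME retained in the new code points at the fuller fix: the AM should take the default block size from the file system that holds the job's input, which may differ from the default FS. A hypothetical sketch of that idea, assuming the input paths are already known (the helper name and the max-over-paths policy are assumptions, not part of this commit):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    class InputBlockSize {
      // Hypothetical helper: largest default block size among the file systems
      // backing the input paths, falling back to the default FS when none are set.
      static long forInputs(Configuration conf, Path[] inputPaths) throws IOException {
        if (inputPaths == null || inputPaths.length == 0) {
          return FileSystem.get(conf).getDefaultBlockSize();
        }
        long max = 0;
        for (Path p : inputPaths) {
          // Each path may live on a different FS (HDFS, local, S3, ...),
          // so ask that FS rather than reading 'dfs.block.size'.
          max = Math.max(max, p.getFileSystem(conf).getDefaultBlockSize());
        }
        return max;
      }
    }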

View File

@@ -1144,7 +1144,7 @@ private void splitRealFiles(String[] args) throws IOException {
     if (!(fs instanceof DistributedFileSystem)) {
       throw new IOException("Wrong file system: " + fs.getClass().getName());
     }
-    int blockSize = conf.getInt("dfs.block.size", 128 * 1024 * 1024);
+    long blockSize = fs.getDefaultBlockSize();
     DummyInputFormat inFormat = new DummyInputFormat();
     for (int i = 0; i < args.length; i++) {
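
Note the type change that comes with the new call: FileSystem#getDefaultBlockSize() returns a long, so the local variable is widened from int to long, and block sizes of 2 GiB or more no longer risk being unrepresentable the way they would be with an int-typed config read.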