MAPREDUCE-3678. The Map tasks logs should have the value of input split it processed. Contributed by Harsh J. (harsh)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1396032 13f79535-47bb-0310-9956-ffa450edef68
commit 0aa8188d18
parent 5c3a331040
@@ -152,6 +152,9 @@ Release 2.0.3-alpha - Unreleased
 
   IMPROVEMENTS
 
+    MAPREDUCE-3678. The Map tasks logs should have the value of input
+    split it processed. (harsh)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -424,6 +424,7 @@ private void updateJobWithSplit(final JobConf job, InputSplit inputSplit) {
       job.setLong(JobContext.MAP_INPUT_START, fileSplit.getStart());
       job.setLong(JobContext.MAP_INPUT_PATH, fileSplit.getLength());
     }
+    LOG.info("Processing split: " + inputSplit);
   }
 
   static class NewTrackingRecordReader<K,V>
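For context on the line added above: the split is logged via its toString(), so for the common old-API FileSplit case the task log now records which file, offset, and length the map processed. Below is a minimal sketch, not part of this commit, showing roughly what that message looks like; the class name SplitLogSketch, the input path, and the sizes are made up, and the printed format assumes FileSplit#toString renders the split as path:start+length.

// Illustrative sketch only (not part of this commit): approximates what the added
// LOG.info line prints when the old-API split is a FileSplit. The path and sizes
// are made-up values, and the output line assumes FileSplit#toString renders
// "path:start+length".
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;

public class SplitLogSketch {
  public static void main(String[] args) {
    InputSplit inputSplit = new FileSplit(
        new Path("hdfs://nn1/user/data/part-00000"), // hypothetical input file
        0L,                                          // start offset of this split
        134217728L,                                  // split length (128 MB)
        new String[0]);                              // no preferred hosts
    // Mirrors the statement added to updateJobWithSplit, with println standing in
    // for the task's LOG.info call.
    System.out.println("Processing split: " + inputSplit);
    // Expected (assumed) output:
    // Processing split: hdfs://nn1/user/data/part-00000:0+134217728
  }
}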
@@ -694,6 +695,7 @@ void runNewMapper(final JobConf job,
     org.apache.hadoop.mapreduce.InputSplit split = null;
     split = getSplitDetails(new Path(splitIndex.getSplitLocation()),
         splitIndex.getStartOffset());
+    LOG.info("Processing split: " + split);
 
     org.apache.hadoop.mapreduce.RecordReader<INKEY,INVALUE> input =
       new NewTrackingRecordReader<INKEY,INVALUE>
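In the new-API path shown above, the split is first reconstructed by getSplitDetails from the location and offset recorded in the split index, then logged the same way. An analogous hedged sketch for the new-API FileSplit follows; the values are again made up, and the output format is the same assumed path:start+length rendering.

// Illustrative sketch only: the new-API split logged in runNewMapper renders the
// same way for the common FileSplit case. Values below are made up; the output
// format again assumes "path:start+length".
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

public class NewApiSplitLogSketch {
  public static void main(String[] args) {
    InputSplit split = new FileSplit(
        new Path("hdfs://nn1/user/data/part-00001"), // hypothetical input file
        134217728L,                                  // start offset (second 128 MB block)
        134217728L,                                  // split length
        new String[] {"datanode-07"});               // hypothetical preferred host
    // Mirrors the statement added to runNewMapper, with println in place of LOG.info.
    System.out.println("Processing split: " + split);
    // Expected (assumed) output:
    // Processing split: hdfs://nn1/user/data/part-00001:134217728+134217728
  }
}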