diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Reducer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Reducer.java
index bc7ad976f3..efb0950715 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Reducer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Reducer.java
@@ -26,6 +26,8 @@
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.mapred.RawKeyValueIterator;
 
+import java.util.Iterator;
+
 /**
  * Reduces a set of intermediate values which share a key to a smaller set of
  * values.
@@ -162,14 +164,15 @@ protected void cleanup(Context context
    * {@link #run(org.apache.hadoop.mapreduce.Reducer.Context)} method to
    * control how the reduce task works.
    */
-  @SuppressWarnings("unchecked")
   public void run(Context context) throws IOException, InterruptedException {
     setup(context);
     while (context.nextKey()) {
       reduce(context.getCurrentKey(), context.getValues(), context);
       // If a back up store is used, reset it
-      ((ReduceContext.ValueIterator)
-          (context.getValues().iterator())).resetBackupStore();
+      Iterator iter = context.getValues().iterator();
+      if(iter instanceof ReduceContext.ValueIterator) {
+        ((ReduceContext.ValueIterator)iter).resetBackupStore();
+      }
     }
     cleanup(context);
   }
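
For context: the guard matters when Reducer.run() is driven by a Context whose values iterator is not Hadoop's ReduceContext.ValueIterator, for example a stubbed context in a unit test. The old unconditional cast would throw ClassCastException in that case, while the patched code simply skips the backup-store reset. The following standalone sketch (hypothetical types, not Hadoop's API) illustrates the same defensive-cast pattern:

// Standalone sketch of the defensive-cast pattern used in the patch above:
// only call the reset hook when the iterator really is the special type.
import java.util.Iterator;
import java.util.List;

public class GuardedCastSketch {

  /** Stand-in for ReduceContext.ValueIterator: an iterator with an extra reset hook. */
  interface ResettableIterator<T> extends Iterator<T> {
    void resetBackupStore();
  }

  static void consume(Iterable<String> values) {
    for (String v : values) {
      System.out.println(v);
    }
    Iterator<String> iter = values.iterator();
    // Guarded cast: a plain iterator (e.g. from a stubbed context in a test)
    // no longer triggers a ClassCastException here.
    if (iter instanceof ResettableIterator) {
      ((ResettableIterator<String>) iter).resetBackupStore();
    }
  }

  public static void main(String[] args) {
    // List.iterator() is not a ResettableIterator, so the reset is simply skipped.
    consume(List.of("a", "b", "c"));
  }
}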