MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support binary compatibility with hadoop-1 examples. Contributed by Zhijie Shen.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1480394 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli, 2013-05-08 18:28:22 +00:00
parent 8dee15abe3
commit 8888d3fc49
2 changed files with 33 additions and 5 deletions
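The new `caller` parameter exists for jar resolution: `JobConf.setJarByClass` locates the jar that contains the given class, so a driver that passes its own class gets its own jar submitted instead of the jar containing the aggregate library. A minimal sketch of a hadoop-1 style driver using the new overload (the `WordCountAggregate` class and its use of the generic `args` are hypothetical, not part of this patch):

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

    // Hypothetical hadoop-1 style driver; only the ValueAggregatorJob call
    // is from this patch, everything else is illustrative.
    public class WordCountAggregate {
      public static void main(String[] args) throws IOException {
        // New overload from this patch: the caller class is used for
        // setJarByClass(), so the job ships the example's own jar.
        JobConf job = ValueAggregatorJob.createValueAggregatorJob(
            args, WordCountAggregate.class);
        JobClient.runJob(job);
      }
    }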

hadoop-mapreduce-project/CHANGES.txt

@@ -221,6 +221,9 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-5036. Default shuffle handler port should not be 8080.
     (Sandy Ryza via tomwhite)

+    MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support
+    binary compatibility with hadoop-1 examples. (Zhijie Shen via vinodkv)
+
   OPTIMIZATIONS

     MAPREDUCE-4974. Optimising the LineRecordReader initialize() method

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java

@@ -102,15 +102,17 @@ public static JobControl createValueAggregatorJobs(String args[]) throws IOException
   /**
    * Create an Aggregate based map/reduce job.
    *
    * @param args the arguments used for job creation. Generic hadoop
    * arguments are accepted.
+   * @param caller the caller class.
    * @return a JobConf object ready for submission.
    *
    * @throws IOException
    * @see GenericOptionsParser
    */
-  public static JobConf createValueAggregatorJob(String args[])
+  @SuppressWarnings("rawtypes")
+  public static JobConf createValueAggregatorJob(String args[], Class<?> caller)
     throws IOException {
     Configuration conf = new Configuration();
@@ -159,7 +161,7 @@ public static JobConf createValueAggregatorJob(String args[])
     }
     String userJarFile = theJob.get("user.jar.file");
     if (userJarFile == null) {
-      theJob.setJarByClass(ValueAggregator.class);
+      theJob.setJarByClass(caller != null ? caller : ValueAggregatorJob.class);
     } else {
       theJob.setJar(userJarFile);
     }
@@ -183,6 +185,21 @@ public static JobConf createValueAggregatorJob(String args[])
     return theJob;
   }

+  /**
+   * Create an Aggregate based map/reduce job.
+   *
+   * @param args the arguments used for job creation. Generic hadoop
+   * arguments are accepted.
+   * @return a JobConf object ready for submission.
+   *
+   * @throws IOException
+   * @see GenericOptionsParser
+   */
+  public static JobConf createValueAggregatorJob(String args[])
+      throws IOException {
+    return createValueAggregatorJob(args, ValueAggregator.class);
+  }
+
   public static JobConf createValueAggregatorJob(String args[]
     , Class<? extends ValueAggregatorDescriptor>[] descriptors)
     throws IOException {
@@ -199,7 +216,15 @@ public static void setAggregatorDescriptors(JobConf job
       job.set("aggregator.descriptor." + i, "UserDefined," + descriptors[i].getName());
     }
   }

+  public static JobConf createValueAggregatorJob(String args[],
+      Class<? extends ValueAggregatorDescriptor>[] descriptors,
+      Class<?> caller) throws IOException {
+    JobConf job = createValueAggregatorJob(args, caller);
+    setAggregatorDescriptors(job, descriptors);
+    return job;
+  }
+
   /**
    * create and run an Aggregate based map/reduce job.
    *
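The three-argument overload is the caller-aware version of the existing descriptors variant: it builds the job, registers the descriptors, and resolves the jar from the caller. A sketch of its use, assuming a hypothetical user-defined descriptor `MyAggregatorDescriptor` (for example, a subclass of `ValueAggregatorBaseDescriptor`):

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor;
    import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

    public class CustomAggregateDriver {
      @SuppressWarnings("unchecked")
      public static void main(String[] args) throws IOException {
        // MyAggregatorDescriptor is hypothetical; a raw Class[] is the
        // usual way to build an array of parameterized Class objects.
        Class<? extends ValueAggregatorDescriptor>[] descriptors =
            new Class[] { MyAggregatorDescriptor.class };
        // New three-argument overload from this patch: descriptors plus
        // the caller class used for setJarByClass().
        JobConf job = ValueAggregatorJob.createValueAggregatorJob(
            args, descriptors, CustomAggregateDriver.class);
        JobClient.runJob(job);
      }
    }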