diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 897739a4fa..305742eeb8 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -233,6 +233,9 @@ Release 0.23.1 - Unreleased
MAPREDUCE-3456. $HADOOP_PREFIX/bin/yarn should set defaults for
$HADOOP_*_HOME (Eric Payne via mahadev)
+ MAPREDUCE-3458. Fix findbugs warnings in hadoop-examples. (Devaraj K
+ via mahadev)
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml b/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml
new file mode 100644
index 0000000000..244adafebd
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml
@@ -0,0 +1,63 @@
+  [63 lines of XML: ASF license header and FindBugsFilter exclusion rules; element content not preserved here]
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
index 48824df082..60557ddc73 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
@@ -27,6 +27,10 @@
  <name>Apache Hadoop MapReduce Examples</name>
  <packaging>jar</packaging>
+
+  <properties>
+    <mr.examples.basedir>${basedir}</mr.examples.basedir>
+  </properties>
+
      <groupId>org.apache.hadoop</groupId>
@@ -58,6 +62,18 @@
+
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${mr.examples.basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
+
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/Join.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/Join.java
index 86a13ad0ca..2063d04655 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/Join.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/Join.java
@@ -19,7 +19,9 @@
package org.apache.hadoop.examples;
import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
@@ -29,9 +31,14 @@
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.join.*;
+import org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat;
+import org.apache.hadoop.mapreduce.lib.join.TupleWritable;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
@@ -52,7 +59,7 @@
* [in-dir]* in-dir out-dir
*/
public class Join extends Configured implements Tool {
- public static String REDUCES_PER_HOST = "mapreduce.join.reduces_per_host";
+ public static final String REDUCES_PER_HOST = "mapreduce.join.reduces_per_host";
static int printUsage() {
System.out.println("join [-r ] " +
"[-inFormat ] " +
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
index 3b8759a977..5e636b901e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
@@ -69,7 +69,7 @@ public String getName() {
}
public int[] getRotations() {
- return rotations;
+ return rotations.clone();
}
public boolean getFlippable() {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java
index ec99c19bc4..9880d54003 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java
@@ -70,7 +70,7 @@ public class TeraGen extends Configured implements Tool {
public static enum Counters {CHECKSUM}
- public static String NUM_ROWS = "mapreduce.terasort.num-rows";
+ public static final String NUM_ROWS = "mapreduce.terasort.num-rows";
/**
* An input format that assigns ranges of longs to each mapper.
*/
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java
index a381aba913..4ef0033012 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java
@@ -156,10 +156,10 @@ Text[] createPartitions(int numPartitions) {
* them and picks N-1 keys to generate N equally sized partitions.
* @param job the job to sample
* @param partFile where to write the output file to
- * @throws IOException if something goes wrong
+ * @throws Throwable if something goes wrong
*/
public static void writePartitionFile(final JobContext job,
- Path partFile) throws IOException, InterruptedException {
+ Path partFile) throws Throwable {
long t1 = System.currentTimeMillis();
Configuration conf = job.getConfiguration();
final TeraInputFormat inFormat = new TeraInputFormat();
@@ -174,11 +174,12 @@ public static void writePartitionFile(final JobContext job,
final long recordsPerSample = sampleSize / samples;
final int sampleStep = splits.size() / samples;
Thread[] samplerReader = new Thread[samples];
+ SamplerThreadGroup threadGroup = new SamplerThreadGroup("Sampler Reader Thread Group");
// take N samples from different parts of the input
for(int i=0; i < samples; ++i) {
final int idx = i;
samplerReader[i] =
- new Thread ("Sampler Reader " + idx) {
+ new Thread (threadGroup,"Sampler Reader " + idx) {
{
setDaemon(true);
}
@@ -201,7 +202,7 @@ public void run() {
} catch (IOException ie){
System.err.println("Got an exception while reading splits " +
StringUtils.stringifyException(ie));
- System.exit(-1);
+ throw new RuntimeException(ie);
} catch (InterruptedException e) {
}
@@ -215,6 +216,9 @@ public void run() {
for (int i = 0; i < samples; i++) {
try {
samplerReader[i].join();
+ if(threadGroup.getThrowable() != null){
+ throw threadGroup.getThrowable();
+ }
} catch (InterruptedException e) {
}
}
@@ -225,6 +229,25 @@ public void run() {
long t3 = System.currentTimeMillis();
System.out.println("Computing parititions took " + (t3 - t2) + "ms");
}
+
+ static class SamplerThreadGroup extends ThreadGroup{
+
+ private Throwable throwable;
+
+ public SamplerThreadGroup(String s) {
+ super(s);
+ }
+
+ @Override
+ public void uncaughtException(Thread thread, Throwable throwable) {
+ this.throwable = throwable;
+ }
+
+ public Throwable getThrowable() {
+ return this.throwable;
+ }
+
+ }
  static class TeraRecordReader extends RecordReader<Text,Text> {
private FSDataInputStream in;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java
index 2ce8155b47..7daa3016c2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java
@@ -31,7 +31,6 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -300,7 +299,12 @@ public int run(String[] args) throws Exception {
TeraInputFormat.PARTITION_FILENAME);
URI partitionUri = new URI(partitionFile.toString() +
"#" + TeraInputFormat.PARTITION_FILENAME);
- TeraInputFormat.writePartitionFile(job, partitionFile);
+ try {
+ TeraInputFormat.writePartitionFile(job, partitionFile);
+ } catch (Throwable e) {
+ LOG.error(e.getMessage());
+ return -1;
+ }
job.addCacheFile(partitionUri);
job.createSymlink();
long end = System.currentTimeMillis();