diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 87e41223396..76325478507 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -188,6 +188,9 @@ Release 0.23.1 - Unreleased
MAPREDUCE-3456. $HADOOP_PREFIX/bin/yarn should set defaults for
$HADOOP_*_HOME (Eric Payne via mahadev)
+ MAPREDUCE-3458. Fix findbugs warnings in hadoop-examples. (Devaraj K
+ via mahadev)
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml b/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml
new file mode 100644
index 00000000000..244adafebdc
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/dev-support/findbugs-exclude.xml
@@ -0,0 +1,63 @@
+<FindBugsFilter>
+  <!-- exclusion Match rules elided -->
+</FindBugsFilter>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
index 4f54c4c1de2..86885c5c6e3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
@@ -27,6 +27,10 @@
   <name>Apache Hadoop MapReduce Examples</name>
   <packaging>jar</packaging>
 
+  <properties>
+    <mr.examples.basedir>${basedir}</mr.examples.basedir>
+  </properties>
+
   <dependencies>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -59,6 +63,18 @@
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${mr.examples.basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/Join.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/Join.java
index 86a13ad0ca2..2063d04655a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/Join.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/Join.java
@@ -19,7 +19,9 @@
package org.apache.hadoop.examples;
import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
@@ -29,9 +31,14 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.join.*;
+import org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat;
+import org.apache.hadoop.mapreduce.lib.join.TupleWritable;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
@@ -52,7 +59,7 @@ import org.apache.hadoop.util.ToolRunner;
* [in-dir]* in-dir out-dir
*/
public class Join extends Configured implements Tool {
- public static String REDUCES_PER_HOST = "mapreduce.join.reduces_per_host";
+ public static final String REDUCES_PER_HOST = "mapreduce.join.reduces_per_host";
static int printUsage() {
System.out.println("join [-r ] " +
"[-inFormat ] " +
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
index 3b8759a9774..5e636b901e7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
@@ -69,7 +69,7 @@ public class Pentomino {
}
public int[] getRotations() {
- return rotations;
+ return rotations.clone();
}
public boolean getFlippable() {
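
The Pentomino change above is the standard fix for FindBugs' "may expose internal representation" (EI_EXPOSE_REP style) warning: return a copy of the internal array rather than the array itself, so callers cannot modify the piece's internal state. A minimal, self-contained sketch of the idiom (class and field names here are illustrative, not taken from Pentomino.java):

public class RotatableShape {
  private final int[] rotations;

  public RotatableShape(int[] rotations) {
    // Copy on the way in so the caller's array cannot alias our state.
    this.rotations = rotations.clone();
  }

  public int[] getRotations() {
    // Copy on the way out; callers may modify the result freely.
    return rotations.clone();
  }
}
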
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java
index ec99c19bc44..9880d54003e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java
@@ -70,7 +70,7 @@ public class TeraGen extends Configured implements Tool {
public static enum Counters {CHECKSUM}
- public static String NUM_ROWS = "mapreduce.terasort.num-rows";
+ public static final String NUM_ROWS = "mapreduce.terasort.num-rows";
/**
* An input format that assigns ranges of longs to each mapper.
*/
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java
index a381aba913f..4ef0033012f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraInputFormat.java
@@ -156,10 +156,10 @@ public class TeraInputFormat extends FileInputFormat<Text,Text> {
* them and picks N-1 keys to generate N equally sized partitions.
* @param job the job to sample
* @param partFile where to write the output file to
- * @throws IOException if something goes wrong
+ * @throws Throwable if something goes wrong
*/
public static void writePartitionFile(final JobContext job,
- Path partFile) throws IOException, InterruptedException {
+ Path partFile) throws Throwable {
long t1 = System.currentTimeMillis();
Configuration conf = job.getConfiguration();
final TeraInputFormat inFormat = new TeraInputFormat();
@@ -174,11 +174,12 @@ public class TeraInputFormat extends FileInputFormat<Text,Text> {
final long recordsPerSample = sampleSize / samples;
final int sampleStep = splits.size() / samples;
Thread[] samplerReader = new Thread[samples];
+ SamplerThreadGroup threadGroup = new SamplerThreadGroup("Sampler Reader Thread Group");
// take N samples from different parts of the input
for(int i=0; i < samples; ++i) {
final int idx = i;
samplerReader[i] =
- new Thread ("Sampler Reader " + idx) {
+ new Thread (threadGroup,"Sampler Reader " + idx) {
{
setDaemon(true);
}
@@ -201,7 +202,7 @@ public class TeraInputFormat extends FileInputFormat<Text,Text> {
} catch (IOException ie){
System.err.println("Got an exception while reading splits " +
StringUtils.stringifyException(ie));
- System.exit(-1);
+ throw new RuntimeException(ie);
} catch (InterruptedException e) {
}
@@ -215,6 +216,9 @@ public class TeraInputFormat extends FileInputFormat<Text,Text> {
for (int i = 0; i < samples; i++) {
try {
samplerReader[i].join();
+ if(threadGroup.getThrowable() != null){
+ throw threadGroup.getThrowable();
+ }
} catch (InterruptedException e) {
}
}
@@ -225,6 +229,25 @@ public class TeraInputFormat extends FileInputFormat<Text,Text> {
long t3 = System.currentTimeMillis();
System.out.println("Computing parititions took " + (t3 - t2) + "ms");
}
+
+ static class SamplerThreadGroup extends ThreadGroup{
+
+ private Throwable throwable;
+
+ public SamplerThreadGroup(String s) {
+ super(s);
+ }
+
+ @Override
+ public void uncaughtException(Thread thread, Throwable throwable) {
+ this.throwable = throwable;
+ }
+
+ public Throwable getThrowable() {
+ return this.throwable;
+ }
+
+ }
  static class TeraRecordReader extends RecordReader<Text,Text> {
private FSDataInputStream in;
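
The writePartitionFile() changes above stop the sampler threads from calling System.exit(-1) on failure; instead each thread's uncaught exception is recorded by the SamplerThreadGroup and rethrown by the caller after join(). A self-contained sketch of that thread-group pattern, using hypothetical names rather than the TeraInputFormat code:

public class WorkerFailureDemo {

  // A ThreadGroup that remembers the last uncaught exception thrown by any
  // of its threads, mirroring the SamplerThreadGroup idea above.
  static class CapturingThreadGroup extends ThreadGroup {
    private volatile Throwable throwable;

    CapturingThreadGroup(String name) {
      super(name);
    }

    @Override
    public void uncaughtException(Thread thread, Throwable t) {
      this.throwable = t;
    }

    Throwable getThrowable() {
      return throwable;
    }
  }

  public static void main(String[] args) throws Throwable {
    CapturingThreadGroup group = new CapturingThreadGroup("workers");
    Thread worker = new Thread(group, () -> {
      // Simulate a sampler reader hitting a fatal error.
      throw new RuntimeException("simulated read failure");
    }, "worker-0");
    worker.setDaemon(true);
    worker.start();
    worker.join();
    // Surface the worker's failure to the caller instead of exiting the JVM.
    if (group.getThrowable() != null) {
      throw group.getThrowable();
    }
  }
}

Rethrowing instead of exiting keeps the failure visible to callers such as TeraSort.run(), which, as the final hunk below shows, now logs the error and returns a non-zero result.
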
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java
index 2ce8155b476..7daa3016c21 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -300,7 +299,12 @@ public class TeraSort extends Configured implements Tool {
TeraInputFormat.PARTITION_FILENAME);
URI partitionUri = new URI(partitionFile.toString() +
"#" + TeraInputFormat.PARTITION_FILENAME);
- TeraInputFormat.writePartitionFile(job, partitionFile);
+ try {
+ TeraInputFormat.writePartitionFile(job, partitionFile);
+ } catch (Throwable e) {
+ LOG.error(e.getMessage());
+ return -1;
+ }
job.addCacheFile(partitionUri);
job.createSymlink();
long end = System.currentTimeMillis();