From 814afa46efef201cb782072432fc744e1cb9c463 Mon Sep 17 00:00:00 2001
From: Junping Du
Date: Mon, 16 Feb 2015 09:38:05 -0800
Subject: [PATCH] MAPREDUCE-6225. Fix new findbug warnings in hadoop-mapreduce-client-core. Contributed by Varun Saxena

---
 hadoop-mapreduce-project/CHANGES.txt               |  3 +++
 .../java/org/apache/hadoop/mapred/IndexCache.java  |  2 +-
 .../org/apache/hadoop/mapred/TaskLogAppender.java  |  2 +-
 .../mapred/lib/CombineFileRecordReader.java        |  5 -----
 .../org/apache/hadoop/mapreduce/JobSubmitter.java  | 15 ++++++---------
 .../lib/fieldsel/FieldSelectionHelper.java         |  2 +-
 .../lib/input/CombineFileRecordReader.java         |  5 -----
 .../mapreduce/security/SecureShuffleUtils.java     | 15 ++++++++++-----
 .../mapreduce/task/reduce/InMemoryReader.java      |  4 +---
 .../hadoop/mapreduce/util/ResourceBundles.java     |  2 +-
 10 files changed, 24 insertions(+), 31 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index bb9e105616..9ef7a32f46 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -294,6 +294,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6256. Removed unused private methods in o.a.h.mapreduce.Job.java.
     (Naganarasimha G R via ozawa)

+    MAPREDUCE-6225. Fix new findbug warnings in hadoop-mapreduce-client-core.
+    (Varun Saxena via junping_du)
+
   OPTIMIZATIONS

     MAPREDUCE-6169. MergeQueue should release reference to the current item
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java
index 54add3a81a..c3db9514d3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java
@@ -145,7 +145,7 @@ private IndexInformation readIndexFileToCache(Path indexFileName,
    */
   public void removeMap(String mapId) {
     IndexInformation info = cache.get(mapId);
-    if (info == null || ((info != null) && isUnderConstruction(info))) {
+    if (info == null || isUnderConstruction(info)) {
       return;
     }
     info = cache.remove(mapId);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLogAppender.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLogAppender.java
index 2162a2602c..d10b764640 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLogAppender.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLogAppender.java
@@ -75,7 +75,7 @@ private synchronized void setOptionsFromSystemProperties() {

     if (maxEvents == null) {
       String propValue = System.getProperty(LOGSIZE_PROPERTY, "0");
-      setTotalLogFileSize(Long.valueOf(propValue));
+      setTotalLogFileSize(Long.parseLong(propValue));
     }
   }

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java
index f54f1760d7..a25eb29888 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java
@@ -21,8 +21,6 @@
 import java.io.*;
 import java.lang.reflect.*;

-import org.apache.hadoop.fs.FileSystem;
-
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -49,9 +47,7 @@ public class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
   protected CombineFileSplit split;
   protected JobConf jc;
   protected Reporter reporter;
-  protected Class<RecordReader<K, V>> rrClass;
   protected Constructor<RecordReader<K, V>> rrConstructor;
-  protected FileSystem fs;

   protected int idx;
   protected long progress;
@@ -106,7 +102,6 @@ public CombineFileRecordReader(JobConf job, CombineFileSplit split,
     throws IOException {
     this.split = split;
     this.jc = job;
-    this.rrClass = rrClass;
     this.reporter = reporter;
     this.idx = 0;
     this.curReader = null;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
index ba496ee84f..75357f73c4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
@@ -390,10 +390,12 @@ private void copyAndConfigureFiles(Job job, Path jobSubmitDir)
     short replication = (short)conf.getInt(Job.SUBMIT_REPLICATION, 10);
     copyAndConfigureFiles(job, jobSubmitDir, replication);

-    // Set the working directory
-    if (job.getWorkingDirectory() == null) {
-      job.setWorkingDirectory(jtFs.getWorkingDirectory());
-    }
+    // Get the working directory. If not set, sets it to filesystem working dir
+    // This code has been added so that working directory reset before running
+    // the job. This is necessary for backward compatibility as other systems
+    // might use the public API JobConf#setWorkingDirectory to reset the working
+    // directory.
+    job.getWorkingDirectory();
   }

   /**
@@ -773,11 +775,6 @@ private void addLog4jToDistributedCache(Job job,
     if (!log4jPropertyFile.isEmpty()) {
       short replication = (short)conf.getInt(Job.SUBMIT_REPLICATION, 10);
       copyLog4jPropertyFile(job, jobSubmitDir, replication);
-
-      // Set the working directory
-      if (job.getWorkingDirectory() == null) {
-        job.setWorkingDirectory(jtFs.getWorkingDirectory());
-      }
     }
   }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
index 11d6ee2d97..6e22fe9044 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
@@ -90,7 +90,7 @@ private static int extractFields(String[] fieldListSpec,
       }
       pos = fieldSpec.indexOf('-');
       if (pos < 0) {
-        Integer fn = new Integer(fieldSpec);
+        Integer fn = Integer.valueOf(fieldSpec);
         fieldList.add(fn);
       } else {
         String start = fieldSpec.substring(0, pos);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
index 767f79a1c0..f71b946ec9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
@@ -21,8 +21,6 @@
 import java.io.*;
 import java.lang.reflect.*;

-import org.apache.hadoop.fs.FileSystem;
-
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -46,9 +44,7 @@ public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
                                          Integer.class};

   protected CombineFileSplit split;
-  protected Class<? extends RecordReader<K,V>> rrClass;
   protected Constructor<? extends RecordReader<K,V>> rrConstructor;
-  protected FileSystem fs;
   protected TaskAttemptContext context;

   protected int idx;
@@ -111,7 +107,6 @@ public CombineFileRecordReader(CombineFileSplit split,
     throws IOException {
     this.split = split;
     this.context = context;
-    this.rrClass = rrClass;
     this.idx = 0;
     this.curReader = null;
     this.progress = 0;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
index 040a3e3665..60919d0f79 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
@@ -21,8 +21,8 @@
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
 import java.net.URL;
-
 import javax.crypto.SecretKey;
 import javax.servlet.http.HttpServletRequest;

@@ -141,10 +141,15 @@ private static String buildMsgFrom(String uri_path, String uri_query, int port)
    */
   public static String toHex(byte[] ba) {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PrintStream ps = new PrintStream(baos);
-    for (byte b : ba) {
-      ps.printf("%x", b);
+    String strHex = "";
+    try {
+      PrintStream ps = new PrintStream(baos, false, "UTF-8");
+      for (byte b : ba) {
+        ps.printf("%x", b);
+      }
+      strHex = baos.toString("UTF-8");
+    } catch (UnsupportedEncodingException e) {
     }
-    return baos.toString();
+    return strHex;
   }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java
index b246d24f2b..8a6ab44eda 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/InMemoryReader.java
@@ -79,10 +79,8 @@ private void dumpOnError() {
     File dumpFile = new File("../output/" + taskAttemptId + ".dump");
     System.err.println("Dumping corrupt map-output of " + taskAttemptId +
                        " to " + dumpFile.getAbsolutePath());
-    try {
-      FileOutputStream fos = new FileOutputStream(dumpFile);
+    try (FileOutputStream fos = new FileOutputStream(dumpFile)) {
       fos.write(buffer, 0, bufferSize);
-      fos.close();
     } catch (IOException ioe) {
       System.err.println("Failed to dump map-output of " + taskAttemptId);
     }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java
index 52addcfa86..4a11740b3c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java
@@ -59,7 +59,7 @@ public static synchronized <T> T getValue(String bundleName, String key,
     catch (Exception e) {
       return defaultValue;
     }
-    return value == null ? defaultValue : value;
+    return value;
   }

   private static String getLookupKey(String key, String suffix) {