From 34fe11c987730932f99dec6eb458a22624eb075b Mon Sep 17 00:00:00 2001
From: Akira Ajisaka
Date: Wed, 4 Feb 2015 09:25:44 -0800
Subject: [PATCH] MAPREDUCE-6243. Fix findbugs warnings in hadoop-rumen.
 Contributed by Masatake Iwasaki.

---
 hadoop-mapreduce-project/CHANGES.txt                   |  3 +++
 .../apache/hadoop/tools/rumen/Hadoop20JHParser.java    |  5 -----
 .../apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java  |  2 +-
 .../rumen/MapAttempt20LineHistoryEventEmitter.java     |  2 +-
 .../apache/hadoop/tools/rumen/ParsedConfigFile.java    | 11 +++++++----
 .../hadoop/tools/rumen/RandomSeedGenerator.java        |  4 +++-
 .../rumen/ReduceAttempt20LineHistoryEventEmitter.java  |  2 +-
 7 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index ac9de117a5..a633b4ecb9 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -330,6 +330,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6231. Grep example job is not working on a fully-distributed
     cluster. (aajisaka)
 
+    MAPREDUCE-6243. Fix findbugs warnings in hadoop-rumen. (Masatake Iwasaki
+    via aajisaka)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java
index 9cfd85d5ca..08e825b403 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java
@@ -192,11 +192,6 @@ private String getFullLine() throws IOException {
 
     do {
       addedLine = getOneLine();
-
-      if (addedLine == null) {
-        return sb.toString();
-      }
-
       sb.append("\n");
       sb.append(addedLine);
     } while (addedLine.length() < endLineString.length()
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
index 653fff8550..47fdb1ad55 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
@@ -559,7 +559,7 @@ private boolean setNextDirectoryInputStream() throws FileNotFoundException,
     input = maybeUncompressedPath(new Path(inputDirectoryPath,
         currentFileName));
 
-    return input != null;
+    return true;
   }
 
   private String readInputLine() throws IOException {
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java
index f4de3ad679..6e73582d7c 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java
@@ -67,7 +67,7 @@ HistoryEvent maybeEmitEvent(ParsedLine line, String taskAttemptIDName,
       MapAttempt20LineHistoryEventEmitter that =
           (MapAttempt20LineHistoryEventEmitter) thatg;
 
-      if (finishTime != null && "success".equalsIgnoreCase(status)) {
+      if ("success".equalsIgnoreCase(status)) {
         return new MapAttemptFinishedEvent
           (taskAttemptID,
             that.originalTaskType, status,
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
index c99441e1e0..1d85872c08 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
@@ -25,6 +25,8 @@
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import java.nio.charset.Charset;
+
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.ParserConfigurationException;
@@ -44,6 +46,7 @@ class ParsedConfigFile {
       Pattern.compile("_(job_[0-9]+_[0-9]+)_");
   private static final Pattern heapPattern =
       Pattern.compile("-Xmx([0-9]+)([mMgG])");
+  private static final Charset UTF_8 = Charset.forName("UTF-8");
 
   final int heapMegabytes;
 
@@ -100,7 +103,7 @@ private int maybeGetIntValue(String propName, String attr, String value,
     }
 
     try {
-      InputStream is = new ByteArrayInputStream(xmlString.getBytes());
+      InputStream is = new ByteArrayInputStream(xmlString.getBytes(UTF_8));
 
       DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
 
@@ -151,7 +154,7 @@ private int maybeGetIntValue(String propName, String attr, String value,
 
         properties.setProperty(attr, value);
 
-        if ("mapred.child.java.opts".equals(attr) && value != null) {
+        if ("mapred.child.java.opts".equals(attr)) {
           Matcher matcher = heapPattern.matcher(value);
           if (matcher.find()) {
             String heapSize = matcher.group(1);
@@ -164,11 +167,11 @@ private int maybeGetIntValue(String propName, String attr, String value,
           }
         }
 
-        if (MRJobConfig.QUEUE_NAME.equals(attr) && value != null) {
+        if (MRJobConfig.QUEUE_NAME.equals(attr)) {
           queue = value;
         }
 
-        if (MRJobConfig.JOB_NAME.equals(attr) && value != null) {
+        if (MRJobConfig.JOB_NAME.equals(attr)) {
           jobName = value;
         }
 
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
index 20ad66c540..014fb6c33d 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.nio.charset.Charset;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
@@ -42,6 +43,7 @@
  */
 public class RandomSeedGenerator {
   private static Log LOG = LogFactory.getLog(RandomSeedGenerator.class);
+  private static final Charset UTF_8 = Charset.forName("UTF-8");
 
   /** MD5 algorithm instance, one for each thread. */
   private static final ThreadLocal<MessageDigest> md5Holder =
@@ -72,7 +74,7 @@ public static long getSeed(String streamId, long masterSeed) {
     // We could have fed the bytes of masterSeed one by one to md5.update()
     // instead
     String str = streamId + '/' + masterSeed;
-    byte[] digest = md5.digest(str.getBytes());
+    byte[] digest = md5.digest(str.getBytes(UTF_8));
     // Create a long from the first 8 bytes of the digest
     // This is fine as MD5 has the avalanche property.
     // Paranoids could have XOR folded the other 8 bytes in too.
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java
index 74bac99ece..0261ea225f 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java
@@ -66,7 +66,7 @@ HistoryEvent maybeEmitEvent(ParsedLine line, String taskAttemptIDName,
       String shuffleFinish = line.get("SHUFFLE_FINISHED");
       String sortFinish = line.get("SORT_FINISHED");
 
-      if (finishTime != null && shuffleFinish != null && sortFinish != null
+      if (shuffleFinish != null && sortFinish != null
           && "success".equalsIgnoreCase(status)) {
         ReduceAttempt20LineHistoryEventEmitter that =
             (ReduceAttempt20LineHistoryEventEmitter) thatg;
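Reviewer note, separate from the patch body above: the getBytes() changes in ParsedConfigFile and RandomSeedGenerator replace the platform-default encoding with an explicit UTF-8 Charset, which is the usual fix for findbugs' default-encoding warnings (presumably DM_DEFAULT_ENCODING). A bare String.getBytes() can produce different bytes on hosts with different default charsets, while an explicit charset is deterministic. The sketch below illustrates only that pattern; the class name is hypothetical, and StandardCharsets.UTF_8 is shown as the Java 7+ equivalent of Charset.forName("UTF-8"), not as what the patch uses.

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    // Illustrative only; not part of hadoop-rumen.
    public class ExplicitCharsetSketch {
      // Same idiom as the patch: resolve the charset once and reuse it.
      private static final Charset UTF_8 = Charset.forName("UTF-8");

      public static void main(String[] args) {
        String str = "streamId/12345";

        // Platform-dependent: uses the JVM's default charset.
        byte[] defaultBytes = str.getBytes();

        // Deterministic: always UTF-8, independent of the platform default.
        byte[] utf8Bytes = str.getBytes(UTF_8);

        // Java 7+ equivalent that avoids the Charset.forName lookup.
        byte[] alsoUtf8 = str.getBytes(StandardCharsets.UTF_8);

        System.out.println(defaultBytes.length + " / " + utf8Bytes.length
            + " / " + alsoUtf8.length);
      }
    }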
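A second note, also not part of the patch: the guards dropped above (value != null in ParsedConfigFile, finishTime != null in the two attempt emitters, and the input != null return in HadoopLogsAnalyzer) appear to be the kind of condition findbugs reports as redundant or always true. In ParsedConfigFile the surrounding context makes this concrete: value is passed to properties.setProperty(attr, value) a few lines earlier, and a Hashtable-backed Properties rejects a null value with a NullPointerException, so a later value != null test on that path cannot be false. A minimal sketch of that situation, with hypothetical names and a made-up property key:

    import java.util.Properties;

    // Illustrative only; names and the key are hypothetical, not from hadoop-rumen.
    public class RedundantNullCheckSketch {
      static String pickQueue(Properties props, String attr, String value) {
        // Properties extends Hashtable, whose put() throws NullPointerException
        // for a null value, so value is known to be non-null past this call.
        props.setProperty(attr, value);

        // Redundant guard: value != null always holds here, which is the shape
        // of check the patch removes.
        if ("example.queue.name".equals(attr) && value != null) {
          return value;
        }
        return "default";
      }

      public static void main(String[] args) {
        System.out.println(pickQueue(new Properties(), "example.queue.name", "prod"));
      }
    }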