From 0a030871ddada24ef4f27ed270f912b7abe447ee Mon Sep 17 00:00:00 2001
From: Akira Ajisaka
Date: Wed, 4 Feb 2015 09:25:44 -0800
Subject: [PATCH] MAPREDUCE-6243. Fix findbugs warnings in hadoop-rumen.
 Contributed by Masatake Iwasaki.

(cherry picked from commit 34fe11c987730932f99dec6eb458a22624eb075b)
---
 hadoop-mapreduce-project/CHANGES.txt                    |  3 +++
 .../apache/hadoop/tools/rumen/Hadoop20JHParser.java     |  5 -----
 .../apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java   |  2 +-
 .../rumen/MapAttempt20LineHistoryEventEmitter.java      |  2 +-
 .../apache/hadoop/tools/rumen/ParsedConfigFile.java     | 11 +++++++----
 .../hadoop/tools/rumen/RandomSeedGenerator.java         |  4 +++-
 .../rumen/ReduceAttempt20LineHistoryEventEmitter.java   |  2 +-
 7 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 95f21526dd6..26eae70a438 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -98,6 +98,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6231. Grep example job is not working on a fully-distributed
     cluster. (aajisaka)
 
+    MAPREDUCE-6243. Fix findbugs warnings in hadoop-rumen. (Masatake Iwasaki
+    via aajisaka)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java
index 9cfd85d5ca8..08e825b4035 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Hadoop20JHParser.java
@@ -192,11 +192,6 @@ public class Hadoop20JHParser implements JobHistoryParser {
 
     do {
       addedLine = getOneLine();
-
-      if (addedLine == null) {
-        return sb.toString();
-      }
-
       sb.append("\n");
       sb.append(addedLine);
     } while (addedLine.length() < endLineString.length()
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
index 653fff8550e..47fdb1ad55b 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
@@ -559,7 +559,7 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
     input =
         maybeUncompressedPath(new Path(inputDirectoryPath, currentFileName));
 
-    return input != null;
+    return true;
   }
 
   private String readInputLine() throws IOException {
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java
index f4de3ad679e..6e73582d7c0 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java
@@ -67,7 +67,7 @@ public class MapAttempt20LineHistoryEventEmitter extends
       MapAttempt20LineHistoryEventEmitter that =
           (MapAttempt20LineHistoryEventEmitter) thatg;
 
-      if (finishTime != null && "success".equalsIgnoreCase(status)) {
+      if ("success".equalsIgnoreCase(status)) {
         return new MapAttemptFinishedEvent
           (taskAttemptID,
            that.originalTaskType, status,
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
index c99441e1e0a..1d85872c08d 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
@@ -25,6 +25,8 @@ import java.io.InputStream;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import java.nio.charset.Charset;
+
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.ParserConfigurationException;
@@ -44,6 +46,7 @@ class ParsedConfigFile {
       Pattern.compile("_(job_[0-9]+_[0-9]+)_");
   private static final Pattern heapPattern =
       Pattern.compile("-Xmx([0-9]+)([mMgG])");
+  private static final Charset UTF_8 = Charset.forName("UTF-8");
 
   final int heapMegabytes;
 
@@ -100,7 +103,7 @@ class ParsedConfigFile {
     }
 
     try {
-      InputStream is = new ByteArrayInputStream(xmlString.getBytes());
+      InputStream is = new ByteArrayInputStream(xmlString.getBytes(UTF_8));
 
       DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
 
@@ -151,7 +154,7 @@ class ParsedConfigFile {
 
           properties.setProperty(attr, value);
 
-          if ("mapred.child.java.opts".equals(attr) && value != null) {
+          if ("mapred.child.java.opts".equals(attr)) {
             Matcher matcher = heapPattern.matcher(value);
             if (matcher.find()) {
               String heapSize = matcher.group(1);
@@ -164,11 +167,11 @@ class ParsedConfigFile {
             }
           }
 
-          if (MRJobConfig.QUEUE_NAME.equals(attr) && value != null) {
+          if (MRJobConfig.QUEUE_NAME.equals(attr)) {
             queue = value;
           }
 
-          if (MRJobConfig.JOB_NAME.equals(attr) && value != null) {
+          if (MRJobConfig.JOB_NAME.equals(attr)) {
             jobName = value;
           }
 
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
index 20ad66c5403..014fb6c33d2 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.nio.charset.Charset;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
@@ -42,6 +43,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public class RandomSeedGenerator {
   private static Log LOG = LogFactory.getLog(RandomSeedGenerator.class);
+  private static final Charset UTF_8 = Charset.forName("UTF-8");
 
   /** MD5 algorithm instance, one for each thread. */
   private static final ThreadLocal<MessageDigest> md5Holder =
@@ -72,7 +74,7 @@ public class RandomSeedGenerator {
       // We could have fed the bytes of masterSeed one by one to md5.update()
       // instead
       String str = streamId + '/' + masterSeed;
-      byte[] digest = md5.digest(str.getBytes());
+      byte[] digest = md5.digest(str.getBytes(UTF_8));
       // Create a long from the first 8 bytes of the digest
       // This is fine as MD5 has the avalanche property.
       // Paranoids could have XOR folded the other 8 bytes in too.
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java
index 74bac99ece2..0261ea225f2 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java
@@ -66,7 +66,7 @@ public class ReduceAttempt20LineHistoryEventEmitter
       String shuffleFinish = line.get("SHUFFLE_FINISHED");
       String sortFinish = line.get("SORT_FINISHED");
 
-      if (finishTime != null && shuffleFinish != null && sortFinish != null
+      if (shuffleFinish != null && sortFinish != null
           && "success".equalsIgnoreCase(status)) {
         ReduceAttempt20LineHistoryEventEmitter that =
             (ReduceAttempt20LineHistoryEventEmitter) thatg;
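
Note (not part of the patch itself): the getBytes() hunks above follow the usual remedy for findbugs' default-encoding warning (DM_DEFAULT_ENCODING) by passing an explicit Charset so the resulting bytes do not depend on the platform default, while the removed null checks address redundant-null-check warnings on values that cannot be null at that point. Below is a minimal standalone sketch of the charset pattern only; the class and method names (ExplicitCharsetExample, toStream) are illustrative and do not exist in hadoop-rumen. StandardCharsets.UTF_8 (Java 7+) would behave identically to Charset.forName("UTF-8") used here and in the patch.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.Charset;

public class ExplicitCharsetExample {
  // Pinning the charset keeps the byte representation stable across JVMs
  // and avoids findbugs flagging the no-argument String.getBytes().
  private static final Charset UTF_8 = Charset.forName("UTF-8");

  // Illustrative helper: wraps a string as a stream of UTF-8 bytes,
  // mirroring the ParsedConfigFile change above.
  public static InputStream toStream(String xml) {
    return new ByteArrayInputStream(xml.getBytes(UTF_8));
  }

  public static void main(String[] args) throws Exception {
    InputStream in = toStream("<configuration/>");
    System.out.println(in.available());  // number of UTF-8 bytes (16 here)
  }
}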