MAPREDUCE-6243. Fix findbugs warnings in hadoop-rumen. Contributed by Masatake Iwasaki.

(cherry picked from commit 34fe11c987)
Akira Ajisaka  2015-02-04 09:25:44 -08:00
commit 0a030871dd (parent f92d4fa20f)
7 changed files with 16 additions and 13 deletions

CHANGES.txt

@@ -98,6 +98,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6231. Grep example job is not working on a fully-distributed
     cluster. (aajisaka)
+    MAPREDUCE-6243. Fix findbugs warnings in hadoop-rumen. (Masatake Iwasaki
+    via aajisaka)
 Release 2.6.0 - 2014-11-18
   INCOMPATIBLE CHANGES

org/apache/hadoop/tools/rumen/Hadoop20JHParser.java

@@ -192,11 +192,6 @@ public class Hadoop20JHParser implements JobHistoryParser {
       do {
         addedLine = getOneLine();
-        if (addedLine == null) {
-          return sb.toString();
-        }
         sb.append("\n");
         sb.append(addedLine);
       } while (addedLine.length() < endLineString.length()
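Note: dropping the removed early return is only safe if getOneLine() never actually returns null, for example because it signals end of input by throwing an EOFException; in that case findbugs reports the null test as a redundant check. A small self-contained sketch of a loop with that contract (the class and helper names here are invented for illustration, not taken from the patch):

    import java.io.BufferedReader;
    import java.io.EOFException;
    import java.io.IOException;
    import java.io.StringReader;

    public class FullLineExample {
      private final BufferedReader reader;

      FullLineExample(String text) {
        this.reader = new BufferedReader(new StringReader(text));
      }

      // Never returns null: end of input is reported by throwing, which is
      // what makes a caller-side null check on the result redundant.
      private String getOneLine() throws IOException {
        String line = reader.readLine();
        if (line == null) {
          throw new EOFException("unexpected end of input");
        }
        return line;
      }

      // Accumulates lines until one is at least as long as the terminator,
      // roughly the shape of the loop shown in the hunk above.
      String getFullLine(String endLineString) throws IOException {
        StringBuilder sb = new StringBuilder(getOneLine());
        String addedLine;
        do {
          addedLine = getOneLine();
          sb.append("\n");
          sb.append(addedLine);
        } while (addedLine.length() < endLineString.length());
        return sb.toString();
      }

      public static void main(String[] args) throws IOException {
        FullLineExample p = new FullLineExample("short\nmid\na long enough line");
        System.out.println(p.getFullLine("1234567890"));
      }
    }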

org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java

@@ -559,7 +559,7 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
       input =
           maybeUncompressedPath(new Path(inputDirectoryPath, currentFileName));
-      return input != null;
+      return true;
     }
   private String readInputLine() throws IOException {

org/apache/hadoop/tools/rumen/MapAttempt20LineHistoryEventEmitter.java

@@ -67,7 +67,7 @@ public class MapAttempt20LineHistoryEventEmitter extends
       MapAttempt20LineHistoryEventEmitter that =
           (MapAttempt20LineHistoryEventEmitter) thatg;
-      if (finishTime != null && "success".equalsIgnoreCase(status)) {
+      if ("success".equalsIgnoreCase(status)) {
         return new MapAttemptFinishedEvent
           (taskAttemptID,
            that.originalTaskType, status,

org/apache/hadoop/tools/rumen/ParsedConfigFile.java

@@ -25,6 +25,8 @@ import java.io.InputStream;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
+import java.nio.charset.Charset;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.ParserConfigurationException;
@@ -44,6 +46,7 @@ class ParsedConfigFile {
       Pattern.compile("_(job_[0-9]+_[0-9]+)_");
   private static final Pattern heapPattern =
       Pattern.compile("-Xmx([0-9]+)([mMgG])");
+  private static final Charset UTF_8 = Charset.forName("UTF-8");
   final int heapMegabytes;
@@ -100,7 +103,7 @@ class ParsedConfigFile {
     }
     try {
-      InputStream is = new ByteArrayInputStream(xmlString.getBytes());
+      InputStream is = new ByteArrayInputStream(xmlString.getBytes(UTF_8));
       DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
@@ -151,7 +154,7 @@ class ParsedConfigFile {
         properties.setProperty(attr, value);
-        if ("mapred.child.java.opts".equals(attr) && value != null) {
+        if ("mapred.child.java.opts".equals(attr)) {
           Matcher matcher = heapPattern.matcher(value);
           if (matcher.find()) {
             String heapSize = matcher.group(1);
@@ -164,11 +167,11 @@ class ParsedConfigFile {
           }
         }
-        if (MRJobConfig.QUEUE_NAME.equals(attr) && value != null) {
+        if (MRJobConfig.QUEUE_NAME.equals(attr)) {
          queue = value;
        }
-        if (MRJobConfig.JOB_NAME.equals(attr) && value != null) {
+        if (MRJobConfig.JOB_NAME.equals(attr)) {
          jobName = value;
        }
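Here and in RandomSeedGenerator below, the patch swaps the zero-argument String.getBytes() call, which depends on the JVM's platform default encoding, for an explicit UTF-8 Charset; this is the usual way to clear findbugs' DM_DEFAULT_ENCODING warning. A minimal standalone sketch of the pattern (class and variable names are illustrative only, not from the patch):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.Charset;

    public class ExplicitCharsetExample {
      // Pin the encoding once, mirroring the UTF_8 constant the patch adds.
      private static final Charset UTF_8 = Charset.forName("UTF-8");

      public static void main(String[] args) throws IOException {
        String xmlString = "<configuration></configuration>";

        // xmlString.getBytes() with no argument would use the platform default
        // encoding (the DM_DEFAULT_ENCODING warning); passing a Charset makes
        // the byte representation independent of the JVM's locale settings.
        InputStream is = new ByteArrayInputStream(xmlString.getBytes(UTF_8));

        System.out.println("first byte: " + is.read());
      }
    }

On Java 7 and later the same thing can be written with StandardCharsets.UTF_8, avoiding the Charset.forName lookup; the patch keeps the forName form.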

org/apache/hadoop/tools/rumen/RandomSeedGenerator.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen;
+import java.nio.charset.Charset;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
@@ -42,6 +43,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public class RandomSeedGenerator {
   private static Log LOG = LogFactory.getLog(RandomSeedGenerator.class);
+  private static final Charset UTF_8 = Charset.forName("UTF-8");
   /** MD5 algorithm instance, one for each thread. */
   private static final ThreadLocal<MessageDigest> md5Holder =
@@ -72,7 +74,7 @@ public class RandomSeedGenerator {
     // We could have fed the bytes of masterSeed one by one to md5.update()
     // instead
     String str = streamId + '/' + masterSeed;
-    byte[] digest = md5.digest(str.getBytes());
+    byte[] digest = md5.digest(str.getBytes(UTF_8));
     // Create a long from the first 8 bytes of the digest
     // This is fine as MD5 has the avalanche property.
     // Paranoids could have XOR folded the other 8 bytes in too.

org/apache/hadoop/tools/rumen/ReduceAttempt20LineHistoryEventEmitter.java

@@ -66,7 +66,7 @@ public class ReduceAttempt20LineHistoryEventEmitter
       String shuffleFinish = line.get("SHUFFLE_FINISHED");
       String sortFinish = line.get("SORT_FINISHED");
-      if (finishTime != null && shuffleFinish != null && sortFinish != null
+      if (shuffleFinish != null && sortFinish != null
           && "success".equalsIgnoreCase(status)) {
         ReduceAttempt20LineHistoryEventEmitter that =
             (ReduceAttempt20LineHistoryEventEmitter) thatg;
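The remaining edits (return true in HadoopLogsAnalyzer, the dropped finishTime tests in the two attempt emitters, and the value != null tests in ParsedConfigFile) all remove comparisons against values that are already guaranteed non-null at that point, which is the usual fix for findbugs' redundant-nullcheck (RCN) warnings. A rough standalone sketch of that before/after shape, with invented names:

    public class RedundantNullCheckExample {
      // A helper that never returns null: it either produces a value or throws.
      private static String readRecord(String source) {
        if (source.isEmpty()) {
          throw new IllegalStateException("no more input");
        }
        return source.toUpperCase();
      }

      public static void main(String[] args) {
        String record = readRecord("abc");

        // Before: a null comparison on a value that cannot be null here;
        // findbugs reports it as a redundant null check.
        // if (record != null && record.startsWith("A")) { ... }

        // After: the comparison is dropped, matching the shape of the patch.
        if (record.startsWith("A")) {
          System.out.println("record = " + record);
        }
      }
    }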