YARN-2939. Fix new findbugs warnings in hadoop-yarn-common. (Li Lu via junping_du)

This commit is contained in:
Junping Du 2014-12-22 03:06:37 -08:00
parent ecf1469fa5
commit a696fbb001
15 changed files with 79 additions and 56 deletions

View File

@@ -261,6 +261,8 @@ Release 2.7.0 - UNRELEASED
YARN-2977. Fixed intermittent TestNMClient failure. YARN-2977. Fixed intermittent TestNMClient failure.
(Junping Du via ozawa) (Junping Du via ozawa)
YARN-2939. Fix new findbugs warnings in hadoop-yarn-common. (Li Lu via junping_du)
Release 2.6.0 - 2014-11-18 Release 2.6.0 - 2014-11-18
INCOMPATIBLE CHANGES INCOMPATIBLE CHANGES

View File

@@ -265,6 +265,11 @@
<Class name="org.apache.hadoop.yarn.YarnUncaughtExceptionHandler"/> <Class name="org.apache.hadoop.yarn.YarnUncaughtExceptionHandler"/>
<Bug pattern="DM_EXIT"/> <Bug pattern="DM_EXIT"/>
</Match> </Match>
<Match>
<Class name="org.apache.hadoop.yarn.event.AsyncDispatcher$2"/>
<Method name="run" />
<Bug pattern="DM_EXIT"/>
</Match>
<!-- AsyncDispatcher will kill the process if there is an error dispatching --> <!-- AsyncDispatcher will kill the process if there is an error dispatching -->
<Match> <Match>
@@ -391,4 +396,10 @@
<Bug pattern="UI_INHERITANCE_UNSAFE_GETRESOURCE"/> <Bug pattern="UI_INHERITANCE_UNSAFE_GETRESOURCE"/>
</Match> </Match>
<!-- Ignore the false alarms on DM_DEFAULT_ENCODING (encoding already set) -->
<Match>
<Class name="org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat$LogReader" />
<Bug pattern="DM_DEFAULT_ENCODING" />
</Match>
</FindBugsFilter> </FindBugsFilter>

View File

@@ -68,7 +68,7 @@ public class PriorityPBImpl extends Priority {
@Override @Override
public String toString() { public String toString() {
return Integer.valueOf(getPriority()).toString(); return Integer.toString(getPriority());
} }
} }

View File

@@ -136,9 +136,13 @@ public class TimelineClientImpl extends TimelineClient {
// Indicates if retries happened last time. Only tests should read it. // Indicates if retries happened last time. Only tests should read it.
// In unit tests, retryOn() calls should _not_ be concurrent. // In unit tests, retryOn() calls should _not_ be concurrent.
private boolean retried = false;
@Private @Private
@VisibleForTesting @VisibleForTesting
public boolean retried = false; boolean getRetired() {
return retried;
}
// Constructor with default retry settings // Constructor with default retry settings
public TimelineClientConnectionRetry(Configuration conf) { public TimelineClientConnectionRetry(Configuration conf) {

View File

@@ -30,6 +30,7 @@ import java.io.InputStreamReader;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.PrintStream; import java.io.PrintStream;
import java.io.Writer; import java.io.Writer;
import java.nio.charset.Charset;
import java.security.PrivilegedExceptionAction; import java.security.PrivilegedExceptionAction;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@@ -263,7 +264,7 @@ public class AggregatedLogFormat {
this.uploadedFiles.add(logFile); this.uploadedFiles.add(logFile);
} catch (IOException e) { } catch (IOException e) {
String message = logErrorMessage(logFile, e); String message = logErrorMessage(logFile, e);
out.write(message.getBytes()); out.write(message.getBytes(Charset.forName("UTF-8")));
} finally { } finally {
IOUtils.cleanup(LOG, in); IOUtils.cleanup(LOG, in);
} }
@@ -651,7 +652,7 @@ public class AggregatedLogFormat {
OutputStream os = null; OutputStream os = null;
PrintStream ps = null; PrintStream ps = null;
try { try {
os = new WriterOutputStream(writer); os = new WriterOutputStream(writer, Charset.forName("UTF-8"));
ps = new PrintStream(os); ps = new PrintStream(os);
while (true) { while (true) {
try { try {
@@ -781,7 +782,8 @@ public class AggregatedLogFormat {
currentLogData = currentLogData =
new BoundedInputStream(valueStream, currentLogLength); new BoundedInputStream(valueStream, currentLogLength);
currentLogData.setPropagateClose(false); currentLogData.setPropagateClose(false);
currentLogISR = new InputStreamReader(currentLogData); currentLogISR = new InputStreamReader(currentLogData,
Charset.forName("UTF-8"));
currentLogType = logType; currentLogType = logType;
} catch (EOFException e) { } catch (EOFException e) {
} }

View File

@@ -83,11 +83,16 @@ public class CommonNodeLabelsManager extends AbstractService {
protected NodeLabelsStore store; protected NodeLabelsStore store;
protected static class Label { protected static class Label {
public Resource resource; private Resource resource;
protected Label() { protected Label() {
this.resource = Resource.newInstance(0, 0); this.resource = Resource.newInstance(0, 0);
} }
public Resource getResource() {
return this.resource;
}
} }
/** /**

View File

@@ -29,7 +29,6 @@ import org.apache.hadoop.yarn.api.records.NodeId;
public abstract class NodeLabelsStore implements Closeable { public abstract class NodeLabelsStore implements Closeable {
protected final CommonNodeLabelsManager mgr; protected final CommonNodeLabelsManager mgr;
protected Configuration conf;
public NodeLabelsStore(CommonNodeLabelsManager mgr) { public NodeLabelsStore(CommonNodeLabelsManager mgr) {
this.mgr = mgr; this.mgr = mgr;
@@ -59,9 +58,7 @@ public abstract class NodeLabelsStore implements Closeable {
*/ */
public abstract void recover() throws IOException; public abstract void recover() throws IOException;
public void init(Configuration conf) throws Exception { public void init(Configuration conf) throws Exception {}
this.conf = conf;
}
public CommonNodeLabelsManager getNodeLabelsManager() { public CommonNodeLabelsManager getNodeLabelsManager() {
return mgr; return mgr;

View File

@@ -17,8 +17,10 @@
*/ */
package org.apache.hadoop.yarn.state; package org.apache.hadoop.yarn.state;
import java.io.FileWriter; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
@@ -149,7 +151,7 @@ public class Graph {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
if (this.parent == null) { if (this.parent == null) {
sb.append("digraph " + name + " {\n"); sb.append("digraph " + name + " {\n");
sb.append(String.format("graph [ label=%s, fontsize=24, fontname=Helvetica];\n", sb.append(String.format("graph [ label=%s, fontsize=24, fontname=Helvetica];%n",
wrapSafeString(name))); wrapSafeString(name)));
sb.append("node [fontsize=12, fontname=Helvetica];\n"); sb.append("node [fontsize=12, fontname=Helvetica];\n");
sb.append("edge [fontsize=9, fontcolor=blue, fontname=Arial];\n"); sb.append("edge [fontsize=9, fontcolor=blue, fontname=Arial];\n");
@@ -163,14 +165,14 @@
} }
for (Node n : nodes) { for (Node n : nodes) {
sb.append(String.format( sb.append(String.format(
"%s%s [ label = %s ];\n", "%s%s [ label = %s ];%n",
indent, indent,
wrapSafeString(n.getUniqueId()), wrapSafeString(n.getUniqueId()),
n.id)); n.id));
List<Edge> combinedOuts = combineEdges(n.outs); List<Edge> combinedOuts = combineEdges(n.outs);
for (Edge e : combinedOuts) { for (Edge e : combinedOuts) {
sb.append(String.format( sb.append(String.format(
"%s%s -> %s [ label = %s ];\n", "%s%s -> %s [ label = %s ];%n",
indent, indent,
wrapSafeString(e.from.getUniqueId()), wrapSafeString(e.from.getUniqueId()),
wrapSafeString(e.to.getUniqueId()), wrapSafeString(e.to.getUniqueId()),
@@ -186,7 +188,8 @@
} }
public void save(String filepath) throws IOException { public void save(String filepath) throws IOException {
FileWriter fout = new FileWriter(filepath); OutputStreamWriter fout = new OutputStreamWriter(
new FileOutputStream(filepath), Charset.forName("UTF-8"));
fout.write(generateGraphViz()); fout.write(generateGraphViz());
fout.close(); fout.close();
} }

View File

@@ -56,7 +56,7 @@ public class VisualizeStateMachine {
public static void main(String [] args) throws Exception { public static void main(String [] args) throws Exception {
if (args.length < 3) { if (args.length < 3) {
System.err.printf("Usage: %s <GraphName> <class[,class[,...]]> <OutputFile>\n", System.err.printf("Usage: %s <GraphName> <class[,class[,...]]> <OutputFile>%n",
VisualizeStateMachine.class.getName()); VisualizeStateMachine.class.getName());
System.exit(1); System.exit(1);
} }

View File

@@ -19,9 +19,11 @@
package org.apache.hadoop.yarn.util; package org.apache.hadoop.yarn.util;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.FileReader; import java.io.InputStreamReader;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.Charset;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@@ -147,9 +149,10 @@ public class LinuxResourceCalculatorPlugin extends ResourceCalculatorPlugin {
// Read "/proc/memInfo" file // Read "/proc/memInfo" file
BufferedReader in = null; BufferedReader in = null;
FileReader fReader = null; InputStreamReader fReader = null;
try { try {
fReader = new FileReader(procfsMemFile); fReader = new InputStreamReader(
new FileInputStream(procfsMemFile), Charset.forName("UTF-8"));
in = new BufferedReader(fReader); in = new BufferedReader(fReader);
} catch (FileNotFoundException f) { } catch (FileNotFoundException f) {
// shouldn't happen.... // shouldn't happen....
@@ -206,9 +209,10 @@ public class LinuxResourceCalculatorPlugin extends ResourceCalculatorPlugin {
} }
// Read "/proc/cpuinfo" file // Read "/proc/cpuinfo" file
BufferedReader in = null; BufferedReader in = null;
FileReader fReader = null; InputStreamReader fReader = null;
try { try {
fReader = new FileReader(procfsCpuFile); fReader = new InputStreamReader(
new FileInputStream(procfsCpuFile), Charset.forName("UTF-8"));
in = new BufferedReader(fReader); in = new BufferedReader(fReader);
} catch (FileNotFoundException f) { } catch (FileNotFoundException f) {
// shouldn't happen.... // shouldn't happen....
@@ -253,9 +257,10 @@ public class LinuxResourceCalculatorPlugin extends ResourceCalculatorPlugin {
private void readProcStatFile() { private void readProcStatFile() {
// Read "/proc/stat" file // Read "/proc/stat" file
BufferedReader in = null; BufferedReader in = null;
FileReader fReader = null; InputStreamReader fReader = null;
try { try {
fReader = new FileReader(procfsStatFile); fReader = new InputStreamReader(
new FileInputStream(procfsStatFile), Charset.forName("UTF-8"));
in = new BufferedReader(fReader); in = new BufferedReader(fReader);
} catch (FileNotFoundException f) { } catch (FileNotFoundException f) {
// shouldn't happen.... // shouldn't happen....

View File

@@ -20,10 +20,12 @@ package org.apache.hadoop.yarn.util;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.FileReader; import java.io.InputStreamReader;
import java.io.IOException; import java.io.IOException;
import java.math.BigInteger; import java.math.BigInteger;
import java.nio.charset.Charset;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.LinkedList; import java.util.LinkedList;
@@ -297,7 +299,7 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
} }
private static final String PROCESSTREE_DUMP_FORMAT = private static final String PROCESSTREE_DUMP_FORMAT =
"\t|- %s %s %d %d %s %d %d %d %d %s\n"; "\t|- %s %s %d %d %s %d %d %d %d %s%n";
public List<String> getCurrentProcessIDs() { public List<String> getCurrentProcessIDs() {
List<String> currentPIDs = new ArrayList<String>(); List<String> currentPIDs = new ArrayList<String>();
@@ -317,7 +319,7 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
// The header. // The header.
ret.append(String.format("\t|- PID PPID PGRPID SESSID CMD_NAME " ret.append(String.format("\t|- PID PPID PGRPID SESSID CMD_NAME "
+ "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) " + "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) "
+ "RSSMEM_USAGE(PAGES) FULL_CMD_LINE\n")); + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE%n"));
for (ProcessInfo p : processTree.values()) { for (ProcessInfo p : processTree.values()) {
if (p != null) { if (p != null) {
ret.append(String.format(PROCESSTREE_DUMP_FORMAT, p.getPid(), p ret.append(String.format(PROCESSTREE_DUMP_FORMAT, p.getPid(), p
@@ -489,10 +491,12 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
ProcessInfo ret = null; ProcessInfo ret = null;
// Read "procfsDir/<pid>/stat" file - typically /proc/<pid>/stat // Read "procfsDir/<pid>/stat" file - typically /proc/<pid>/stat
BufferedReader in = null; BufferedReader in = null;
FileReader fReader = null; InputStreamReader fReader = null;
try { try {
File pidDir = new File(procfsDir, pinfo.getPid()); File pidDir = new File(procfsDir, pinfo.getPid());
fReader = new FileReader(new File(pidDir, PROCFS_STAT_FILE)); fReader = new InputStreamReader(
new FileInputStream(
new File(pidDir, PROCFS_STAT_FILE)), Charset.forName("UTF-8"));
in = new BufferedReader(fReader); in = new BufferedReader(fReader);
} catch (FileNotFoundException f) { } catch (FileNotFoundException f) {
// The process vanished in the interim! // The process vanished in the interim!
@@ -671,11 +675,12 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
return ret; return ret;
} }
BufferedReader in = null; BufferedReader in = null;
FileReader fReader = null; InputStreamReader fReader = null;
try { try {
fReader = fReader = new InputStreamReader(
new FileReader(new File(new File(procfsDir, pid.toString()), new FileInputStream(
PROCFS_CMDLINE_FILE)); new File(new File(procfsDir, pid.toString()), PROCFS_CMDLINE_FILE)),
Charset.forName("UTF-8"));
} catch (FileNotFoundException f) { } catch (FileNotFoundException f) {
// The process vanished in the interim! // The process vanished in the interim!
return ret; return ret;
@@ -725,14 +730,15 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
private static void constructProcessSMAPInfo(ProcessTreeSmapMemInfo pInfo, private static void constructProcessSMAPInfo(ProcessTreeSmapMemInfo pInfo,
String procfsDir) { String procfsDir) {
BufferedReader in = null; BufferedReader in = null;
FileReader fReader = null; InputStreamReader fReader = null;
try { try {
File pidDir = new File(procfsDir, pInfo.getPid()); File pidDir = new File(procfsDir, pInfo.getPid());
File file = new File(pidDir, SMAPS); File file = new File(pidDir, SMAPS);
if (!file.exists()) { if (!file.exists()) {
return; return;
} }
fReader = new FileReader(file); fReader = new InputStreamReader(
new FileInputStream(file), Charset.forName("UTF-8"));
in = new BufferedReader(fReader); in = new BufferedReader(fReader);
ProcessSmapMemoryInfo memoryMappingInfo = null; ProcessSmapMemoryInfo memoryMappingInfo = null;
List<String> lines = IOUtils.readLines(in); List<String> lines = IOUtils.readLines(in);

View File

@@ -33,18 +33,6 @@ import org.apache.hadoop.util.Shell;
@InterfaceStability.Unstable @InterfaceStability.Unstable
public abstract class ResourceCalculatorPlugin extends Configured { public abstract class ResourceCalculatorPlugin extends Configured {
protected String processPid = null;
/**
* set the pid of the process for which <code>getProcResourceValues</code>
* will be invoked
*
* @param pid
*/
public void setProcessPid(String pid) {
processPid = pid;
}
/** /**
* Obtain the total size of the virtual memory present in the system. * Obtain the total size of the virtual memory present in the system.
* *

View File

@@ -162,10 +162,10 @@ public class WindowsBasedProcessTree extends ResourceCalculatorProcessTree {
StringBuilder ret = new StringBuilder(); StringBuilder ret = new StringBuilder();
// The header. // The header.
ret.append(String.format("\t|- PID " + "CPU_TIME(MILLIS) " ret.append(String.format("\t|- PID " + "CPU_TIME(MILLIS) "
+ "VMEM(BYTES) WORKING_SET(BYTES)\n")); + "VMEM(BYTES) WORKING_SET(BYTES)%n"));
for (ProcessInfo p : processTree.values()) { for (ProcessInfo p : processTree.values()) {
if (p != null) { if (p != null) {
ret.append(String.format("\t|- %s %d %d %d\n", p.pid, ret.append(String.format("\t|- %s %d %d %d%n", p.pid,
p.cpuTimeMs, p.vmem, p.workingSet)); p.cpuTimeMs, p.vmem, p.workingSet));
} }
} }

View File

@@ -197,7 +197,7 @@ public class TestTimelineClient {
ce.getMessage().contains("Connection retries limit exceeded")); ce.getMessage().contains("Connection retries limit exceeded"));
// we would expect this exception here, check if the client has retried // we would expect this exception here, check if the client has retried
Assert.assertTrue("Retry filter didn't perform any retries! ", client Assert.assertTrue("Retry filter didn't perform any retries! ", client
.connectionRetry.retried); .connectionRetry.getRetired());
} }
} }
@@ -272,7 +272,7 @@ public class TestTimelineClient {
.getMessage().contains("Connection retries limit exceeded")); .getMessage().contains("Connection retries limit exceeded"));
// we would expect this exception here, check if the client has retried // we would expect this exception here, check if the client has retried
Assert.assertTrue("Retry filter didn't perform any retries! ", Assert.assertTrue("Retry filter didn't perform any retries! ",
client.connectionRetry.retried); client.connectionRetry.getRetired());
} }
private static ClientResponse mockEntityClientResponse( private static ClientResponse mockEntityClientResponse(

View File

@@ -351,7 +351,7 @@ public class RMNodeLabelsManager extends CommonNodeLabelsManager {
if (oldLabels.isEmpty()) { if (oldLabels.isEmpty()) {
// update labels // update labels
Label label = labelCollections.get(NO_LABEL); Label label = labelCollections.get(NO_LABEL);
Resources.subtractFrom(label.resource, oldNM.resource); Resources.subtractFrom(label.getResource(), oldNM.resource);
// update queues, all queue can access this node // update queues, all queue can access this node
for (Queue q : queueCollections.values()) { for (Queue q : queueCollections.values()) {
@@ -364,7 +364,7 @@ public class RMNodeLabelsManager extends CommonNodeLabelsManager {
if (null == label) { if (null == label) {
continue; continue;
} }
Resources.subtractFrom(label.resource, oldNM.resource); Resources.subtractFrom(label.getResource(), oldNM.resource);
} }
// update queues, only queue can access this node will be subtract // update queues, only queue can access this node will be subtract
@@ -383,7 +383,7 @@ public class RMNodeLabelsManager extends CommonNodeLabelsManager {
if (newLabels.isEmpty()) { if (newLabels.isEmpty()) {
// update labels // update labels
Label label = labelCollections.get(NO_LABEL); Label label = labelCollections.get(NO_LABEL);
Resources.addTo(label.resource, newNM.resource); Resources.addTo(label.getResource(), newNM.resource);
// update queues, all queue can access this node // update queues, all queue can access this node
for (Queue q : queueCollections.values()) { for (Queue q : queueCollections.values()) {
@@ -393,7 +393,7 @@ public class RMNodeLabelsManager extends CommonNodeLabelsManager {
// update labels // update labels
for (String labelName : newLabels) { for (String labelName : newLabels) {
Label label = labelCollections.get(labelName); Label label = labelCollections.get(labelName);
Resources.addTo(label.resource, newNM.resource); Resources.addTo(label.getResource(), newNM.resource);
} }
// update queues, only queue can access this node will be subtract // update queues, only queue can access this node will be subtract
@@ -414,7 +414,7 @@ public class RMNodeLabelsManager extends CommonNodeLabelsManager {
if (null == labelCollections.get(label)) { if (null == labelCollections.get(label)) {
return Resources.none(); return Resources.none();
} }
return labelCollections.get(label).resource; return labelCollections.get(label).getResource();
} finally { } finally {
readLock.unlock(); readLock.unlock();
} }