YARN-2937. Fixed new findbugs warnings in hadoop-yarn-nodemanager. Contributed by Varun Saxena.

Zhijie Shen 2014-12-23 20:32:36 -08:00
parent d468c9aaf1
commit 41a548a916
11 changed files with 61 additions and 47 deletions

@@ -278,6 +278,9 @@ Release 2.7.0 - UNRELEASED
     YARN-2940. Fix new findbugs warnings in rest of the hadoop-yarn components. (Li Lu
     via junping_du)
 
+    YARN-2937. Fixed new findbugs warnings in hadoop-yarn-nodemanager. (Varun Saxena
+    via zjshen)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

@@ -407,4 +407,14 @@
     <Class name="org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils$KeyParser" />
     <Bug pattern="EI_EXPOSE_REP2" />
   </Match>
+  <!-- Ignore unrelated RV_RETURN_VALUE_IGNORED_BAD_PRACTICE warnings. No need to wait for result from executor service -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncher" />
+    <Bug pattern="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService" />
+    <Bug pattern="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE" />
+  </Match>
 </FindBugsFilter>
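
Note: RV_RETURN_VALUE_IGNORED_BAD_PRACTICE fires when the Future returned by
ExecutorService.submit() is discarded. A minimal standalone sketch of the
fire-and-forget style these two exclusions cover (class and task are
illustrative, not the NodeManager code):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class FireAndForget {
      public static void main(String[] args) {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        // FindBugs flags the ignored Future here; nothing needs to wait on
        // the result, so the warning is excluded rather than "fixed".
        pool.submit(new Runnable() {
          @Override
          public void run() {
            System.out.println("launch submitted");
          }
        });
        pool.shutdown();
      }
    }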

@@ -226,7 +226,7 @@ public abstract class ContainerExecutor implements Configurable {
     PrintStream pout = null;
     try {
-      pout = new PrintStream(out);
+      pout = new PrintStream(out, false, "UTF-8");
       sb.write(pout);
     } finally {
       if (out != null) {
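
Note: the one-argument PrintStream constructor encodes with the platform
default charset (FindBugs: DM_DEFAULT_ENCODING); the three-argument form pins
both autoflush and the charset, so generated scripts are byte-identical across
nodes. A small sketch of the difference (illustrative only):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    public class PinnedCharset {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // new PrintStream(out) would fall back to file.encoding;
        // this form fixes autoflush=false and charset=UTF-8.
        PrintStream pout = new PrintStream(out, false, "UTF-8");
        pout.println("echo launch_container");
        pout.flush();
        System.out.println(out.toString("UTF-8"));
      }
    }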

@@ -32,12 +32,11 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.List;
-import java.util.Random;
 import java.util.Map;
 
+import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnsupportedFileSystemException;
@@ -292,7 +291,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
     try {
       out = lfs.create(wrapperScriptPath, EnumSet.of(CREATE, OVERWRITE));
-      pout = new PrintStream(out);
+      pout = new PrintStream(out, false, "UTF-8");
       writeLocalWrapperScript(launchDst, pidFile, pout);
     } finally {
       IOUtils.cleanup(LOG, pout, out);
@@ -345,7 +344,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
     PrintStream pout = null;
     try {
       out = lfs.create(sessionScriptPath, EnumSet.of(CREATE, OVERWRITE));
-      pout = new PrintStream(out);
+      pout = new PrintStream(out, false, "UTF-8");
       // We need to do a move as writing to a file is not atomic
       // Process reading a file being written to may get garbled data
       // hence write pid to tmp file first followed by a mv
@@ -539,8 +538,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
     // make probability to pick a directory proportional to
     // the available space on the directory.
-    Random r = new Random();
-    long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
+    long randomPosition = RandomUtils.nextLong() % totalAvailable;
     int dir = 0;
     // skip zero available space directory,
     // because totalAvailable is greater than 0 and randomPosition
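
Note: the replaced line was a real bug, not just FindBugs noise. Math.abs
cannot represent -Long.MIN_VALUE, so it returns Long.MIN_VALUE unchanged, and
on that (rare) draw the modulo goes negative and the directory index
underflows. RandomUtils.nextLong() from commons-lang 2.x returns a
non-negative value, which sidesteps the overflow. A two-line demonstration:

    public class AbsOverflow {
      public static void main(String[] args) {
        System.out.println(Math.abs(Long.MIN_VALUE));       // -9223372036854775808
        System.out.println(Math.abs(Long.MIN_VALUE) % 100); // -8: a negative "position"
      }
    }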

@@ -22,6 +22,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
+
+import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -59,7 +61,6 @@ import java.util.Random;
 import java.util.Set;
 import java.util.regex.Pattern;
 import java.net.InetSocketAddress;
-
 import static org.apache.hadoop.fs.CreateFlag.CREATE;
 import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
@@ -310,9 +311,9 @@ public class DockerContainerExecutor extends ContainerExecutor {
     PrintStream ps = null;
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     try {
-      pout = new PrintStream(out);
+      pout = new PrintStream(out, false, "UTF-8");
       if (LOG.isDebugEnabled()) {
-        ps = new PrintStream(baos);
+        ps = new PrintStream(baos, false, "UTF-8");
         sb.write(ps);
       }
       sb.write(pout);
@@ -326,7 +327,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
       }
     }
     if (LOG.isDebugEnabled()) {
-      LOG.debug("Script: " + baos.toString());
+      LOG.debug("Script: " + baos.toString("UTF-8"));
     }
   }
@@ -440,7 +441,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
     try {
       out = lfs.create(wrapperScriptPath, EnumSet.of(CREATE, OVERWRITE));
-      pout = new PrintStream(out);
+      pout = new PrintStream(out, false, "UTF-8");
       writeLocalWrapperScript(launchDst, pidFile, pout);
     } finally {
       IOUtils.cleanup(LOG, pout, out);
@@ -498,7 +499,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
     PrintStream pout = null;
     try {
       out = lfs.create(sessionScriptPath, EnumSet.of(CREATE, OVERWRITE));
-      pout = new PrintStream(out);
+      pout = new PrintStream(out, false, "UTF-8");
       // We need to do a move as writing to a file is not atomic
       // Process reading a file being written to may get garbled data
       // hence write pid to tmp file first followed by a mv
@@ -736,8 +737,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
     // make probability to pick a directory proportional to
     // the available space on the directory.
-    Random r = new Random();
-    long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
+    long randomPosition = RandomUtils.nextLong() % totalAvailable;
     int dir = 0;
     // skip zero available space directory,
     // because totalAvailable is greater than 0 and randomPosition

@@ -301,9 +301,6 @@ public class LinuxContainerExecutor extends ContainerExecutor {
         return ExitCode.TERMINATED.getExitCode();
       }
     } catch (ExitCodeException e) {
-      if (null == shExec) {
-        return -1;
-      }
       int exitCode = shExec.getExitCode();
       LOG.warn("Exit code from container " + containerId + " is : " + exitCode);
       // 143 (SIGTERM) and 137 (SIGKILL) exit codes means the container was
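
Note: the deleted null check was unreachable: an ExitCodeException can only
escape from shExec.execute(), which runs only after shExec is assigned, so
shExec is provably non-null inside the catch. A standalone sketch of that
control flow (not the NodeManager code; the command is illustrative):

    import org.apache.hadoop.util.Shell.ExitCodeException;
    import org.apache.hadoop.util.Shell.ShellCommandExecutor;

    public class ExitCodeFlow {
      public static void main(String[] args) throws Exception {
        ShellCommandExecutor shExec =
            new ShellCommandExecutor(new String[] {"false"});
        try {
          shExec.execute(); // the only call that can throw ExitCodeException
        } catch (ExitCodeException e) {
          // shExec was necessarily assigned before execute() could throw
          System.out.println("exit code: " + shExec.getExitCode());
        }
      }
    }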

@@ -29,6 +29,7 @@ import java.io.OutputStream;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.net.URISyntaxException;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -501,7 +502,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
       try
       {
         BufferedReader lines = new BufferedReader(
-            new InputStreamReader(stream));
+            new InputStreamReader(stream, Charset.forName("UTF-8")));
         char[] buf = new char[512];
         int nRead;
         while ((nRead = lines.read(buf, 0, buf.length)) > 0) {
@@ -718,7 +719,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
       }
       catch(Throwable e) {
         LOG.warn(String.format(
-            "An exception occured during the cleanup of localizer job %s:\n%s",
+            "An exception occured during the cleanup of localizer job %s:%n%s",
             localizerPid,
             org.apache.hadoop.util.StringUtils.stringifyException(e)));
       }
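
Note: in a format string, %n is the portable spelling of a newline (it expands
to the platform line separator), which is what the FindBugs
VA_FORMAT_STRING_USES_NEWLINE check asks for:

    public class NewlineToken {
      public static void main(String[] args) {
        // \n is always LF; %n becomes \r\n on Windows and \n elsewhere.
        System.out.print(String.format("cleanup failed:%n%s%n", "stack trace here"));
      }
    }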

@@ -37,7 +37,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;

@@ -20,9 +20,13 @@ package org.apache.hadoop.yarn.server.nodemanager.util;
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.io.Writer;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -38,6 +42,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -235,7 +240,6 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
   private void updateCgroup(String controller, String groupName, String param,
       String value) throws IOException {
-    FileWriter f = null;
     String path = pathForCgroup(controller, groupName);
     param = controller + "." + param;
@@ -243,19 +247,25 @@
       LOG.debug("updateCgroup: " + path + ": " + param + "=" + value);
     }
 
+    PrintWriter pw = null;
     try {
-      f = new FileWriter(path + "/" + param, false);
-      f.write(value);
+      File file = new File(path + "/" + param);
+      Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+      pw = new PrintWriter(w);
+      pw.write(value);
     } catch (IOException e) {
       throw new IOException("Unable to set " + param + "=" + value +
           " for cgroup at: " + path, e);
     } finally {
-      if (f != null) {
-        try {
-          f.close();
-        } catch (IOException e) {
-          LOG.warn("Unable to close cgroup file: " +
-              path, e);
+      if (pw != null) {
+        boolean hasError = pw.checkError();
+        pw.close();
+        if(hasError) {
+          throw new IOException("Unable to set " + param + "=" + value +
+              " for cgroup at: " + path);
+        }
+        if(pw.checkError()) {
+          throw new IOException("Error while closing cgroup file " + path);
         }
       }
     }
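
Note: unlike FileWriter, PrintWriter never throws on write; it latches an
internal error flag instead. That is why the rewrite polls checkError() twice:
once before close() for buffered write failures (e.g. the kernel rejecting a
cgroup value) and once after for close() failures. A standalone sketch of the
pattern (the path is illustrative):

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.PrintWriter;

    public class QuietWriter {
      public static void main(String[] args) throws IOException {
        PrintWriter pw = new PrintWriter(new OutputStreamWriter(
            new FileOutputStream("/tmp/cgroup-param-demo"), "UTF-8"));
        pw.write("1024");
        boolean hasError = pw.checkError(); // flushes, reports write errors
        pw.close();
        if (hasError || pw.checkError()) {  // second call catches close() failures
          throw new IOException("write failed");
        }
      }
    }
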
@@ -376,7 +386,8 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
     BufferedReader in = null;
 
     try {
-      in = new BufferedReader(new FileReader(new File(getMtabFileName())));
+      FileInputStream fis = new FileInputStream(new File(getMtabFileName()));
+      in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
 
       for (String str = in.readLine(); str != null;
           str = in.readLine()) {
@@ -396,12 +407,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
     } catch (IOException e) {
       throw new IOException("Error while reading " + getMtabFileName(), e);
     } finally {
-      // Close the streams
-      try {
-        in.close();
-      } catch (IOException e2) {
-        LOG.warn("Error closing the stream: " + getMtabFileName(), e2);
-      }
+      IOUtils.cleanup(LOG, in);
     }
 
     return ret;
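
Note: org.apache.hadoop.io.IOUtils.cleanup(Log, Closeable...) null-checks each
argument and logs, rather than propagates, close() failures. That matches what
the hand-rolled finally block did, minus its NullPointerException risk when
the reader was never opened. Usage sketch (the file path is illustrative):

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.InputStreamReader;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.io.IOUtils;

    public class QuietClose {
      private static final Log LOG = LogFactory.getLog(QuietClose.class);

      public static void main(String[] args) throws Exception {
        BufferedReader in = null;
        try {
          in = new BufferedReader(new InputStreamReader(
              new FileInputStream("/proc/mounts"), "UTF-8"));
          System.out.println(in.readLine());
        } finally {
          IOUtils.cleanup(LOG, in); // safe even if in is still null
        }
      }
    }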

@@ -19,8 +19,9 @@ package org.apache.hadoop.yarn.server.nodemanager.util;
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -49,14 +50,14 @@ public class ProcessIdFileReader {
       LOG.debug("Accessing pid from pid file " + path);
     }
     String processId = null;
-    FileReader fileReader = null;
     BufferedReader bufReader = null;
 
     try {
       File file = new File(path.toString());
       if (file.exists()) {
-        fileReader = new FileReader(file);
-        bufReader = new BufferedReader(fileReader);
+        FileInputStream fis = new FileInputStream(file);
+        bufReader = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
         while (true) {
           String line = bufReader.readLine();
           if (line == null) {
@@ -91,9 +92,6 @@ public class ProcessIdFileReader {
         }
       }
     } finally {
-      if (fileReader != null) {
-        fileReader.close();
-      }
       if (bufReader != null) {
         bufReader.close();
       }

@@ -28,6 +28,7 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -151,7 +152,8 @@ public class ContainerLogsPage extends NMView {
       }
 
       IOUtils.skipFully(logByteStream, start);
-      InputStreamReader reader = new InputStreamReader(logByteStream);
+      InputStreamReader reader =
+          new InputStreamReader(logByteStream, Charset.forName("UTF-8"));
       int bufferSize = 65536;
       char[] cbuf = new char[bufferSize];