diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 1f1f92d4aa8..a2085015cf0 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -278,6 +278,9 @@ Release 2.7.0 - UNRELEASED
YARN-2940. Fix new findbugs warnings in rest of the hadoop-yarn components. (Li Lu
via junping_du)
+ YARN-2937. Fixed new findbugs warnings in hadoop-yarn-nodemanager. (Varun Saxena
+ via zjshen)
+
Release 2.6.0 - 2014-11-18
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
index 34837471527..de0210f9113 100644
--- a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
+++ b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
@@ -407,4 +407,14 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
index 327f8824a4b..77193df3280 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
@@ -226,7 +226,7 @@ public abstract class ContainerExecutor implements Configurable {
PrintStream pout = null;
try {
- pout = new PrintStream(out);
+ pout = new PrintStream(out, false, "UTF-8");
sb.write(pout);
} finally {
if (out != null) {
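
Every PrintStream change in this patch has the same shape: the one-argument constructor picks up the platform default encoding (the class of warning findbugs reports as reliance on default encoding), so the patch names UTF-8 explicitly. A minimal, self-contained sketch of the difference; the stream and the script line below are invented for illustration:

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import java.io.UnsupportedEncodingException;

    public class ExplicitEncodingSketch {
      public static void main(String[] args) throws UnsupportedEncodingException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // new PrintStream(out) would use the JVM's platform default encoding,
        // which can differ between nodes; naming the charset makes the bytes
        // written to the launch script deterministic.
        PrintStream pout = new PrintStream(out, false, "UTF-8");
        pout.println("export SAMPLE_ENV_VAR=/tmp/sample"); // made-up script line
        pout.flush();
        System.out.println(out.size() + " bytes written as UTF-8");
      }
    }
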
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
index cc2de999bc8..f3d21210b85 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
@@ -32,12 +32,11 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
-import java.util.Random;
import java.util.Map;
+import org.apache.commons.lang.math.RandomUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
@@ -292,7 +291,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
try {
out = lfs.create(wrapperScriptPath, EnumSet.of(CREATE, OVERWRITE));
- pout = new PrintStream(out);
+ pout = new PrintStream(out, false, "UTF-8");
writeLocalWrapperScript(launchDst, pidFile, pout);
} finally {
IOUtils.cleanup(LOG, pout, out);
@@ -345,7 +344,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
PrintStream pout = null;
try {
out = lfs.create(sessionScriptPath, EnumSet.of(CREATE, OVERWRITE));
- pout = new PrintStream(out);
+ pout = new PrintStream(out, false, "UTF-8");
// We need to do a move as writing to a file is not atomic
// Process reading a file being written to may get garbled data
// hence write pid to tmp file first followed by a mv
@@ -539,8 +538,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
// make probability to pick a directory proportional to
// the available space on the directory.
- Random r = new Random();
- long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
+ long randomPosition = RandomUtils.nextLong() % totalAvailable;
int dir = 0;
// skip zero available space directory,
// because totalAvailable is greater than 0 and randomPosition
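
Only the random draw and the explanatory comments of the directory-selection code are visible in this hunk. The sketch below reconstructs the proportional-pick idea as a standalone method; the loop shape and the sample sizes are illustrative, not copied from the NodeManager source, and it assumes, as the deleted Math.abs(...) line suggests, that commons-lang's RandomUtils.nextLong() does not return negative values.

    import org.apache.commons.lang.math.RandomUtils;

    public class ProportionalPickSketch {
      /**
       * Picks an index with probability proportional to availableSpace[i].
       * Assumes the caller has already checked that the total is greater
       * than zero, as the surrounding NodeManager code does.
       */
      static int pickDirectory(long[] availableSpace) {
        long totalAvailable = 0;
        for (long space : availableSpace) {
          totalAvailable += space;
        }
        long randomPosition = RandomUtils.nextLong() % totalAvailable;
        int dir = 0;
        // Walk the buckets until the random position falls inside one;
        // zero-sized directories are skipped automatically.
        while (randomPosition >= availableSpace[dir]) {
          randomPosition -= availableSpace[dir];
          dir++;
        }
        return dir;
      }

      public static void main(String[] args) {
        long[] sizes = {0L, 40L, 10L, 50L}; // made-up free-space values
        System.out.println("picked directory " + pickDirectory(sizes));
      }
    }
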
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java
index d8dd8907117..c854173fad3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java
@@ -22,6 +22,8 @@ import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
+
+import org.apache.commons.lang.math.RandomUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -59,7 +61,6 @@ import java.util.Random;
import java.util.Set;
import java.util.regex.Pattern;
import java.net.InetSocketAddress;
-
import static org.apache.hadoop.fs.CreateFlag.CREATE;
import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
@@ -310,9 +311,9 @@ public class DockerContainerExecutor extends ContainerExecutor {
PrintStream ps = null;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
- pout = new PrintStream(out);
+ pout = new PrintStream(out, false, "UTF-8");
if (LOG.isDebugEnabled()) {
- ps = new PrintStream(baos);
+ ps = new PrintStream(baos, false, "UTF-8");
sb.write(ps);
}
sb.write(pout);
@@ -326,7 +327,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
}
}
if (LOG.isDebugEnabled()) {
- LOG.debug("Script: " + baos.toString());
+ LOG.debug("Script: " + baos.toString("UTF-8"));
}
}
@@ -440,7 +441,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
try {
out = lfs.create(wrapperScriptPath, EnumSet.of(CREATE, OVERWRITE));
- pout = new PrintStream(out);
+ pout = new PrintStream(out, false, "UTF-8");
writeLocalWrapperScript(launchDst, pidFile, pout);
} finally {
IOUtils.cleanup(LOG, pout, out);
@@ -498,7 +499,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
PrintStream pout = null;
try {
out = lfs.create(sessionScriptPath, EnumSet.of(CREATE, OVERWRITE));
- pout = new PrintStream(out);
+ pout = new PrintStream(out, false, "UTF-8");
// We need to do a move as writing to a file is not atomic
// Process reading a file being written to may get garbled data
// hence write pid to tmp file first followed by a mv
@@ -736,8 +737,7 @@ public class DockerContainerExecutor extends ContainerExecutor {
// make probability to pick a directory proportional to
// the available space on the directory.
- Random r = new Random();
- long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
+ long randomPosition = RandomUtils.nextLong() % totalAvailable;
int dir = 0;
// skip zero available space directory,
// because totalAvailable is greater than 0 and randomPosition
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
index 4db4ef26279..4606f0c45da 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
@@ -301,9 +301,6 @@ public class LinuxContainerExecutor extends ContainerExecutor {
return ExitCode.TERMINATED.getExitCode();
}
} catch (ExitCodeException e) {
- if (null == shExec) {
- return -1;
- }
int exitCode = shExec.getExitCode();
LOG.warn("Exit code from container " + containerId + " is : " + exitCode);
// 143 (SIGTERM) and 137 (SIGKILL) exit codes means the container was
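
The null check deleted above sits inside the catch of an ExitCodeException; that exception can only come out of a call on the executor itself, so the reference cannot be null at that point and findbugs flags the guard as redundant. A tiny standalone illustration of the same shape, with placeholder names rather than the real ContainerExecutor types:

    public class RedundantNullCheckSketch {
      static class FakeShellExecutor {
        int exitCode;
        void execute() throws Exception {
          exitCode = 143; // pretend the container was terminated by SIGTERM
          throw new Exception("non-zero exit");
        }
      }

      public static void main(String[] args) {
        FakeShellExecutor shExec = new FakeShellExecutor(); // assigned before the try
        try {
          shExec.execute();
        } catch (Exception e) {
          // The exception is thrown only after shExec was assigned, so a
          // "if (null == shExec) return -1;" guard here is dead code, which
          // is the redundant null check the patch removes.
          System.out.println("exit code from container: " + shExec.exitCode);
        }
      }
    }
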
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
index ebc05a7c237..cd3e71a8d62 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
@@ -29,6 +29,7 @@ import java.io.OutputStream;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.URISyntaxException;
+import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -501,7 +502,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
try
{
BufferedReader lines = new BufferedReader(
- new InputStreamReader(stream));
+ new InputStreamReader(stream, Charset.forName("UTF-8")));
char[] buf = new char[512];
int nRead;
while ((nRead = lines.read(buf, 0, buf.length)) > 0) {
@@ -718,7 +719,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
}
catch(Throwable e) {
LOG.warn(String.format(
- "An exception occured during the cleanup of localizer job %s:\n%s",
+ "An exception occured during the cleanup of localizer job %s:%n%s",
localizerPid,
org.apache.hadoop.util.StringUtils.stringifyException(e)));
}
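
The last hunk above swaps \n for %n inside a String.format pattern; findbugs prefers %n there because it expands to the platform line separator instead of a hard-coded LF. A quick runnable comparison, with an invented message and pid:

    public class FormatNewlineSketch {
      public static void main(String[] args) {
        String hardLf =
            String.format("cleanup of localizer job %s:\n%s", "1234", "stack trace");
        String platform =
            String.format("cleanup of localizer job %s:%n%s", "1234", "stack trace");
        // On Unix-like systems the two strings are identical; on Windows %n
        // becomes \r\n, which is the behaviour the format-string check wants.
        System.out.println(hardLf.equals(platform)
            ? "identical on this platform"
            : "different on this platform");
      }
    }
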
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
index f0c506dcfa9..3f6616a85ab 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
@@ -37,7 +37,6 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
index 63039d8ed2c..a832a7aafcf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
@@ -20,9 +20,13 @@ package org.apache.hadoop.yarn.server.nodemanager.util;
import java.io.BufferedReader;
import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -38,6 +42,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -235,7 +240,6 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
private void updateCgroup(String controller, String groupName, String param,
String value) throws IOException {
- FileWriter f = null;
String path = pathForCgroup(controller, groupName);
param = controller + "." + param;
@@ -243,19 +247,25 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
LOG.debug("updateCgroup: " + path + ": " + param + "=" + value);
}
+ PrintWriter pw = null;
try {
- f = new FileWriter(path + "/" + param, false);
- f.write(value);
+ File file = new File(path + "/" + param);
+ Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+ pw = new PrintWriter(w);
+ pw.write(value);
} catch (IOException e) {
throw new IOException("Unable to set " + param + "=" + value +
" for cgroup at: " + path, e);
} finally {
- if (f != null) {
- try {
- f.close();
- } catch (IOException e) {
- LOG.warn("Unable to close cgroup file: " +
- path, e);
+ if (pw != null) {
+ boolean hasError = pw.checkError();
+ pw.close();
+ if (hasError) {
+ throw new IOException("Unable to set " + param + "=" + value +
+ " for cgroup at: " + path);
+ }
+ if (pw.checkError()) {
+ throw new IOException("Error while closing cgroup file " + path);
}
}
}
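
PrintWriter never throws from write(); failures only show up in its internal error flag, which is why the rewritten finally block consults checkError() both before and after close() and turns a set flag into an IOException. A self-contained demonstration against an always-failing stream; the stream and the value are stand-ins for a cgroup parameter file:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.io.OutputStreamWriter;
    import java.io.PrintWriter;

    public class CheckErrorSketch {
      public static void main(String[] args) throws IOException {
        OutputStream failing = new OutputStream() {
          @Override
          public void write(int b) throws IOException {
            throw new IOException("write rejected"); // e.g. an invalid cgroup value
          }
        };
        PrintWriter pw = new PrintWriter(new OutputStreamWriter(failing, "UTF-8"));
        pw.write("100000");
        pw.flush(); // the IOException is swallowed here, not thrown
        if (pw.checkError()) {
          System.out.println("write failed; only checkError() reveals it");
        }
        pw.close();
      }
    }
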
@@ -376,7 +386,8 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
BufferedReader in = null;
try {
- in = new BufferedReader(new FileReader(new File(getMtabFileName())));
+ FileInputStream fis = new FileInputStream(new File(getMtabFileName()));
+ in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
for (String str = in.readLine(); str != null;
str = in.readLine()) {
@@ -396,12 +407,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
} catch (IOException e) {
throw new IOException("Error while reading " + getMtabFileName(), e);
} finally {
- // Close the streams
- try {
- in.close();
- } catch (IOException e2) {
- LOG.warn("Error closing the stream: " + getMtabFileName(), e2);
- }
+ IOUtils.cleanup(LOG, in);
}
return ret;
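
org.apache.hadoop.io.IOUtils.cleanup(Log, Closeable...) closes each non-null argument and logs, rather than rethrows, anything close() throws, so it compresses the deleted try/catch into one line. A minimal usage sketch; the in-memory stream and its contents stand in for the mtab reader:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.io.UnsupportedEncodingException;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.io.IOUtils;

    public class CleanupSketch {
      private static final Log LOG = LogFactory.getLog(CleanupSketch.class);

      public static void main(String[] args) throws UnsupportedEncodingException {
        InputStream in = new ByteArrayInputStream(
            "cgroup /sys/fs/cgroup cgroup rw 0 0".getBytes("UTF-8"));
        // Closes every non-null argument; an IOException from close() is
        // logged through LOG instead of propagating to the caller.
        IOUtils.cleanup(LOG, in);
      }
    }
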
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
index 0c7c2505237..52fcdec020e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
@@ -19,8 +19,9 @@ package org.apache.hadoop.yarn.server.nodemanager.util;
import java.io.BufferedReader;
import java.io.File;
-import java.io.FileReader;
+import java.io.FileInputStream;
import java.io.IOException;
+import java.io.InputStreamReader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -49,14 +50,14 @@ public class ProcessIdFileReader {
LOG.debug("Accessing pid from pid file " + path);
String processId = null;
- FileReader fileReader = null;
BufferedReader bufReader = null;
try {
File file = new File(path.toString());
if (file.exists()) {
- fileReader = new FileReader(file);
- bufReader = new BufferedReader(fileReader);
+ FileInputStream fis = new FileInputStream(file);
+ bufReader = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+
while (true) {
String line = bufReader.readLine();
if (line == null) {
@@ -91,9 +92,6 @@ public class ProcessIdFileReader {
}
}
} finally {
- if (fileReader != null) {
- fileReader.close();
- }
if (bufReader != null) {
bufReader.close();
}
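
Besides the explicit charset, this hunk drops the separate fileReader variable: closing the outermost BufferedReader already closes the reader and stream it wraps, so keeping a second reference only to close it again was redundant. A small sketch of that layering, using a fabricated pid value:

    import java.io.BufferedReader;
    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;

    public class WrappedCloseSketch {
      public static void main(String[] args) throws IOException {
        ByteArrayInputStream pidBytes =
            new ByteArrayInputStream("4242\n".getBytes("UTF-8"));
        BufferedReader bufReader =
            new BufferedReader(new InputStreamReader(pidBytes, "UTF-8"));
        System.out.println("pid line: " + bufReader.readLine());
        // Closing the BufferedReader closes the InputStreamReader and the
        // underlying stream as well; no separate close of the inner reader
        // is needed in the finally block.
        bufReader.close();
      }
    }
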
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
index 7d2948ea834..48e0c87ac60 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
@@ -28,6 +28,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -151,7 +152,8 @@ public class ContainerLogsPage extends NMView {
}
IOUtils.skipFully(logByteStream, start);
- InputStreamReader reader = new InputStreamReader(logByteStream);
+ InputStreamReader reader =
+ new InputStreamReader(logByteStream, Charset.forName("UTF-8"));
int bufferSize = 65536;
char[] cbuf = new char[bufferSize];