HADOOP-6489. Fix 3 findbugs warnings. Contributed by Erik Steffl.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@899856 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Suresh Srinivas 2010-01-16 00:44:40 +00:00
parent 08561f76db
commit a90d3205d2
4 changed files with 33 additions and 22 deletions

View File

@@ -137,6 +137,8 @@ Trunk (unreleased changes)
     HADOOP-6402. testConf.xsl is not well-formed XML. (Steve Loughran
     via tomwhite)
 
+    HADOOP-6489. Fix 3 findbugs warnings. (Erik Steffl via suresh)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

View File

@@ -156,6 +156,7 @@ public final class FileContext {
   public static final Log LOG = LogFactory.getLog(FileContext.class);
   public static final FsPermission DEFAULT_PERM = FsPermission.getDefault();
+  volatile private static FileContext localFsSingleton = null;
 
   /**
    * List of files that should be deleted on JVM shutdown.
@@ -342,7 +343,6 @@ public final class FileContext {
     return getFileContext(new Configuration());
   }
-  private static FileContext localFsSingleton = null;
 
   /**
    *
   * @return a FileContext for the local filesystem using the default config.

View File

@@ -199,29 +199,38 @@ public class CompressionCodecFactory {
           System.out.println("Codec for " + args[i] + " not found.");
         } else {
           if (encode) {
-            CompressionOutputStream out =
-              codec.createOutputStream(new java.io.FileOutputStream(args[i]));
-            byte[] buffer = new byte[100];
-            String inFilename = removeSuffix(args[i],
-                                             codec.getDefaultExtension());
-            java.io.InputStream in = new java.io.FileInputStream(inFilename);
-            int len = in.read(buffer);
-            while (len > 0) {
-              out.write(buffer, 0, len);
-              len = in.read(buffer);
+            CompressionOutputStream out = null;
+            java.io.InputStream in = null;
+            try {
+              out = codec.createOutputStream(
+                  new java.io.FileOutputStream(args[i]));
+              byte[] buffer = new byte[100];
+              String inFilename = removeSuffix(args[i],
+                  codec.getDefaultExtension());
+              in = new java.io.FileInputStream(inFilename);
+              int len = in.read(buffer);
+              while (len > 0) {
+                out.write(buffer, 0, len);
+                len = in.read(buffer);
+              }
+            } finally {
+              if(out != null) { out.close(); }
+              if(in != null) { in.close(); }
             }
-            in.close();
-            out.close();
           } else {
-            CompressionInputStream in =
-              codec.createInputStream(new java.io.FileInputStream(args[i]));
-            byte[] buffer = new byte[100];
-            int len = in.read(buffer);
-            while (len > 0) {
-              System.out.write(buffer, 0, len);
-              len = in.read(buffer);
+            CompressionInputStream in = null;
+            try {
+              in = codec.createInputStream(
+                  new java.io.FileInputStream(args[i]));
+              byte[] buffer = new byte[100];
+              int len = in.read(buffer);
+              while (len > 0) {
+                System.out.write(buffer, 0, len);
+                len = in.read(buffer);
+              }
+            } finally {
+              if(in != null) { in.close(); }
             }
-            in.close();
           }
         }
       }

View File

@@ -47,7 +47,7 @@ import org.apache.hadoop.io.serializer.SerializerBase;
 public class ReflectionUtils {
   private static final Class<?>[] EMPTY_ARRAY = new Class[]{};
-  private static SerializationFactory serialFactory = null;
+  volatile private static SerializationFactory serialFactory = null;
/** /**
* Cache of constructors for each class. Pins the classes so they * Cache of constructors for each class. Pins the classes so they