HADOOP-6489. Fix 3 findbugs warnings. Contributed by Erik Steffl.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@899856 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Suresh Srinivas 2010-01-16 00:44:40 +00:00
parent 08561f76db
commit a90d3205d2
4 changed files with 33 additions and 22 deletions

View File

@@ -137,6 +137,8 @@ Trunk (unreleased changes)
HADOOP-6402. testConf.xsl is not well-formed XML. (Steve Loughran
via tomwhite)
HADOOP-6489. Fix 3 findbugs warnings. (Erik Steffl via suresh)
Release 0.21.0 - Unreleased
INCOMPATIBLE CHANGES

View File

@@ -156,6 +156,7 @@ public final class FileContext {
public static final Log LOG = LogFactory.getLog(FileContext.class);
public static final FsPermission DEFAULT_PERM = FsPermission.getDefault();
volatile private static FileContext localFsSingleton = null;
/**
* List of files that should be deleted on JVM shutdown.
@@ -342,7 +343,6 @@ public final class FileContext {
return getFileContext(new Configuration());
}
private static FileContext localFsSingleton = null;
/**
*
* @return a FileContext for the local filesystem using the default config.

View File

@@ -199,29 +199,38 @@ public class CompressionCodecFactory {
System.out.println("Codec for " + args[i] + " not found.");
} else {
if (encode) {
CompressionOutputStream out =
codec.createOutputStream(new java.io.FileOutputStream(args[i]));
byte[] buffer = new byte[100];
String inFilename = removeSuffix(args[i],
codec.getDefaultExtension());
java.io.InputStream in = new java.io.FileInputStream(inFilename);
int len = in.read(buffer);
while (len > 0) {
out.write(buffer, 0, len);
len = in.read(buffer);
CompressionOutputStream out = null;
java.io.InputStream in = null;
try {
out = codec.createOutputStream(
new java.io.FileOutputStream(args[i]));
byte[] buffer = new byte[100];
String inFilename = removeSuffix(args[i],
codec.getDefaultExtension());
in = new java.io.FileInputStream(inFilename);
int len = in.read(buffer);
while (len > 0) {
out.write(buffer, 0, len);
len = in.read(buffer);
}
} finally {
if(out != null) { out.close(); }
if(in != null) { in.close(); }
}
in.close();
out.close();
} else {
CompressionInputStream in =
codec.createInputStream(new java.io.FileInputStream(args[i]));
byte[] buffer = new byte[100];
int len = in.read(buffer);
while (len > 0) {
System.out.write(buffer, 0, len);
len = in.read(buffer);
CompressionInputStream in = null;
try {
in = codec.createInputStream(
new java.io.FileInputStream(args[i]));
byte[] buffer = new byte[100];
int len = in.read(buffer);
while (len > 0) {
System.out.write(buffer, 0, len);
len = in.read(buffer);
}
} finally {
if(in != null) { in.close(); }
}
in.close();
}
}
}

View File

@@ -47,7 +47,7 @@ import org.apache.hadoop.io.serializer.SerializerBase;
public class ReflectionUtils {
private static final Class<?>[] EMPTY_ARRAY = new Class[]{};
private static SerializationFactory serialFactory = null;
volatile private static SerializationFactory serialFactory = null;
/**
* Cache of constructors for each class. Pins the classes so they