HADOOP-10482. Fix various findbugs warnings in hadoop-common. Contributed by Haohui Mai.

This commit is contained in:
Haohui Mai 2014-12-10 12:44:25 -08:00
parent a1e4a12dc0
commit 7e51350d9a
30 changed files with 118 additions and 145 deletions

View File

@@ -192,6 +192,8 @@ Release 2.7.0 - UNRELEASED
HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws,
hadoop-azure, and hadoop-openstack. (Li Lu via wheat9)
HADOOP-10482. Fix various findbugs warnings in hadoop-common. (wheat9)
Release 2.6.0 - 2014-11-18
INCOMPATIBLE CHANGES

View File

@@ -259,6 +259,16 @@
<Method name="writeVLong" />
<Bug pattern="SF_SWITCH_FALLTHROUGH" />
</Match>
<Match>
<Class name="org.apache.hadoop.io.Text" />
<Method name="bytesToCodePoint" />
<Bug pattern="SF_SWITCH_NO_DEFAULT" />
</Match>
<Match>
<Class name="org.apache.hadoop.util.PureJavaCrc32C" />
<Method name="update" />
<Bug pattern="SF_SWITCH_NO_DEFAULT" />
</Match>
<!--
The switch condition fall through is intentional and for performance
purposes.

View File

@@ -1481,11 +1481,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param pattern new value
*/
public void setPattern(String name, Pattern pattern) {
if (null == pattern) {
set(name, null);
} else {
set(name, pattern.pattern());
}
assert pattern != null : "Pattern cannot be null";
set(name, pattern.pattern());
}
/**

View File

@@ -144,13 +144,8 @@ public class JavaKeyStoreProvider extends KeyProvider {
// Provided Password file does not exist
throw new IOException("Password file does not exists");
}
if (pwdFile != null) {
InputStream is = pwdFile.openStream();
try {
password = IOUtils.toString(is).trim().toCharArray();
} finally {
is.close();
}
try (InputStream is = pwdFile.openStream()) {
password = IOUtils.toString(is).trim().toCharArray();
}
}
}

View File

@@ -2619,9 +2619,6 @@ public abstract class FileSystem extends Configured implements Closeable {
private static FileSystem createFileSystem(URI uri, Configuration conf
) throws IOException {
Class<?> clazz = getFileSystemClass(uri.getScheme(), conf);
if (clazz == null) {
throw new IOException("No FileSystem for scheme: " + uri.getScheme());
}
FileSystem fs = (FileSystem)ReflectionUtils.newInstance(clazz, conf);
fs.initialize(uri, conf);
return fs;

View File

@@ -220,12 +220,7 @@ public class HarFileSystem extends FileSystem {
return FileSystem.getDefaultUri(conf);
}
String authority = rawURI.getAuthority();
if (authority == null) {
throw new IOException("URI: " + rawURI
+ " is an invalid Har URI since authority==null."
+ " Expecting har://<scheme>-<host>/<path>.");
}
int i = authority.indexOf('-');
if (i < 0) {
throw new IOException("URI: " + rawURI
@@ -489,19 +484,12 @@
}
static class Store {
public Store() {
begin = end = startHash = endHash = 0;
}
public Store(long begin, long end, int startHash, int endHash) {
public Store(long begin, long end) {
this.begin = begin;
this.end = end;
this.startHash = startHash;
this.endHash = endHash;
}
public long begin;
public long end;
public int startHash;
public int endHash;
}
/**
@@ -594,7 +582,7 @@ public class HarFileSystem extends FileSystem {
public HarStatus(String harString) throws UnsupportedEncodingException {
String[] splits = harString.split(" ");
this.name = decodeFileName(splits[0]);
this.isDir = "dir".equals(splits[1]) ? true: false;
this.isDir = "dir".equals(splits[1]);
// this is equal to "none" if its a directory
this.partName = splits[2];
this.startIndex = Long.parseLong(splits[3]);
@@ -1167,11 +1155,8 @@ public class HarFileSystem extends FileSystem {
int b = lin.readLine(line);
read += b;
readStr = line.toString().split(" ");
int startHash = Integer.parseInt(readStr[0]);
int endHash = Integer.parseInt(readStr[1]);
stores.add(new Store(Long.parseLong(readStr[2]),
Long.parseLong(readStr[3]), startHash,
endHash));
Long.parseLong(readStr[3])));
line.clear();
}
} catch (IOException ioe) {

View File

@@ -369,7 +369,7 @@ public class LocalDirAllocator {
// Keep rolling the wheel till we get a valid path
Random r = new java.util.Random();
while (numDirsSearched < numDirs && returnPath == null) {
long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
long randomPosition = (r.nextLong() >>> 1) % totalAvailable;
int dir = 0;
while (randomPosition > availableOnDisk[dir]) {
randomPosition -= availableOnDisk[dir];

View File

@@ -143,13 +143,13 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
switch (finalCrcType) {
case CRC32:
return new MD5MD5CRC32GzipFileChecksum(
Integer.valueOf(bytesPerCRC),
Integer.valueOf(crcPerBlock),
Integer.parseInt(bytesPerCRC),
Integer.parseInt(crcPerBlock),
new MD5Hash(md5));
case CRC32C:
return new MD5MD5CRC32CastagnoliFileChecksum(
Integer.valueOf(bytesPerCRC),
Integer.valueOf(crcPerBlock),
Integer.parseInt(bytesPerCRC),
Integer.parseInt(crcPerBlock),
new MD5Hash(md5));
default:
// we should never get here since finalCrcType will

View File

@@ -23,6 +23,7 @@ import java.io.InputStream;
import java.net.ConnectException;
import java.net.URI;
import com.google.common.base.Preconditions;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.net.ftp.FTP;
@@ -101,17 +102,12 @@ public class FTPFileSystem extends FileSystem {
if (userAndPassword == null) {
userAndPassword = (conf.get("fs.ftp.user." + host, null) + ":" + conf
.get("fs.ftp.password." + host, null));
if (userAndPassword == null) {
throw new IOException("Invalid user/passsword specified");
}
}
String[] userPasswdInfo = userAndPassword.split(":");
Preconditions.checkState(userPasswdInfo.length > 1,
"Invalid username / password");
conf.set(FS_FTP_USER_PREFIX + host, userPasswdInfo[0]);
if (userPasswdInfo.length > 1) {
conf.set(FS_FTP_PASSWORD_PREFIX + host, userPasswdInfo[1]);
} else {
conf.set(FS_FTP_PASSWORD_PREFIX + host, null);
}
conf.set(FS_FTP_PASSWORD_PREFIX + host, userPasswdInfo[1]);
setConf(conf);
this.uri = uri;
}
@@ -293,7 +289,8 @@
*/
private boolean exists(FTPClient client, Path file) throws IOException {
try {
return getFileStatus(client, file) != null;
getFileStatus(client, file);
return true;
} catch (FileNotFoundException fnfe) {
return false;
}
@@ -333,10 +330,8 @@ public class FTPFileSystem extends FileSystem {
if (dirEntries != null && dirEntries.length > 0 && !(recursive)) {
throw new IOException("Directory: " + file + " is not empty.");
}
if (dirEntries != null) {
for (int i = 0; i < dirEntries.length; i++) {
delete(client, new Path(absolute, dirEntries[i].getPath()), recursive);
}
for (FileStatus dirEntry : dirEntries) {
delete(client, new Path(absolute, dirEntry.getPath()), recursive);
}
return client.removeDirectory(pathName);
}

View File

@@ -57,7 +57,7 @@ class Ls extends FsCommand {
protected static final SimpleDateFormat dateFormat =
protected final SimpleDateFormat dateFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm");
protected int maxRepl = 3, maxLen = 10, maxOwner = 0, maxGroup = 0;

View File

@@ -55,8 +55,8 @@ class Stat extends FsCommand {
"in the specified format. Format accepts filesize in blocks (%b), group name of owner(%g), " +
"filename (%n), block size (%o), replication (%r), user name of owner(%u), modification date (%y, %Y)\n";
protected static final SimpleDateFormat timeFmt;
static {
protected final SimpleDateFormat timeFmt;
{
timeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
timeFmt.setTimeZone(TimeZone.getTimeZone("UTC"));
}

View File

@@ -83,6 +83,8 @@ class Test extends FsCommand {
case 'z':
test = (item.stat.getLen() == 0);
break;
default:
break;
}
if (!test) exitCode = 1;
}

View File

@@ -168,12 +168,6 @@ public abstract class HAAdmin extends Configured implements Tool {
private boolean isOtherTargetNodeActive(String targetNodeToActivate, boolean forceActive)
throws IOException {
Collection<String> targetIds = getTargetIds(targetNodeToActivate);
if(targetIds == null) {
errOut.println("transitionToActive: No target node in the "
+ "current configuration");
printUsage(errOut, "-transitionToActive");
return true;
}
targetIds.remove(targetNodeToActivate);
for(String targetId : targetIds) {
HAServiceTarget target = resolveTarget(targetId);

View File

@@ -310,6 +310,8 @@ public class SshFenceByTcpPort extends Configured
case com.jcraft.jsch.Logger.FATAL:
LOG.fatal(message);
break;
default:
break;
}
}
}

View File

@@ -99,11 +99,11 @@ public class LongWritable implements WritableComparable<LongWritable> {
@Override
public int compare(WritableComparable a, WritableComparable b) {
return -super.compare(a, b);
return super.compare(b, a);
}
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
return -super.compare(b1, s1, l1, b2, s2, l2);
return super.compare(b2, s2, l2, b1, s1, l1);
}
}

View File

@@ -584,6 +584,8 @@ public class Text extends BinaryComparable
state = TRAIL_BYTE;
}
break;
default:
break;
} // switch (state)
count++;
}

View File

@@ -40,7 +40,7 @@ public class DecompressorStream extends CompressionInputStream {
throws IOException {
super(in);
if (in == null || decompressor == null) {
if (decompressor == null) {
throw new NullPointerException();
} else if (bufferSize <= 0) {
throw new IllegalArgumentException("Illegal bufferSize");

View File

@@ -662,7 +662,7 @@ public abstract class Server {
assert !running;
readSelector.wakeup();
try {
join();
super.join();
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
}
@@ -1116,7 +1116,8 @@ public abstract class Server {
private ByteBuffer data;
private ByteBuffer dataLengthBuffer;
private LinkedList<Call> responseQueue;
private volatile int rpcCount = 0; // number of outstanding rpcs
// number of outstanding rpcs
private AtomicInteger rpcCount = new AtomicInteger();
private long lastContact;
private int dataLength;
private Socket socket;
@@ -1201,17 +1202,17 @@
/* Return true if the connection has no outstanding rpc */
private boolean isIdle() {
return rpcCount == 0;
return rpcCount.get() == 0;
}
/* Decrement the outstanding RPC count */
private void decRpcCount() {
rpcCount--;
rpcCount.decrementAndGet();
}
/* Increment the outstanding RPC count */
private void incRpcCount() {
rpcCount++;
rpcCount.incrementAndGet();
}
private UserGroupInformation getAuthorizedUgi(String authorizedId)
@@ -1982,9 +1983,9 @@ public abstract class Server {
LOG.debug("Ignoring socket shutdown exception", e);
}
if (channel.isOpen()) {
try {channel.close();} catch(Exception e) {}
IOUtils.cleanup(null, channel);
}
try {socket.close();} catch(Exception e) {}
IOUtils.cleanup(null, socket);
}
}

View File

@@ -84,11 +84,6 @@ public class GangliaContext31 extends GangliaContext {
value + " from hostname" + hostName);
String units = getUnits(name);
if (units == null) {
LOG.warn("Metric name " + name + ", value " + value
+ " had 'null' units");
units = "";
}
int slope = getSlope(name);
int tmax = getTmax(name);
int dmax = getDmax(name);

View File

@@ -54,7 +54,7 @@ public class CompositeContext extends AbstractMetricsContext {
int nKids;
try {
String sKids = getAttribute(ARITY_LABEL);
nKids = Integer.valueOf(sKids);
nKids = Integer.parseInt(sKids);
} catch (Exception e) {
LOG.error("Unable to initialize composite metric " + contextName +
": could not init arity", e);

View File

@@ -23,23 +23,24 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A mutable int counter for implementing metrics sources
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MutableCounterInt extends MutableCounter {
private volatile int value;
private AtomicInteger value = new AtomicInteger();
MutableCounterInt(MetricsInfo info, int initValue) {
super(info);
this.value = initValue;
this.value.set(initValue);
}
@Override
public synchronized void incr() {
++value;
setChanged();
public void incr() {
incr(1);
}
/**
@@ -47,18 +48,18 @@ public class MutableCounterInt extends MutableCounter {
* @param delta of the increment
*/
public synchronized void incr(int delta) {
value += delta;
value.addAndGet(delta);
setChanged();
}
public int value() {
return value;
return value.get();
}
@Override
public void snapshot(MetricsRecordBuilder builder, boolean all) {
if (all || changed()) {
builder.addCounter(info(), value);
builder.addCounter(info(), value());
clearChanged();
}
}

View File

@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import java.util.concurrent.atomic.AtomicLong;
/**
* A mutable long counter
*/
@@ -30,36 +32,35 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@InterfaceStability.Evolving
public class MutableCounterLong extends MutableCounter {
private volatile long value;
private AtomicLong value = new AtomicLong();
MutableCounterLong(MetricsInfo info, long initValue) {
super(info);
this.value = initValue;
this.value.set(initValue);
}
@Override
public synchronized void incr() {
++value;
setChanged();
public void incr() {
incr(1);
}
/**
* Increment the value by a delta
* @param delta of the increment
*/
public synchronized void incr(long delta) {
value += delta;
public void incr(long delta) {
value.addAndGet(delta);
setChanged();
}
public long value() {
return value;
return value.get();
}
@Override
public void snapshot(MetricsRecordBuilder builder, boolean all) {
if (all || changed()) {
builder.addCounter(info(), value);
builder.addCounter(info(), value());
clearChanged();
}
}

View File

@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A mutable int gauge
*/
@@ -30,44 +32,42 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@InterfaceStability.Evolving
public class MutableGaugeInt extends MutableGauge {
private volatile int value;
private AtomicInteger value = new AtomicInteger();
MutableGaugeInt(MetricsInfo info, int initValue) {
super(info);
this.value = initValue;
this.value.set(initValue);
}
public int value() {
return value;
return value.get();
}
@Override
public synchronized void incr() {
++value;
setChanged();
public void incr() {
incr(1);
}
/**
* Increment by delta
* @param delta of the increment
*/
public synchronized void incr(int delta) {
value += delta;
public void incr(int delta) {
value.addAndGet(delta);
setChanged();
}
@Override
public synchronized void decr() {
--value;
setChanged();
public void decr() {
decr(1);
}
/**
* decrement by delta
* @param delta of the decrement
*/
public synchronized void decr(int delta) {
value -= delta;
public void decr(int delta) {
value.addAndGet(-delta);
setChanged();
}
@@ -76,14 +76,14 @@ public class MutableGaugeInt extends MutableGauge {
* @param value to set
*/
public void set(int value) {
this.value = value;
this.value.set(value);
setChanged();
}
@Override
public void snapshot(MetricsRecordBuilder builder, boolean all) {
if (all || changed()) {
builder.addGauge(info(), value);
builder.addGauge(info(), value());
clearChanged();
}
}

View File

@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import java.util.concurrent.atomic.AtomicLong;
/**
* A mutable long gauge
*/
@@ -30,44 +32,42 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@InterfaceStability.Evolving
public class MutableGaugeLong extends MutableGauge {
private volatile long value;
private AtomicLong value = new AtomicLong();
MutableGaugeLong(MetricsInfo info, long initValue) {
super(info);
this.value = initValue;
this.value.set(initValue);
}
public long value() {
return value;
return value.get();
}
@Override
public synchronized void incr() {
++value;
setChanged();
public void incr() {
incr(1);
}
/**
* Increment by delta
* @param delta of the increment
*/
public synchronized void incr(long delta) {
value += delta;
public void incr(long delta) {
value.addAndGet(delta);
setChanged();
}
@Override
public synchronized void decr() {
--value;
setChanged();
public void decr() {
decr(1);
}
/**
* decrement by delta
* @param delta of the decrement
*/
public synchronized void decr(long delta) {
value -= delta;
public void decr(long delta) {
value.addAndGet(-delta);
setChanged();
}
@@ -76,13 +76,13 @@ public class MutableGaugeLong extends MutableGauge {
* @param value to set
*/
public void set(long value) {
this.value = value;
this.value.set(value);
setChanged();
}
public void snapshot(MetricsRecordBuilder builder, boolean all) {
if (all || changed()) {
builder.addGauge(info(), value);
builder.addGauge(info(), value());
clearChanged();
}
}

View File

@@ -287,7 +287,7 @@ public class NetUtils {
if (fqHost == null) {
try {
fqHost = SecurityUtil.getByName(host).getHostName();
// slight race condition, but won't hurt
// slight race condition, but won't hurt
canonicalizedHostCache.put(host, fqHost);
} catch (UnknownHostException e) {
fqHost = host;

View File

@@ -171,8 +171,7 @@ public class ScriptBasedMappingWithDependency extends ScriptBasedMapping
@Override
public String toString() {
return super.toString() + ", " + dependencyScriptName != null ?
("dependency script " + dependencyScriptName) : NO_SCRIPT;
return "dependency script " + dependencyScriptName;
}
}
}

View File

@@ -367,10 +367,8 @@ public class LdapGroupsMapping
return "";
}
Reader reader = null;
try {
try (Reader reader = new FileReader(pwFile)) {
StringBuilder password = new StringBuilder();
reader = new FileReader(pwFile);
int c = reader.read();
while (c > -1) {
password.append((char)c);
@@ -379,8 +377,6 @@ public class LdapGroupsMapping
return password.toString().trim();
} catch (IOException ioe) {
throw new RuntimeException("Could not read password file: " + pwFile, ioe);
} finally {
IOUtils.cleanup(LOG, reader);
}
}
}

View File

@@ -195,6 +195,8 @@ public class ComparableVersion
case 'm':
value = "milestone";
break;
default:
break;
}
}
this.value = ALIASES.getProperty( value , value );

View File

@@ -34,16 +34,13 @@ public class PrintJarMainClass {
* @param args
*/
public static void main(String[] args) {
try {
JarFile jar_file = new JarFile(args[0]);
if (jar_file != null) {
Manifest manifest = jar_file.getManifest();
if (manifest != null) {
String value = manifest.getMainAttributes().getValue("Main-Class");
if (value != null) {
System.out.println(value.replaceAll("/", "."));
return;
}
try (JarFile jar_file = new JarFile(args[0])) {
Manifest manifest = jar_file.getManifest();
if (manifest != null) {
String value = manifest.getMainAttributes().getValue("Main-Class");
if (value != null) {
System.out.println(value.replaceAll("/", "."));
return;
}
}
} catch (Throwable e) {

View File

@@ -72,14 +72,14 @@ public class ServletUtil {
throw new IOException("Invalid request has no " + param + " parameter");
}
return Long.valueOf(paramStr);
return Long.parseLong(paramStr);
}
public static final String HTML_TAIL = "<hr />\n"
+ "<a href='http://hadoop.apache.org/core'>Hadoop</a>, "
+ "<a href='http://hadoop.apache.org/core'>Hadoop</a>, "
+ Calendar.getInstance().get(Calendar.YEAR) + ".\n"
+ "</body></html>";
/**
* HTML footer to be added in the jsps.
* @return the HTML footer.