HDFS-4033. Miscellaneous findbugs 2 fixes. Contributed by Eli Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1430534 13f79535-47bb-0310-9956-ffa450edef68
Author: Eli Collins
Date:   2013-01-08 21:05:33 +00:00
Commit: 5cdb7e5ce7 (parent 239b2742d0)

11 changed files with 41 additions and 32 deletions

PBHelper.java

@@ -679,6 +679,8 @@ public class PBHelper {
     case DatanodeProtocol.DNA_SHUTDOWN:
       builder.setAction(BlockCommandProto.Action.SHUTDOWN);
       break;
+    default:
+      throw new AssertionError("Invalid action");
     }
     Block[] blocks = cmd.getBlocks();
     for (int i = 0; i < blocks.length; i++) {
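
Note on the pattern: a switch with no default arm is the kind of construct
findbugs 2 flags as SF_SWITCH_NO_DEFAULT; the added arm makes an unhandled
enum constant fail fast instead of falling through silently. The JspHelper
and FSNamesystem hunks below apply the same fix (FSNamesystem merely logs,
presumably because the unexpected value arrives over RPC and should not
crash the NameNode). A minimal standalone sketch with hypothetical names:

public class SwitchDefaultSketch {
  enum Action { TRANSFER, INVALIDATE, SHUTDOWN }

  static String encode(Action action) {
    switch (action) {
    case TRANSFER:
      return "transfer";
    case SHUTDOWN:
      return "shutdown";
    default:
      // Fail fast when a constant this method was never taught about
      // (here, INVALIDATE) reaches the switch.
      throw new AssertionError("Invalid action: " + action);
    }
  }

  public static void main(String[] args) {
    System.out.println(encode(Action.SHUTDOWN)); // prints: shutdown
  }
}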

Balancer.java

@@ -1370,7 +1370,7 @@ public class Balancer {
           " in this iteration");
     }
-    formatter.format("%-24s %10d %19s %18s %17s\n",
+    formatter.format("%-24s %10d %19s %18s %17s%n",
         DateFormat.getDateTimeInstance().format(new Date()),
         iteration,
         StringUtils.byteDesc(bytesMoved.get()),
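
Note: in java.util.Formatter strings, %n expands to the platform line
separator while a literal \n is always LF; findbugs 2 reports the latter
as VA_FORMAT_STRING_USES_NEWLINE. The BlockPoolSliceScanner and JMXGet
hunks below make the same substitution. A quick demonstration:

public class FormatNewlineSketch {
  public static void main(String[] args) {
    // %n is rendered as System.lineSeparator() ("\n" on Unix,
    // "\r\n" on Windows); the literal escape is not translated.
    System.out.print(String.format("moved %s\n", "10 GB"));
    System.out.print(String.format("moved %s%n", "10 GB"));
  }
}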

JspHelper.java

@@ -62,6 +62,7 @@ import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HtmlQuoting;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SecurityUtil;
@@ -181,7 +182,7 @@ public class JspHelper {
         s.setSoTimeout(HdfsServerConstants.READ_TIMEOUT);
       } catch (IOException e) {
         deadNodes.add(chosenNode);
-        s.close();
+        IOUtils.closeSocket(s);
         s = null;
         failures++;
       }
@@ -388,6 +389,8 @@ public class JspHelper {
       int dint = d1.getVolumeFailures() - d2.getVolumeFailures();
       ret = (dint < 0) ? -1 : ((dint > 0) ? 1 : 0);
       break;
+    default:
+      throw new IllegalArgumentException("Invalid sortField");
     }
     return (sortOrder == SORT_ORDER_DSC) ? -ret : ret;
   }
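
Note: IOUtils.closeSocket is Hadoop's null-safe, exception-swallowing
socket close, so cleanup in the failure path can no longer throw a fresh
IOException out of the catch block. A simplified stand-in for the real
helper (which lives in org.apache.hadoop.io.IOUtils):

import java.io.IOException;
import java.net.Socket;

public class QuietCloseSketch {
  // Null-safe close; an IOException raised by close() is swallowed so
  // it cannot mask the exception that routed us into the catch block.
  static void closeSocket(Socket sock) {
    if (sock != null) {
      try {
        sock.close();
      } catch (IOException ignored) {
        // best-effort cleanup
      }
    }
  }

  public static void main(String[] args) {
    closeSocket(null); // no NPE, no checked exception to handle
    System.out.println("closed quietly");
  }
}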

BlockPoolSliceScanner.java

@@ -703,7 +703,7 @@ class BlockPoolSliceScanner {
         (info.lastScanType == ScanType.VERIFICATION_SCAN) ? "local" : "none";
     buffer.append(String.format("%-26s : status : %-6s type : %-6s" +
         " scan time : " +
-        "%-15d %s\n", info.block,
+        "%-15d %s%n", info.block,
         (info.lastScanOk ? "ok" : "failed"),
         scanType, scanTime,
         (scanTime <= 0) ? "not yet verified" :
@@ -716,21 +716,21 @@ class BlockPoolSliceScanner {
     double pctProgress = (totalBytesToScan == 0) ? 100 :
         (totalBytesToScan-bytesLeft)*100.0/totalBytesToScan;
 
-    buffer.append(String.format("\nTotal Blocks : %6d" +
-        "\nVerified in last hour : %6d" +
-        "\nVerified in last day : %6d" +
-        "\nVerified in last week : %6d" +
-        "\nVerified in last four weeks : %6d" +
-        "\nVerified in SCAN_PERIOD : %6d" +
-        "\nNot yet verified : %6d" +
-        "\nVerified since restart : %6d" +
-        "\nScans since restart : %6d" +
-        "\nScan errors since restart : %6d" +
-        "\nTransient scan errors : %6d" +
-        "\nCurrent scan rate limit KBps : %6d" +
-        "\nProgress this period : %6.0f%%" +
-        "\nTime left in cur period : %6.2f%%" +
-        "\n",
+    buffer.append(String.format("%nTotal Blocks : %6d" +
+        "%nVerified in last hour : %6d" +
+        "%nVerified in last day : %6d" +
+        "%nVerified in last week : %6d" +
+        "%nVerified in last four weeks : %6d" +
+        "%nVerified in SCAN_PERIOD : %6d" +
+        "%nNot yet verified : %6d" +
+        "%nVerified since restart : %6d" +
+        "%nScans since restart : %6d" +
+        "%nScan errors since restart : %6d" +
+        "%nTransient scan errors : %6d" +
+        "%nCurrent scan rate limit KBps : %6d" +
+        "%nProgress this period : %6.0f%%" +
+        "%nTime left in cur period : %6.2f%%" +
+        "%n",
         total, inOneHour, inOneDay, inOneWeek,
         inFourWeeks, inScanPeriod, neverScanned,
         totalScans, totalScans,

DatanodeJspHelper.java

@@ -79,9 +79,6 @@ public class DatanodeJspHelper {
         .getCanonicalHostName();
   }
-  private static final SimpleDateFormat lsDateFormat =
-      new SimpleDateFormat("yyyy-MM-dd HH:mm");
-
   /**
    * Get the default chunk size.
    * @param conf the configuration
@@ -205,8 +202,8 @@ public class DatanodeJspHelper {
           + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr);
       cols[0] = "<a href=\"" + datanodeUrl + "\">"
           + HtmlQuoting.quoteHtmlChars(localFileName) + "</a>";
-      cols[5] = lsDateFormat.format(new Date((files[i]
-          .getModificationTime())));
+      cols[5] = new SimpleDateFormat("yyyy-MM-dd HH:mm").format(
+          new Date((files[i].getModificationTime())));
       cols[6] = files[i].getPermission().toString();
       cols[7] = files[i].getOwner();
       cols[8] = files[i].getGroup();
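
Note: SimpleDateFormat keeps mutable state and is not thread-safe, so a
shared static instance (findbugs STCAL_STATIC_SIMPLE_DATE_FORMAT) can
garble output under concurrent JSP requests; the fix trades a small
per-call allocation for correctness. A sketch of the safe form:

import java.text.SimpleDateFormat;
import java.util.Date;

public class DateFormatSketch {
  // Unsafe under concurrency (the pattern just removed):
  //   private static final SimpleDateFormat LS_DATE_FORMAT =
  //       new SimpleDateFormat("yyyy-MM-dd HH:mm");

  static String formatModTime(long modificationTime) {
    // Safe: a fresh instance confined to this call.
    return new SimpleDateFormat("yyyy-MM-dd HH:mm")
        .format(new Date(modificationTime));
  }

  public static void main(String[] args) {
    System.out.println(formatModTime(System.currentTimeMillis()));
  }
}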

ReplicaInputStreams.java

@@ -18,6 +18,8 @@
 package org.apache.hadoop.hdfs.server.datanode.fsdataset;
 
 import java.io.Closeable;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
 import java.io.InputStream;
 
 import org.apache.hadoop.io.IOUtils;
@@ -30,9 +32,9 @@ public class ReplicaInputStreams implements Closeable {
   private final InputStream checksumIn;
 
   /** Create an object with a data input stream and a checksum input stream. */
-  public ReplicaInputStreams(InputStream dataIn, InputStream checksumIn) {
-    this.dataIn = dataIn;
-    this.checksumIn = checksumIn;
+  public ReplicaInputStreams(FileDescriptor dataFd, FileDescriptor checksumFd) {
+    this.dataIn = new FileInputStream(dataFd);
+    this.checksumIn = new FileInputStream(checksumFd);
   }
 
   /** @return the data input stream. */

FsDatasetImpl.java

@@ -390,8 +390,7 @@ class FsDatasetImpl implements FsDatasetSpi<FsVolumeImpl> {
     if (ckoff > 0) {
       metaInFile.seek(ckoff);
     }
-    return new ReplicaInputStreams(new FileInputStream(blockInFile.getFD()),
-        new FileInputStream(metaInFile.getFD()));
+    return new ReplicaInputStreams(blockInFile.getFD(), metaInFile.getFD());
   }
 
   static File moveBlockFiles(Block b, File srcfile, File destdir
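
Note on this pair of hunks: by accepting FileDescriptors and constructing
the streams itself, ReplicaInputStreams becomes the single unambiguous
owner of the streams it must close, which quiets findbugs' open-stream
analysis at the FsDatasetImpl call site. A minimal sketch of the ownership
pattern (hypothetical class name):

import java.io.Closeable;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class OwnedStreamsSketch implements Closeable {
  private final InputStream dataIn;
  private final InputStream checksumIn;

  // The constructor, not the caller, opens the streams: ownership and
  // the duty to close them live in exactly one place.
  public OwnedStreamsSketch(FileDescriptor dataFd, FileDescriptor checksumFd) {
    this.dataIn = new FileInputStream(dataFd);
    this.checksumIn = new FileInputStream(checksumFd);
  }

  @Override
  public void close() {
    try { dataIn.close(); } catch (IOException ignored) {}
    try { checksumIn.close(); } catch (IOException ignored) {}
  }

  public static void main(String[] args) {
    OwnedStreamsSketch s =
        new OwnedStreamsSketch(FileDescriptor.in, FileDescriptor.in);
    s.close();
    System.out.println("streams owned and closed in one place");
  }
}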

FSNamesystem.java

@@ -4290,6 +4290,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
     case SAFEMODE_ENTER: // enter safe mode
       enterSafeMode(false);
       break;
+    default:
+      LOG.error("Unexpected safe mode action");
     }
   }
   return isInSafeMode();

ReceivedDeletedBlockInfo.java

@@ -90,8 +90,8 @@ public class ReceivedDeletedBlockInfo {
     ReceivedDeletedBlockInfo other = (ReceivedDeletedBlockInfo) o;
     return this.block.equals(other.getBlock())
         && this.status == other.status
-        && (this.delHints == other.delHints ||
-            this.delHints != null && this.delHints.equals(other.delHints));
+        && this.delHints != null
+        && this.delHints.equals(other.delHints);
   }
 
   @Override
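
Note on this hunk: the old expression treated two null delHints as equal
(the reference-equality short-circuit covered the null/null case), while the
simplified form returns false when both are null. If null==null equality were
wanted, java.util.Objects.equals expresses it directly, though that is Java 7+
(Hadoop at the time still built against Java 6). A quick comparison:

import java.util.Objects;

public class NullSafeEqualsSketch {
  public static void main(String[] args) {
    String a = null, b = null;
    // Hand-rolled check, as in the new code: false for null/null.
    System.out.println(a != null && a.equals(b)); // false
    // Objects.equals: null-safe, and true for null/null.
    System.out.println(Objects.equals(a, b));     // true
  }
}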

RemoteEditLog.java

@@ -89,6 +89,9 @@ public class RemoteEditLog implements Comparable<RemoteEditLog> {
       new Function<RemoteEditLog, Long>() {
         @Override
         public Long apply(RemoteEditLog log) {
+          if (null == log) {
+            return HdfsConstants.INVALID_TXID;
+          }
           return log.getStartTxId();
         }
       };
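
Note: the guard makes the transform function tolerate null list elements,
returning the INVALID_TXID sentinel instead of throwing NPE. A standalone
sketch assuming Guava on the classpath (as the surrounding code already
assumes) and using -12345 as a stand-in for HdfsConstants.INVALID_TXID:

import com.google.common.base.Function;

public class NullGuardFunctionSketch {
  static final long INVALID_TXID = -12345; // stand-in sentinel

  static final Function<Long, Long> GET_START_TXID =
      new Function<Long, Long>() {
        @Override
        public Long apply(Long startTxId) {
          if (null == startTxId) {
            return INVALID_TXID; // tolerate nulls from transformed lists
          }
          return startTxId;
        }
      };

  public static void main(String[] args) {
    System.out.println(GET_START_TXID.apply(null)); // -12345
    System.out.println(GET_START_TXID.apply(42L));  // 42
  }
}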

JMXGet.java

@@ -63,7 +63,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 @InterfaceAudience.Private
 public class JMXGet {
 
-  private static final String format = "%s=%s\n";
+  private static final String format = "%s=%s%n";
   private ArrayList<ObjectName> hadoopObjectNames;
   private MBeanServerConnection mbsc;
   private String service = "NameNode", port = "", server = "localhost";
@@ -126,7 +126,8 @@ public class JMXGet {
         continue;
       }
     }
-    err("Info: key = " + key + "; val = "+ val.getClass() +":"+ val);
+    err("Info: key = " + key + "; val = " +
+        (val == null ? "null" : val.getClass()) + ":" + val);
     break;
   }
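
Note: string concatenation renders a bare null as "null", but the method
call val.getClass() throws NPE, which is what the guard prevents. A tiny
sketch:

public class NullGuardGetClassSketch {
  static String describe(Object val) {
    // Guard the dereference; concatenating val itself is already safe.
    return "val = " + (val == null ? "null" : val.getClass()) + ":" + val;
  }

  public static void main(String[] args) {
    System.out.println(describe(null)); // val = null:null
    System.out.println(describe(42));   // val = class java.lang.Integer:42
  }
}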