HDFS-5419. Fixup test-patch.sh warnings on HDFS-4949 branch. (wang)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4949@1535607 13f79535-47bb-0310-9956-ffa450edef68
Andrew Wang 2013-10-25 01:56:05 +00:00
parent e87b2a3684
commit dc2ee20aec
10 changed files with 80 additions and 42 deletions

View File

@@ -65,9 +65,9 @@ public abstract class BatchedRemoteIterator<K, E> implements RemoteIterator<E> {
/**
* Perform the actual remote request.
*
* @param key The key to send.
* @return A list of replies.
*
* @param prevKey The key to send.
* @return A list of replies.
*/
public abstract BatchedEntries<E> makeRequest(K prevKey) throws IOException;
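Note: the fix above simply renames the documented parameter to match makeRequest(K prevKey). The batching stays invisible to callers, who just drain the resulting RemoteIterator; a small hedged sketch of that consumer side (nothing here beyond the stock RemoteIterator interface is taken from this patch):
import java.io.IOException;
import org.apache.hadoop.fs.RemoteIterator;

public class IteratorConsumer {
  // Drains any RemoteIterator. A BatchedRemoteIterator subclass fetches
  // further pages through makeRequest(prevKey) transparently while the
  // caller iterates.
  static <E> int drain(RemoteIterator<E> it) throws IOException {
    int count = 0;
    while (it.hasNext()) {
      it.next();
      count++;
    }
    return count;
  }
}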

View File

@@ -121,3 +121,5 @@ HDFS-4949 (Unreleased)
HDFS-5405. Fix possible RetryCache hang for caching RPC handlers in
FSNamesystem. (wang)
HDFS-5419. Fixup test-patch.sh warnings on HDFS-4949 branch. (wang)

View File

@@ -346,4 +346,22 @@
<Method name="create" />
<Bug pattern="UL_UNRELEASED_LOCK" />
</Match>
<!-- Manually verified to be okay, we want to throw away the top bit here -->
<Match>
<Class name="org.apache.hadoop.hdfs.server.namenode.CachedBlock" />
<Method name="getReplication" />
<Bug pattern="ICAST_QUESTIONABLE_UNSIGNED_RIGHT_SHIFT" />
</Match>
<!-- These two are used for shutting down and kicking the CRMon, do not need strong sync -->
<Match>
<Class name="org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor" />
<Field name="shutdown" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
<Match>
<Class name="org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor" />
<Field name="rescanImmediately" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
</FindBugsFilter>
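Note: IS2_INCONSISTENT_SYNC is FindBugs' warning that a field is accessed both with and without the owning monitor. The two suppressions above assert that shutdown and rescanImmediately are deliberately read with relaxed synchronization. A rough, hypothetical sketch of that shape (not the real CacheReplicationMonitor):
public class MonitorLike extends Thread {
  // Written under the lock, read without it: the pattern FindBugs flags.
  private boolean shutdown = false;

  @Override
  public void run() {
    while (!shutdown) {        // unsynchronized read, tolerated here
      doOneScan();
    }
  }

  public synchronized void requestShutdown() {
    shutdown = true;           // synchronized write
    notifyAll();
  }

  private void doOneScan() { /* scan work elided */ }
}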

View File

@@ -115,25 +115,26 @@ public class CachePoolInfo {
append(", ownerName:").append(ownerName).
append(", groupName:").append(groupName).
append(", mode:").append((mode == null) ? "null" :
String.format("0%03o", mode)).
String.format("0%03o", mode.toShort())).
append(", weight:").append(weight).
append("}").toString();
}
@Override
public boolean equals(Object o) {
try {
CachePoolInfo other = (CachePoolInfo)o;
return new EqualsBuilder().
append(poolName, other.poolName).
append(ownerName, other.ownerName).
append(groupName, other.groupName).
append(mode, other.mode).
append(weight, other.weight).
isEquals();
} catch (ClassCastException e) {
if (o == null) { return false; }
if (o == this) { return true; }
if (o.getClass() != getClass()) {
return false;
}
CachePoolInfo other = (CachePoolInfo)o;
return new EqualsBuilder().
append(poolName, other.poolName).
append(ownerName, other.ownerName).
append(groupName, other.groupName).
append(mode, other.mode).
append(weight, other.weight).
isEquals();
}
@Override
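Note: the rewritten equals drops the try/ClassCastException idiom in favor of the conventional null check, identity check, and exact-class check before the field comparison. A minimal sketch of the same pattern on a hypothetical class (the hashCode here is part of the sketch, not this diff), assuming commons-lang's builders are available as they already are for EqualsBuilder:
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

public class PoolLikeInfo {
  private final String poolName;
  private final String ownerName;

  public PoolLikeInfo(String poolName, String ownerName) {
    this.poolName = poolName;
    this.ownerName = ownerName;
  }

  @Override
  public boolean equals(Object o) {
    if (o == null) { return false; }      // no instance equals null
    if (o == this) { return true; }       // reflexive fast path
    if (o.getClass() != getClass()) {     // exact type, no ClassCastException needed
      return false;
    }
    PoolLikeInfo other = (PoolLikeInfo) o;
    return new EqualsBuilder().
        append(poolName, other.poolName).
        append(ownerName, other.ownerName).
        isEquals();
  }

  @Override
  public int hashCode() {
    // keep hashCode consistent with equals over the same fields
    return new HashCodeBuilder().append(poolName).append(ownerName).toHashCode();
  }
}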

View File

@@ -82,6 +82,8 @@ public final class PathBasedCacheEntry {
@Override
public boolean equals(Object o) {
if (o == null) { return false; }
if (o == this) { return true; }
if (o.getClass() != this.getClass()) {
return false;
}

View File

@@ -83,6 +83,8 @@ public final class CachedBlock implements Element,
@Override
public boolean equals(Object o) {
if (o == null) { return false; }
if (o == this) { return true; }
if (o.getClass() != this.getClass()) {
return false;
}
@@ -100,7 +102,7 @@ public final class CachedBlock implements Element,
}
public short getReplication() {
return (short)(replicationAndMark >>> 1);
return (short) (replicationAndMark >>> 1);
}
/**

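Note: getReplication recovers the replication factor from replicationAndMark, which, going by this getter and the FindBugs exclusion above, packs a mark flag into the low bit. A hedged sketch of that packing scheme with hypothetical names (the real CachedBlock may differ in detail):
public class PackedReplication {
  // Replication in the upper 15 bits of a short, a boolean mark in bit 0.
  private short replicationAndMark;

  public void set(short replication, boolean mark) {
    replicationAndMark = (short) ((replication << 1) | (mark ? 0x1 : 0x0));
  }

  public short getReplication() {
    // >>> 1 drops the mark bit; the cast back to short is what FindBugs
    // reports as ICAST_QUESTIONABLE_UNSIGNED_RIGHT_SHIFT, suppressed above.
    return (short) (replicationAndMark >>> 1);
  }

  public boolean getMark() {
    return (replicationAndMark & 0x1) != 0;
  }
}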
View File

@@ -720,7 +720,7 @@ public class CacheAdmin extends Configured implements Tool {
return 0;
}
String commandName = args.get(0);
commandName.replaceAll("^[-]*", "");
commandName = commandName.replaceAll("^[-]*", "");
Command command = determineCommand(commandName);
if (command == null) {
System.err.print("Sorry, I don't know the command '" +

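Note: the old call discarded its result. Java strings are immutable, so String.replaceAll returns a new string and leaves the receiver untouched; reassigning it is what actually strips the leading dashes before command lookup. A quick standalone illustration (hypothetical demo class):
public class ReplaceAllDemo {
  public static void main(String[] args) {
    String commandName = "-addDirective";
    commandName.replaceAll("^[-]*", "");                // no-op: result thrown away
    System.out.println(commandName);                    // prints "-addDirective"
    commandName = commandName.replaceAll("^[-]*", ""); // the fix: keep the new string
    System.out.println(commandName);                    // prints "addDirective"
  }
}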
View File

@@ -124,23 +124,14 @@ public class TableListing {
public Builder() {
}
/**
* See {@link #addField(String, Justification, boolean)
*/
public Builder addField(String title) {
return addField(title, Justification.LEFT, false);
}
/**
* See {@link #addField(String, Justification, boolean)
*/
public Builder addField(String title, Justification justification) {
return addField(title, justification, false);
}
/**
* See {@link #addField(String, Justification, boolean)
*/
public Builder addField(String title, boolean wrap) {
return addField(title, Justification.LEFT, wrap);
}
@@ -150,7 +141,7 @@ public class TableListing {
*
* @param title Field title.
* @param justification Right or left justification. Defaults to left.
* @Param wrapWidth Width at which to auto-wrap the content of the cell.
* @param wrap Width at which to auto-wrap the content of the cell.
* Defaults to Integer.MAX_VALUE.
* @return This Builder object
*/
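Note: the three overloads stripped of their redundant javadoc all delegate to addField(String, Justification, boolean), the method documented above. A hedged usage sketch; build(), addRow(String...), printing via toString(), and Justification being nested in TableListing are assumptions drawn from how CacheAdmin appears to drive this class, not from this diff:
// Assumed usage; only the addField(...) overloads are shown in this patch.
TableListing listing = new TableListing.Builder()
    .addField("NAME")                                     // defaults: LEFT, no wrap
    .addField("OWNER", TableListing.Justification.RIGHT)
    .addField("DESCRIPTION", true)                        // wrap long cell contents
    .build();                                             // assumed builder terminator
listing.addRow("pool1", "andrew", "a cache pool used for testing");
System.out.println(listing);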

View File

@@ -298,16 +298,36 @@ public class JsonUtil {
return m;
}
private static int getInt(Map<?, ?> m, String key, final int defaultValue) {
Object value = m.get(key);
if (value == null) {
return defaultValue;
}
return (int) (long) (Long) value;
}
private static long getLong(Map<?, ?> m, String key, final long defaultValue) {
Object value = m.get(key);
if (value == null) {
return defaultValue;
}
return (long) (Long) value;
}
private static String getString(Map<?, ?> m, String key,
final String defaultValue) {
Object value = m.get(key);
if (value == null) {
return defaultValue;
}
return (String) value;
}
/** Convert a Json map to an DatanodeInfo object. */
static DatanodeInfo toDatanodeInfo(final Map<?, ?> m) {
if (m == null) {
return null;
}
Object infoSecurePort = m.get("infoSecurePort");
if (infoSecurePort == null) {
infoSecurePort = 0l; // same as the default value in hdfs.proto
}
return new DatanodeInfo(
(String)m.get("ipAddr"),
@@ -315,19 +335,19 @@ public class JsonUtil {
(String)m.get("storageID"),
(int)(long)(Long)m.get("xferPort"),
(int)(long)(Long)m.get("infoPort"),
(int)(long)(Long)infoSecurePort,
getInt(m, "infoSecurePort", 0),
(int)(long)(Long)m.get("ipcPort"),
(Long)m.get("capacity"),
(Long)m.get("dfsUsed"),
(Long)m.get("remaining"),
(Long)m.get("blockPoolUsed"),
(Long)m.get("cacheCapacity"),
(Long)m.get("cacheUsed"),
(Long)m.get("lastUpdate"),
(int)(long)(Long)m.get("xceiverCount"),
(String)m.get("networkLocation"),
AdminStates.valueOf((String)m.get("adminState")));
getLong(m, "capacity", 0l),
getLong(m, "dfsUsed", 0l),
getLong(m, "remaining", 0l),
getLong(m, "blockPoolUsed", 0l),
getLong(m, "cacheCapacity", 0l),
getLong(m, "cacheUsed", 0l),
getLong(m, "lastUpdate", 0l),
getInt(m, "xceiverCount", 0),
getString(m, "networkLocation", ""),
AdminStates.valueOf(getString(m, "adminState", "NORMAL")));
}
/** Convert a DatanodeInfo[] to a Json array. */

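Note: the new getInt/getLong/getString helpers exist because unboxing a missing entry blows up: (Long) m.get("cacheCapacity") yields null when the key is absent, and the subsequent unboxing to long throws a NullPointerException. Funneling optional fields through a defaulting getter lets toDatanodeInfo tolerate sparse or older JSON. A small standalone sketch of the failure mode the helpers avoid (hypothetical demo class, not the HDFS code):
import java.util.HashMap;
import java.util.Map;

public class UnboxingDemo {
  // Same shape as the new JsonUtil helper: fall back to a default when absent.
  static long getLong(Map<?, ?> m, String key, long defaultValue) {
    Object value = m.get(key);
    return (value == null) ? defaultValue : (long) (Long) value;
  }

  public static void main(String[] args) {
    Map<String, Object> m = new HashMap<String, Object>();  // no "cacheCapacity" entry
    System.out.println(getLong(m, "cacheCapacity", 0L));    // 0, no exception
    long unsafe = (long) (Long) m.get("cacheCapacity");     // throws NullPointerException
    System.out.println(unsafe);
  }
}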
View File

@@ -79,6 +79,8 @@ public class TestJsonUtil {
response.put("xceiverCount", 4096l);
response.put("networkLocation", "foo.bar.baz");
response.put("adminState", "NORMAL");
response.put("cacheCapacity", 123l);
response.put("cacheUsed", 321l);
JsonUtil.toDatanodeInfo(response);
}