HADOOP-10437. Fix the javac warnings in the conf and the util package.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1582015 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Tsz-wo Sze 2014-03-26 19:03:16 +00:00
parent fe8c3dc2b8
commit a126a01fa1
15 changed files with 35 additions and 41 deletions

View File

@ -457,6 +457,9 @@ Release 2.4.0 - UNRELEASED
HADOOP-10440. HarFsInputStream.read(byte[]) updates position incorrectly.
(guodongdong via szetszwo)
HADOOP-10437. Fix the javac warnings in the conf and the util package.
(szetszwo)
BREAKDOWN OF HADOOP-10184 SUBTASKS AND RELATED JIRAS
HADOOP-10185. FileSystem API for ACLs. (cnauroth)

View File

@ -2259,13 +2259,13 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
root = (Element)resource;
}
if (doc == null && root == null) {
if (quiet)
if (root == null) {
if (doc == null) {
if (quiet) {
return null;
}
throw new RuntimeException(resource + " not found");
}
if (root == null) {
root = doc.getDocumentElement();
}
Properties toAddTo = properties;

View File

@ -131,15 +131,14 @@ public class ReconfigurationServlet extends HttpServlet {
@SuppressWarnings("unchecked")
private Enumeration<String> getParams(HttpServletRequest req) {
return (Enumeration<String>) req.getParameterNames();
return req.getParameterNames();
}
/**
* Apply configuration changes after admin has approved them.
*/
private void applyChanges(PrintWriter out, Reconfigurable reconf,
HttpServletRequest req)
throws IOException, ReconfigurationException {
HttpServletRequest req) throws ReconfigurationException {
Configuration oldConf = reconf.getConf();
Configuration newConf = new Configuration();

View File

@ -47,18 +47,18 @@ public class WritableName {
/** Set the name that a class should be known as to something other than the
* class name. */
public static synchronized void setName(Class writableClass, String name) {
public static synchronized void setName(Class<?> writableClass, String name) {
CLASS_TO_NAME.put(writableClass, name);
NAME_TO_CLASS.put(name, writableClass);
}
/** Add an alternate name for a class. */
public static synchronized void addName(Class writableClass, String name) {
public static synchronized void addName(Class<?> writableClass, String name) {
NAME_TO_CLASS.put(name, writableClass);
}
/** Return the name for a class. Default is {@link Class#getName()}. */
public static synchronized String getName(Class writableClass) {
public static synchronized String getName(Class<?> writableClass) {
String name = CLASS_TO_NAME.get(writableClass);
if (name != null)
return name;

View File

@ -17,7 +17,6 @@
*/
package org.apache.hadoop.util;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -26,6 +25,7 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@ -68,7 +68,7 @@ public class AsyncDiskService {
*
* @param volumes The roots of the file system volumes.
*/
public AsyncDiskService(String[] volumes) throws IOException {
public AsyncDiskService(String[] volumes) {
threadFactory = new ThreadFactory() {
@Override

View File

@ -36,13 +36,13 @@ public class ClassUtil {
* @return a jar file that contains the class, or null.
* @throws IOException
*/
public static String findContainingJar(Class clazz) {
public static String findContainingJar(Class<?> clazz) {
ClassLoader loader = clazz.getClassLoader();
String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
try {
for (Enumeration itr = loader.getResources(classFile);
for(final Enumeration<URL> itr = loader.getResources(classFile);
itr.hasMoreElements();) {
URL url = (URL) itr.nextElement();
final URL url = itr.nextElement();
if ("jar".equals(url.getProtocol())) {
String toReturn = url.getPath();
if (toReturn.startsWith("file:")) {

View File

@ -27,7 +27,6 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Shell;
/**
* Class that provides utility functions for checking disk problem
@ -35,9 +34,6 @@ import org.apache.hadoop.util.Shell;
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class DiskChecker {
private static final long SHELL_TIMEOUT = 10 * 1000;
public static class DiskErrorException extends IOException {
public DiskErrorException(String msg) {
super(msg);

View File

@ -25,7 +25,6 @@ import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.cli.CommandLine;
@ -397,7 +396,8 @@ public class GenericOptionsParser {
if (!localFs.exists(path)) {
throw new FileNotFoundException("File " + tmp + " does not exist.");
}
finalPath = path.makeQualified(localFs).toString();
finalPath = path.makeQualified(localFs.getUri(),
localFs.getWorkingDirectory()).toString();
}
else {
// check if the file exists in this file system
@ -408,7 +408,8 @@ public class GenericOptionsParser {
if (!fs.exists(path)) {
throw new FileNotFoundException("File " + tmp + " does not exist.");
}
finalPath = path.makeQualified(fs).toString();
finalPath = path.makeQualified(fs.getUri(),
fs.getWorkingDirectory()).toString();
}
finalArr[i] = finalPath;
}

View File

@ -170,8 +170,7 @@ public class HostsFileReader {
}
public synchronized void updateFileNames(String includesFile,
String excludesFile)
throws IOException {
String excludesFile) {
setIncludesFile(includesFile);
setExcludesFile(excludesFile);
}

View File

@ -245,7 +245,7 @@ public class LineReader implements Closeable {
}
} while (newlineLength == 0 && bytesConsumed < maxBytesToConsume);
if (bytesConsumed > (long)Integer.MAX_VALUE) {
if (bytesConsumed > Integer.MAX_VALUE) {
throw new IOException("Too many bytes before newline: " + bytesConsumed);
}
return (int)bytesConsumed;
@ -343,7 +343,7 @@ public class LineReader implements Closeable {
}
} while (delPosn < recordDelimiterBytes.length
&& bytesConsumed < maxBytesToConsume);
if (bytesConsumed > (long) Integer.MAX_VALUE) {
if (bytesConsumed > Integer.MAX_VALUE) {
throw new IOException("Too many bytes before delimiter: " + bytesConsumed);
}
return (int) bytesConsumed;

View File

@ -99,7 +99,8 @@ public class ProgramDriver {
* @throws NoSuchMethodException
* @throws SecurityException
*/
public void addClass (String name, Class mainClass, String description) throws Throwable {
public void addClass(String name, Class<?> mainClass, String description)
throws Throwable {
programs.put(name , new ProgramDescription(mainClass, description));
}

View File

@ -64,7 +64,7 @@ public class Progress {
public synchronized Progress addPhase() {
Progress phase = addNewPhase();
// set equal weightage for all phases
progressPerPhase = 1.0f / (float)phases.size();
progressPerPhase = 1.0f / phases.size();
fixedWeightageForAllPhases = true;
return phase;
}
@ -110,7 +110,7 @@ public class Progress {
addNewPhase();
}
// set equal weightage for all phases
progressPerPhase = 1.0f / (float)phases.size();
progressPerPhase = 1.0f / phases.size();
fixedWeightageForAllPhases = true;
}

View File

@ -78,7 +78,7 @@ public class RunJar {
try {
Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
JarEntry entry = (JarEntry)entries.nextElement();
final JarEntry entry = entries.nextElement();
if (!entry.isDirectory() &&
unpackRegex.matcher(entry.getName()).matches()) {
InputStream in = jar.getInputStream(entry);

View File

@ -431,7 +431,7 @@ public class StringUtils {
ArrayList<String> strList = new ArrayList<String>();
int startIndex = 0;
int nextIndex = 0;
while ((nextIndex = str.indexOf((int)separator, startIndex)) != -1) {
while ((nextIndex = str.indexOf(separator, startIndex)) != -1) {
strList.add(str.substring(startIndex, nextIndex));
startIndex = nextIndex + 1;
}

View File

@ -19,18 +19,13 @@
package org.apache.hadoop.util;
import java.io.IOException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.io.InputStream;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.apache.hadoop.io.IOUtils;
/**