svn merge -c 1582015 from trunk for HADOOP-10437. Fix the javac warnings in the conf and the util package.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1582017 13f79535-47bb-0310-9956-ffa450edef68
parent c87fbeb7a8
commit 8ae6d5b5bd
@@ -156,6 +156,9 @@ Release 2.4.0 - UNRELEASED
     HADOOP-10440. HarFsInputStream.read(byte[]) updates position incorrectly.
     (guodongdong via szetszwo)

+    HADOOP-10437. Fix the javac warnings in the conf and the util package.
+    (szetszwo)
+
   BREAKDOWN OF HADOOP-10184 SUBTASKS AND RELATED JIRAS

     HADOOP-10185. FileSystem API for ACLs. (cnauroth)
@@ -2251,13 +2251,13 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
         root = (Element)resource;
       }

-      if (doc == null && root == null) {
-        if (quiet)
-          return null;
-        throw new RuntimeException(resource + " not found");
-      }
-
-      if (root == null) {
+      if (root == null) {
+        if (doc == null) {
+          if (quiet) {
+            return null;
+          }
+          throw new RuntimeException(resource + " not found");
+        }
         root = doc.getDocumentElement();
       }
       Properties toAddTo = properties;
@@ -131,15 +131,14 @@ public class ReconfigurationServlet extends HttpServlet {

   @SuppressWarnings("unchecked")
   private Enumeration<String> getParams(HttpServletRequest req) {
-    return (Enumeration<String>) req.getParameterNames();
+    return req.getParameterNames();
   }

   /**
    * Apply configuratio changes after admin has approved them.
    */
   private void applyChanges(PrintWriter out, Reconfigurable reconf,
-      HttpServletRequest req)
-    throws IOException, ReconfigurationException {
+      HttpServletRequest req) throws ReconfigurationException {
     Configuration oldConf = reconf.getConf();
     Configuration newConf = new Configuration();

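
Background (inferred, not stated in the patch): from the Servlet 3.0 API onward, ServletRequest.getParameterNames() is declared to return Enumeration<String> rather than a raw Enumeration, which is why the unchecked cast above can simply be dropped. A minimal sketch of the same before/after pattern, using stand-in interfaces so it compiles without the servlet API on the classpath:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Enumeration;

    public class UncheckedCastDemo {
      // Stand-in for the pre-3.0 servlet signature: a raw Enumeration.
      interface LegacyRequest {
        @SuppressWarnings("rawtypes")
        Enumeration getParameterNames();
      }

      // Stand-in for the Servlet 3.0 signature: already parameterized.
      interface ModernRequest {
        Enumeration<String> getParameterNames();
      }

      @SuppressWarnings("unchecked")
      static Enumeration<String> paramsOld(LegacyRequest req) {
        // Needs an unchecked cast; javac warns unless it is suppressed.
        return (Enumeration<String>) req.getParameterNames();
      }

      static Enumeration<String> paramsNew(ModernRequest req) {
        // No cast, no warning: the API already carries the type parameter.
        return req.getParameterNames();
      }

      public static void main(String[] args) {
        ModernRequest req = () -> Collections.enumeration(Arrays.asList("a", "b"));
        System.out.println(Collections.list(paramsNew(req)));  // [a, b]
      }
    }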
@@ -47,18 +47,18 @@ public class WritableName {

  /** Set the name that a class should be known as to something other than the
   * class name. */
-  public static synchronized void setName(Class writableClass, String name) {
+  public static synchronized void setName(Class<?> writableClass, String name) {
     CLASS_TO_NAME.put(writableClass, name);
     NAME_TO_CLASS.put(name, writableClass);
   }

   /** Add an alternate name for a class. */
-  public static synchronized void addName(Class writableClass, String name) {
+  public static synchronized void addName(Class<?> writableClass, String name) {
     NAME_TO_CLASS.put(name, writableClass);
   }

   /** Return the name for a class. Default is {@link Class#getName()}. */
-  public static synchronized String getName(Class writableClass) {
+  public static synchronized String getName(Class<?> writableClass) {
     String name = CLASS_TO_NAME.get(writableClass);
     if (name != null)
       return name;
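
The Class to Class<?> changes here, and the similar ones in ClassUtil and ProgramDriver below, all address the same rawtypes warning: a raw Class parameter makes javac (with -Xlint:rawtypes) complain, while the unbounded wildcard Class<?> accepts exactly the same arguments without the warning. A small self-contained illustration (class and field names are illustrative, not from the patch):

    import java.util.HashMap;
    import java.util.Map;

    public class RawTypeDemo {
      private static final Map<Class<?>, String> CLASS_TO_NAME = new HashMap<>();

      // Raw type: without the suppression, javac -Xlint:rawtypes reports
      // "found raw type: java.lang.Class" for this parameter.
      @SuppressWarnings("rawtypes")
      static void setNameRaw(Class writableClass, String name) {
        CLASS_TO_NAME.put(writableClass, name);
      }

      // Unbounded wildcard: same call sites, no warning.
      static void setName(Class<?> writableClass, String name) {
        CLASS_TO_NAME.put(writableClass, name);
      }

      public static void main(String[] args) {
        setName(String.class, "string");
        System.out.println(CLASS_TO_NAME);  // {class java.lang.String=string}
      }
    }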
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.util;

-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -26,6 +25,7 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -68,7 +68,7 @@ public class AsyncDiskService {
    *
    * @param volumes The roots of the file system volumes.
    */
-  public AsyncDiskService(String[] volumes) throws IOException {
+  public AsyncDiskService(String[] volumes) {

     threadFactory = new ThreadFactory() {
       @Override
@@ -36,13 +36,13 @@ public class ClassUtil {
    * @return a jar file that contains the class, or null.
    * @throws IOException
    */
-  public static String findContainingJar(Class clazz) {
+  public static String findContainingJar(Class<?> clazz) {
     ClassLoader loader = clazz.getClassLoader();
     String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
     try {
-      for (Enumeration itr = loader.getResources(classFile);
+      for(final Enumeration<URL> itr = loader.getResources(classFile);
           itr.hasMoreElements();) {
-        URL url = (URL) itr.nextElement();
+        final URL url = itr.nextElement();
         if ("jar".equals(url.getProtocol())) {
           String toReturn = url.getPath();
           if (toReturn.startsWith("file:")) {
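
ClassLoader.getResources(String) has returned Enumeration<URL> since Java 5, so both the raw Enumeration and the (URL) cast in the old loop were unnecessary; that is standard JDK behaviour rather than anything specific to this patch. A trimmed, standalone sketch of the same loop over a class's own .class resource:

    import java.net.URL;
    import java.util.Enumeration;

    public class FindClassResource {
      public static void main(String[] args) throws Exception {
        Class<?> clazz = FindClassResource.class;
        ClassLoader loader = clazz.getClassLoader();
        String classFile = clazz.getName().replace('.', '/') + ".class";

        // getResources already returns Enumeration<URL>:
        // no raw type, no cast, no unchecked warning.
        for (final Enumeration<URL> itr = loader.getResources(classFile);
             itr.hasMoreElements();) {
          final URL url = itr.nextElement();
          System.out.println(url.getProtocol() + " -> " + url.getPath());
        }
      }
    }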
@@ -27,7 +27,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.Shell;

 /**
  * Class that provides utility functions for checking disk problem
@@ -35,9 +34,6 @@ import org.apache.hadoop.util.Shell;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskChecker {
-
-  private static final long SHELL_TIMEOUT = 10 * 1000;
-
   public static class DiskErrorException extends IOException {
     public DiskErrorException(String msg) {
       super(msg);
@@ -25,7 +25,6 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;

 import org.apache.commons.cli.CommandLine;
@@ -397,7 +396,8 @@ public class GenericOptionsParser {
         if (!localFs.exists(path)) {
           throw new FileNotFoundException("File " + tmp + " does not exist.");
         }
-        finalPath = path.makeQualified(localFs).toString();
+        finalPath = path.makeQualified(localFs.getUri(),
+            localFs.getWorkingDirectory()).toString();
       }
       else {
         // check if the file exists in this file system
@@ -408,7 +408,8 @@ public class GenericOptionsParser {
         if (!fs.exists(path)) {
           throw new FileNotFoundException("File " + tmp + " does not exist.");
         }
-        finalPath = path.makeQualified(fs).toString();
+        finalPath = path.makeQualified(fs.getUri(),
+            fs.getWorkingDirectory()).toString();
       }
       finalArr[i] = finalPath;
     }
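
My reading of the two call-site changes above: Path.makeQualified(FileSystem) is deprecated in Hadoop 2.x in favour of Path.makeQualified(URI, Path), so the parser now passes the filesystem's URI and working directory explicitly. A hedged sketch of the new call shape, assuming hadoop-common on the classpath and the default local file:/// filesystem:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class QualifyPathDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem localFs = FileSystem.getLocal(conf);

        Path path = new Path("some/relative/file.txt");

        // Old, deprecated form:      path.makeQualified(localFs)
        // Form used by the patch:    qualify against the fs URI and working dir.
        String finalPath =
            path.makeQualified(localFs.getUri(), localFs.getWorkingDirectory())
                .toString();

        System.out.println(finalPath);  // e.g. file:/current/dir/some/relative/file.txt
      }
    }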
@@ -170,8 +170,7 @@ public class HostsFileReader {
   }

   public synchronized void updateFileNames(String includesFile,
-      String excludesFile)
-      throws IOException {
+      String excludesFile) {
     setIncludesFile(includesFile);
     setExcludesFile(excludesFile);
   }
@@ -245,7 +245,7 @@ public class LineReader implements Closeable {
       }
     } while (newlineLength == 0 && bytesConsumed < maxBytesToConsume);

-    if (bytesConsumed > (long)Integer.MAX_VALUE) {
+    if (bytesConsumed > Integer.MAX_VALUE) {
       throw new IOException("Too many bytes before newline: " + bytesConsumed);
     }
     return (int)bytesConsumed;
@@ -343,7 +343,7 @@ public class LineReader implements Closeable {
       }
     } while (delPosn < recordDelimiterBytes.length
         && bytesConsumed < maxBytesToConsume);
-    if (bytesConsumed > (long) Integer.MAX_VALUE) {
+    if (bytesConsumed > Integer.MAX_VALUE) {
       throw new IOException("Too many bytes before delimiter: " + bytesConsumed);
     }
     return (int) bytesConsumed;
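
The two casts dropped above were redundant (presumably the redundant-cast warning from -Xlint:cast) because bytesConsumed is already a long: in a mixed long/int comparison, binary numeric promotion widens the int constant Integer.MAX_VALUE to long automatically. A tiny demonstration:

    public class PromotionDemo {
      public static void main(String[] args) {
        long bytesConsumed = 3_000_000_000L;  // larger than any int

        // The int constant is promoted to long in both comparisons;
        // the explicit (long) cast changes nothing.
        System.out.println(bytesConsumed > (long) Integer.MAX_VALUE);  // true
        System.out.println(bytesConsumed > Integer.MAX_VALUE);         // true
      }
    }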
@@ -99,7 +99,8 @@ public class ProgramDriver {
    * @throws NoSuchMethodException
    * @throws SecurityException
    */
-  public void addClass (String name, Class mainClass, String description) throws Throwable {
+  public void addClass(String name, Class<?> mainClass, String description)
+      throws Throwable {
     programs.put(name , new ProgramDescription(mainClass, description));
   }

@@ -64,7 +64,7 @@ public class Progress {
   public synchronized Progress addPhase() {
     Progress phase = addNewPhase();
     // set equal weightage for all phases
-    progressPerPhase = 1.0f / (float)phases.size();
+    progressPerPhase = 1.0f / phases.size();
     fixedWeightageForAllPhases = true;
     return phase;
   }
@@ -110,7 +110,7 @@ public class Progress {
       addNewPhase();
     }
     // set equal weightage for all phases
-    progressPerPhase = 1.0f / (float)phases.size();
+    progressPerPhase = 1.0f / phases.size();
     fixedWeightageForAllPhases = true;
   }

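
Likewise, 1.0f / phases.size() needs no (float) cast: dividing a float by an int promotes the int operand to float before the division, so removing the cast cannot change the result. For example:

    public class FloatDivisionDemo {
      public static void main(String[] args) {
        int phases = 3;

        float withCast    = 1.0f / (float) phases;
        float withoutCast = 1.0f / phases;  // phases is promoted to float anyway

        System.out.println(withCast == withoutCast);  // true
        System.out.println(withoutCast);              // 0.33333334
      }
    }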
@@ -78,7 +78,7 @@ public class RunJar {
     try {
       Enumeration<JarEntry> entries = jar.entries();
       while (entries.hasMoreElements()) {
-        JarEntry entry = (JarEntry)entries.nextElement();
+        final JarEntry entry = entries.nextElement();
         if (!entry.isDirectory() &&
             unpackRegex.matcher(entry.getName()).matches()) {
           InputStream in = jar.getInputStream(entry);
@@ -431,7 +431,7 @@ public class StringUtils {
     ArrayList<String> strList = new ArrayList<String>();
     int startIndex = 0;
     int nextIndex = 0;
-    while ((nextIndex = str.indexOf((int)separator, startIndex)) != -1) {
+    while ((nextIndex = str.indexOf(separator, startIndex)) != -1) {
       strList.add(str.substring(startIndex, nextIndex));
       startIndex = nextIndex + 1;
     }
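
String.indexOf(int ch, int fromIndex) takes an int, and a char argument widens to int implicitly, so the (int) cast on separator was another redundant cast. A quick check:

    public class IndexOfDemo {
      public static void main(String[] args) {
        String str = "a,b,c";
        char separator = ',';

        // char widens to int implicitly; both calls resolve to indexOf(int, int).
        System.out.println(str.indexOf((int) separator, 0));  // 1
        System.out.println(str.indexOf(separator, 0));        // 1
      }
    }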
@@ -19,18 +19,13 @@
 package org.apache.hadoop.util;

 import java.io.IOException;
-import java.net.URL;
-import java.net.URLDecoder;
-import java.util.Enumeration;
+import java.io.InputStream;
+import java.util.Properties;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
 import org.apache.hadoop.io.IOUtils;

 /**