From 2cc8d2d65bfa76ef5da29cf10b6e38c1bf139719 Mon Sep 17 00:00:00 2001
From: Tsz-wo Sze <szetszwo@apache.org>
Date: Tue, 25 Mar 2014 18:09:48 +0000
Subject: [PATCH] svn merge -c 1581437 from trunk for HADOOP-10426. Declare
 CreateOpts.getOpt(..) with generic type argument, remove unused
 FileContext.getFileStatus(..) and fix various javac warnings.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1581440 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |  4 +
 .../apache/hadoop/fs/AbstractFileSystem.java  |  9 +-
 .../apache/hadoop/fs/FSDataInputStream.java   |  5 +-
 .../org/apache/hadoop/fs/FileContext.java     | 37 +-------
 .../java/org/apache/hadoop/fs/FilterFs.java   |  3 +-
 .../java/org/apache/hadoop/fs/Options.java    | 92 ++++++++-----------
 .../hadoop/fs/shell/CommandFactory.java       |  3 +-
 .../apache/hadoop/fs/shell/CopyCommands.java  | 10 +-
 .../org/apache/hadoop/fs/shell/Display.java   |  7 +-
 .../org/apache/hadoop/fs/shell/PathData.java  |  2 +-
 .../hadoop/ha/ActiveStandbyElector.java       |  7 +-
 .../org/apache/hadoop/http/HttpConfig.java    |  2 -
 .../org/apache/hadoop/http/HttpServer2.java   |  5 +-
 .../apache/hadoop/io/AbstractMapWritable.java | 14 +--
 .../org/apache/hadoop/io/MapWritable.java     |  4 +-
 .../hadoop/fs/FileContextTestHelper.java      | 24 ++---
 .../hadoop/fs/FileContextTestWrapper.java     |  6 +-
 .../hadoop/fs/FileSystemTestWrapper.java      | 17 ++--
 18 files changed, 92 insertions(+), 159 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index cad8df4e1ed..4f6368bdd93 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -18,6 +18,10 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10418. SaslRpcClient should not assume that remote principals are in
     the default_realm. (atm)
 
+    HADOOP-10426. Declare CreateOpts.getOpt(..) with generic type argument,
+    remove unused FileContext.getFileStatus(..) and fix various javac
+    warnings. (szetszwo)
+
 Release 2.4.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index 6942758dd9a..d0303be71af 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -750,13 +750,12 @@ public void createSymlink(final Path target, final Path link,
    * Partially resolves the path. This is used during symlink resolution in
    * {@link FSLinkResolver}, and differs from the similarly named method
    * {@link FileContext#getLinkTarget(Path)}.
+   * @throws IOException subclass implementations may throw IOException
    */
   public Path getLinkTarget(final Path f) throws IOException {
-    /* We should never get here. Any file system that threw an
-     * UnresolvedLinkException, causing this function to be called,
-     * needs to override this method.
-     */
-    throw new AssertionError();
+    throw new AssertionError("Implementation Error: " + getClass()
+        + " that threw an UnresolvedLinkException, causing this method to be"
+        + " called, needs to override this method.");
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
index a77ca437729..c8609d450f2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
@@ -33,7 +33,7 @@
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FSDataInputStream extends DataInputStream
-    implements Seekable, PositionedReadable, Closeable,
+    implements Seekable, PositionedReadable,
     ByteBufferReadable, HasFileDescriptor, CanSetDropBehind, CanSetReadahead,
     HasEnhancedByteBufferAccess {
   /**
@@ -44,8 +44,7 @@ public class FSDataInputStream extends DataInputStream
     extendedReadBuffers
     = new IdentityHashStore<ByteBuffer, Object>(0);
 
-  public FSDataInputStream(InputStream in)
-      throws IOException {
+  public FSDataInputStream(InputStream in) {
     super(in);
     if( !(in instanceof Seekable) || !(in instanceof PositionedReadable) ) {
       throw new IllegalArgumentException(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index 949f2da1635..c889d7dfb39 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -662,8 +662,7 @@ public FSDataOutputStream create(final Path f,
       // If not, add a default Perms and apply umask;
       // AbstractFileSystem#create
 
-      CreateOpts.Perms permOpt =
-          (CreateOpts.Perms) CreateOpts.getOpt(CreateOpts.Perms.class, opts);
+      CreateOpts.Perms permOpt = CreateOpts.getOpt(CreateOpts.Perms.class, opts);
       FsPermission permission = (permOpt != null) ? permOpt.getValue()
                                     : FILE_DEFAULT_PERM;
       permission = permission.applyUMask(umask);
@@ -1534,40 +1533,6 @@ public boolean exists(final Path f) throws AccessControlException,
       }
     }
 
-    /**
-     * Return a list of file status objects that corresponds to supplied paths
-     * excluding those non-existent paths.
-     *
-     * @param paths list of paths we want information from
-     *
-     * @return a list of FileStatus objects
-     *
-     * @throws AccessControlException If access is denied
-     * @throws IOException If an I/O error occurred
-     *
-     * Exceptions applicable to file systems accessed over RPC:
-     * @throws RpcClientException If an exception occurred in the RPC client
-     * @throws RpcServerException If an exception occurred in the RPC server
-     * @throws UnexpectedServerException If server implementation throws
-     *           undeclared exception to RPC server
-     */
-    private FileStatus[] getFileStatus(Path[] paths)
-        throws AccessControlException, IOException {
-      if (paths == null) {
-        return null;
-      }
-      ArrayList<FileStatus> results = new ArrayList<FileStatus>(paths.length);
-      for (int i = 0; i < paths.length; i++) {
-        try {
-          results.add(FileContext.this.getFileStatus(paths[i]));
-        } catch (FileNotFoundException fnfe) {
-          // ignoring
-        }
-      }
-      return results.toArray(new FileStatus[results.size()]);
-    }
-
     /**
      * Return the {@link ContentSummary} of path f.
     * @param f path
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
index c91088eab0d..2239040ca32 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
@@ -53,8 +53,7 @@ protected AbstractFileSystem getMyFs() {
     return myFs;
   }
 
-  protected FilterFs(AbstractFileSystem fs) throws IOException,
-      URISyntaxException {
+  protected FilterFs(AbstractFileSystem fs) throws URISyntaxException {
     super(fs.getUri(), fs.getUri().getScheme(),
         fs.getUri().getAuthority() != null, fs.getUriDefaultPort());
     myFs = fs;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
index 8464e512704..e070943bb2f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
@@ -150,21 +150,25 @@ protected CreateParent(boolean createPar) {
 
     /**
      * Get an option of desired type
-     * @param theClass is the desired class of the opt
+     * @param clazz is the desired class of the opt
      * @param opts - not null - at least one opt must be passed
      * @return an opt from one of the opts of type theClass.
      *   returns null if there isn't any
      */
-    protected static CreateOpts getOpt(Class<? extends CreateOpts> theClass,  CreateOpts ...opts) {
+    static <T extends CreateOpts> T getOpt(Class<T> clazz, CreateOpts... opts) {
       if (opts == null) {
         throw new IllegalArgumentException("Null opt");
       }
-      CreateOpts result = null;
+      T result = null;
       for (int i = 0; i < opts.length; ++i) {
-        if (opts[i].getClass() == theClass) {
-          if (result != null)
-            throw new IllegalArgumentException("multiple blocksize varargs");
-          result = opts[i];
+        if (opts[i].getClass() == clazz) {
+          if (result != null) {
+            throw new IllegalArgumentException("multiple opts varargs: " + clazz);
+          }
+
+          @SuppressWarnings("unchecked")
+          T t = (T)opts[i];
+          result = t;
         }
       }
       return result;
@@ -175,14 +179,16 @@ protected static CreateOpts getOpt(Class<? extends CreateOpts> theClass, Create
      * @param opts - the option is set into this array of opts
      * @return updated CreateOpts[] == opts + newValue
      */
-    protected static <T extends CreateOpts> CreateOpts[] setOpt(T newValue,
-        CreateOpts ...opts) {
+    static <T extends CreateOpts> CreateOpts[] setOpt(final T newValue,
+        final CreateOpts... opts) {
+      final Class<?> clazz = newValue.getClass();
       boolean alreadyInOpts = false;
       if (opts != null) {
         for (int i = 0; i < opts.length; ++i) {
-          if (opts[i].getClass() == newValue.getClass()) {
-            if (alreadyInOpts)
-              throw new IllegalArgumentException("multiple opts varargs");
+          if (opts[i].getClass() == clazz) {
+            if (alreadyInOpts) {
+              throw new IllegalArgumentException("multiple opts varargs: " + clazz);
+            }
             alreadyInOpts = true;
             opts[i] = newValue;
           }
@@ -190,9 +196,12 @@
       }
       CreateOpts[] resultOpt = opts;
       if (!alreadyInOpts) { // no newValue in opt
-        CreateOpts[] newOpts = new CreateOpts[opts.length + 1];
-        System.arraycopy(opts, 0, newOpts, 0, opts.length);
-        newOpts[opts.length] = newValue;
+        final int oldLength = opts == null? 0: opts.length;
+        CreateOpts[] newOpts = new CreateOpts[oldLength + 1];
+        if (oldLength > 0) {
+          System.arraycopy(opts, 0, newOpts, 0, oldLength);
+        }
+        newOpts[oldLength] = newValue;
         resultOpt = newOpts;
       }
       return resultOpt;
@@ -273,50 +282,29 @@ public static ChecksumOpt createDisabled() {
      */
     public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt,
         ChecksumOpt userOpt, int userBytesPerChecksum) {
-      // The following is done to avoid unnecessary creation of new objects.
-      // tri-state variable: 0 default, 1 userBytesPerChecksum, 2 userOpt
-      short whichSize;
-      // true default, false userOpt
-      boolean useDefaultType;
-
+      final boolean useDefaultType;
+      final DataChecksum.Type type;
+      if (userOpt != null
+          && userOpt.getChecksumType() != DataChecksum.Type.DEFAULT) {
+        useDefaultType = false;
+        type = userOpt.getChecksumType();
+      } else {
+        useDefaultType = true;
+        type = defaultOpt.getChecksumType();
+      }
+
       // bytesPerChecksum - order of preference
       //   user specified value in bytesPerChecksum
       //   user specified value in checksumOpt
       //   default.
       if (userBytesPerChecksum > 0) {
-        whichSize = 1; // userBytesPerChecksum
-      } else if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
-        whichSize = 2; // userOpt
-      } else {
-        whichSize = 0; // default
-      }
-
-      // checksum type - order of preference
-      //   user specified value in checksumOpt
-      //   default.
-      if (userOpt != null &&
-          userOpt.getChecksumType() != DataChecksum.Type.DEFAULT) {
-        useDefaultType = false;
-      } else {
-        useDefaultType = true;
-      }
-
-      // Short out the common and easy cases
-      if (whichSize == 0 && useDefaultType) {
-        return defaultOpt;
-      } else if (whichSize == 2 && !useDefaultType) {
-        return userOpt;
-      }
-
-      // Take care of the rest of combinations
-      DataChecksum.Type type = useDefaultType ? defaultOpt.getChecksumType() :
-        userOpt.getChecksumType();
-      if (whichSize == 0) {
-        return new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
-      } else if (whichSize == 1) {
         return new ChecksumOpt(type, userBytesPerChecksum);
+      } else if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
+        return !useDefaultType? userOpt
+            : new ChecksumOpt(type, userOpt.getBytesPerChecksum());
       } else {
-        return new ChecksumOpt(type, userOpt.getBytesPerChecksum());
+        return useDefaultType? defaultOpt
+            : new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
       }
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
index f5d9d5a801d..dec83738118 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
@@ -24,7 +24,6 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -35,7 +34,7 @@
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
-public class CommandFactory extends Configured implements Configurable {
+public class CommandFactory extends Configured {
   private Map<String, Class<? extends Command>> classMap =
     new HashMap<String, Class<? extends Command>>();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
index 21f9d2ca9e1..0e2283c9937 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
@@ -18,16 +18,20 @@
 
 package org.apache.hadoop.fs.shell;
 
-import java.io.*;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.io.IOUtils;
 
 /** Various commands for copy files */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
index 1d9d7d42c17..88cf566ca9c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
@@ -18,12 +18,12 @@
 package org.apache.hadoop.fs.shell;
 
 import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.InputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.FileReader;
 import org.apache.avro.generic.GenericDatumReader;
@@ -31,7 +31,6 @@
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
-import org.apache.avro.Schema;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -252,7 +251,7 @@ protected static class AvroFileInputStream extends InputStream {
     private int pos;
     private byte[] buffer;
     private ByteArrayOutputStream output;
-    private FileReader fileReader;
+    private FileReader<?> fileReader;
     private DatumWriter<Object> writer;
     private JsonEncoder encoder;
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
index 84bb2347671..f34870eb628 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
@@ -570,7 +570,7 @@ private static URI stringToUri(String pathString) throws IOException {
 
   @Override
   public int compareTo(PathData o) {
-    return path.compareTo(((PathData)o).path);
+    return path.compareTo(o.path);
   }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index 995d8224345..9cc2ef77b96 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -1091,12 +1091,7 @@ private static boolean isSessionExpired(Code code) {
   }
 
   private static boolean shouldRetry(Code code) {
-    switch (code) {
-    case CONNECTIONLOSS:
-    case OPERATIONTIMEOUT:
-      return true;
-    }
-    return false;
+    return code == Code.CONNECTIONLOSS || code == Code.OPERATIONTIMEOUT;
   }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
index 15008addc8f..0d1e6e97346 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
@@ -19,8 +19,6 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
  * Singleton to get access to Http related configuration.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 67b89e1e5a7..d2664dcf2b0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -434,7 +434,7 @@ private static void addNoCacheFilter(WebAppContext ctxt) {
    * provided. This wrapper and all subclasses must create at least one
    * listener.
    */
-  public Connector createBaseListener(Configuration conf) throws IOException {
+  public Connector createBaseListener(Configuration conf) {
     return HttpServer2.createDefaultChannelConnector();
   }
 
@@ -527,8 +527,7 @@ protected void addDefaultServlets() {
     addServlet("conf", "/conf", ConfServlet.class);
   }
 
-  public void addContext(Context ctxt, boolean isFiltered)
-      throws IOException {
+  public void addContext(Context ctxt, boolean isFiltered) {
     webServer.addHandler(ctxt);
     addNoCacheFilter(webAppContext);
     defaultContexts.put(ctxt, isFiltered);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
index 54903394feb..cc1e517eaa5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
@@ -48,11 +48,11 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
 
   /* Class to id mappings */
   @VisibleForTesting
-  Map<Class, Byte> classToIdMap = new ConcurrentHashMap<Class, Byte>();
+  Map<Class<?>, Byte> classToIdMap = new ConcurrentHashMap<Class<?>, Byte>();
 
   /* Id to Class mappings */
   @VisibleForTesting
-  Map<Byte, Class> idToClassMap = new ConcurrentHashMap<Byte, Class>();
+  Map<Byte, Class<?>> idToClassMap = new ConcurrentHashMap<Byte, Class<?>>();
 
   /* The number of new classes (those not established by the constructor) */
   private volatile byte newClasses = 0;
@@ -65,7 +65,7 @@ byte getNewClasses() {
   /**
    * Used to add "predefined" classes and by Writable to copy "new" classes.
    */
-  private synchronized void addToMap(Class clazz, byte id) {
+  private synchronized void addToMap(Class<?> clazz, byte id) {
     if (classToIdMap.containsKey(clazz)) {
       byte b = classToIdMap.get(clazz);
       if (b != id) {
@@ -74,7 +74,7 @@ private synchronized void addToMap(Class clazz, byte id) {
       }
     }
     if (idToClassMap.containsKey(id)) {
-      Class c = idToClassMap.get(id);
+      Class<?> c = idToClassMap.get(id);
       if (!c.equals(clazz)) {
         throw new IllegalArgumentException("Id " + id + " exists but maps to " +
             c.getName() + " and not " + clazz.getName());
@@ -85,7 +85,7 @@ private synchronized void addToMap(Class clazz, byte id) {
   }
 
   /** Add a Class to the maps if it is not already present. */
-  protected synchronized void addToMap(Class clazz) {
+  protected synchronized void addToMap(Class<?> clazz) {
     if (classToIdMap.containsKey(clazz)) {
       return;
     }
@@ -98,12 +98,12 @@ protected synchronized void addToMap(Class clazz) {
   }
 
   /** @return the Class class for the specified id */
-  protected Class getClass(byte id) {
+  protected Class<?> getClass(byte id) {
     return idToClassMap.get(id);
   }
 
   /** @return the id for the specified Class */
-  protected byte getId(Class clazz) {
+  protected byte getId(Class<?> clazz) {
     return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1;
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
index 72c7098d7af..fec168b831e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
@@ -82,7 +82,7 @@ public boolean equals(Object obj) {
     }
 
     if (obj instanceof MapWritable) {
-      Map map = (Map) obj;
+      MapWritable map = (MapWritable) obj;
       if (size() != map.size()) {
         return false;
       }
@@ -114,7 +114,6 @@ public Set<Writable> keySet() {
   }
 
   @Override
-  @SuppressWarnings("unchecked")
   public Writable put(Writable key, Writable value) {
     addToMap(key.getClass());
     addToMap(value.getClass());
@@ -163,7 +162,6 @@ public void write(DataOutput out) throws IOException {
     }
   }
 
-  @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {
     super.readFields(in);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
index 0074b01f909..698d38d9c36 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
@@ -77,8 +77,7 @@ public Path getTestRootPath(FileContext fc, String pathString) {
   // the getAbsolutexxx method is needed because the root test dir
   // can be messed up by changing the working dir.
 
-  public String getAbsoluteTestRootDir(FileContext fc)
-      throws IOException {
+  public String getAbsoluteTestRootDir(FileContext fc) {
     if (absTestRootDir == null) {
       if (new Path(testRootDir).isAbsolute()) {
         absTestRootDir = testRootDir;
@@ -90,12 +89,11 @@ public String getAbsoluteTestRootDir(FileContext fc)
     return absTestRootDir;
   }
 
-  public Path getAbsoluteTestRootPath(FileContext fc) throws IOException {
+  public Path getAbsoluteTestRootPath(FileContext fc) {
     return fc.makeQualified(new Path(getAbsoluteTestRootDir(fc)));
   }
 
-  public Path getDefaultWorkingDirectory(FileContext fc)
-      throws IOException {
+  public Path getDefaultWorkingDirectory(FileContext fc) {
     return getTestRootPath(fc, "/user/" + System.getProperty("user.name"))
         .makeQualified(fc.getDefaultFileSystem().getUri(),
             fc.getWorkingDirectory());
@@ -106,8 +104,7 @@ public Path getDefaultWorkingDirectory(FileContext fc)
    */
   public static long createFile(FileContext fc, Path path, int numBlocks,
       CreateOpts... options) throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -146,8 +143,7 @@ public static long createFileNonRecursive(FileContext fc, Path path)
 
   public static void appendToFile(FileContext fc, Path path, int numBlocks,
       CreateOpts... options) throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -203,14 +199,11 @@ public static byte[] readFile(FileContext fc, Path path, int len)
   }
 
   public FileStatus containsPath(FileContext fc, Path path,
-      FileStatus[] dirList)
-    throws IOException {
+      FileStatus[] dirList) {
     return containsPath(getTestRootPath(fc, path.toString()), dirList);
   }
 
-  public static FileStatus containsPath(Path path,
-      FileStatus[] dirList)
-    throws IOException {
+  public static FileStatus containsPath(Path path, FileStatus[] dirList) {
     for(int i = 0; i < dirList.length; i ++) {
       if (path.equals(dirList[i].getPath()))
         return dirList[i];
@@ -219,8 +212,7 @@ public static FileStatus containsPath(Path path,
   }
 
   public FileStatus containsPath(FileContext fc, String path,
-      FileStatus[] dirList)
-     throws IOException {
+      FileStatus[] dirList) {
     return containsPath(fc, new Path(path), dirList);
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
index e10b22edb7c..0dd1e9aa3e0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
@@ -62,8 +62,7 @@ public Path getDefaultWorkingDirectory() throws IOException {
    */
   public long createFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -100,8 +99,7 @@ public long createFileNonRecursive(Path path) throws IOException {
 
   public void appendToFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
index eb5df084b97..9a5f40edf67 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
@@ -63,8 +63,7 @@ public Path getDefaultWorkingDirectory() throws IOException {
    */
   public long createFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -101,8 +100,7 @@ public long createFileNonRecursive(Path path) throws IOException {
 
   public void appendToFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -261,7 +259,7 @@ public FSDataOutputStream create(Path f, EnumSet<CreateFlag> createFlag,
     // Need to translate the FileContext-style options into FileSystem-style
 
     // Permissions with umask
-    CreateOpts.Perms permOpt = (CreateOpts.Perms) CreateOpts.getOpt(
+    CreateOpts.Perms permOpt = CreateOpts.getOpt(
         CreateOpts.Perms.class, opts);
     FsPermission umask = FsPermission.getUMask(fs.getConf());
     FsPermission permission = (permOpt != null) ? permOpt.getValue()
@@ -273,23 +271,22 @@ public FSDataOutputStream create(Path f, EnumSet<CreateFlag> createFlag,
     int bufferSize = fs.getConf().getInt(
         CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
         CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT);
-    CreateOpts.BufferSize bufOpt = (CreateOpts.BufferSize) CreateOpts.getOpt(
+    CreateOpts.BufferSize bufOpt = CreateOpts.getOpt(
         CreateOpts.BufferSize.class, opts);
     bufferSize = (bufOpt != null) ? bufOpt.getValue() : bufferSize;
     // replication
     short replication = fs.getDefaultReplication(f);
     CreateOpts.ReplicationFactor repOpt =
-        (CreateOpts.ReplicationFactor) CreateOpts.getOpt(
-            CreateOpts.ReplicationFactor.class, opts);
+        CreateOpts.getOpt(CreateOpts.ReplicationFactor.class, opts);
     replication = (repOpt != null) ? repOpt.getValue() : replication;
     // blockSize
     long blockSize = fs.getDefaultBlockSize(f);
-    CreateOpts.BlockSize blockOpt = (CreateOpts.BlockSize) CreateOpts.getOpt(
+    CreateOpts.BlockSize blockOpt = CreateOpts.getOpt(
        CreateOpts.BlockSize.class, opts);
     blockSize = (blockOpt != null) ? blockOpt.getValue() : blockSize;
     // Progressable
     Progressable progress = null;
-    CreateOpts.Progress progressOpt = (CreateOpts.Progress) CreateOpts.getOpt(
+    CreateOpts.Progress progressOpt = CreateOpts.getOpt(
        CreateOpts.Progress.class, opts);
     progress = (progressOpt != null) ? progressOpt.getValue() : progress;
     return fs.create(f, permission, overwrite, bufferSize, replication,
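
Note on the core API change: after this patch, CreateOpts.getOpt(..) is
generic, so call sites no longer cast its result; the single unchecked cast
lives inside getOpt, guarded by the opts[i].getClass() == clazz check. Below
is a minimal, self-contained sketch of the same idiom. The Opt, BlockSize and
Perms classes here are illustrative stand-ins, not Hadoop's types; in the
patch itself getOpt is package-private, so its real callers are classes in
org.apache.hadoop.fs such as FileContext and the test wrappers above.

// Illustrative sketch of the generic lookup idiom from HADOOP-10426.
// Stand-in types; not Hadoop's Options.CreateOpts.
public class GetOptSketch {
  static class Opt {}
  static final class BlockSize extends Opt {
    final long value;
    BlockSize(long value) { this.value = value; }
  }
  static final class Perms extends Opt {}

  // The unchecked cast is confined to this helper and is safe because
  // opts[i].getClass() == clazz has just been checked.
  static <T extends Opt> T getOpt(Class<T> clazz, Opt... opts) {
    if (opts == null) {
      throw new IllegalArgumentException("Null opt");
    }
    T result = null;
    for (int i = 0; i < opts.length; ++i) {
      if (opts[i].getClass() == clazz) {
        if (result != null) {
          throw new IllegalArgumentException("multiple opts varargs: " + clazz);
        }
        @SuppressWarnings("unchecked")
        T t = (T)opts[i];
        result = t;
      }
    }
    return result;
  }

  public static void main(String[] args) {
    Opt[] opts = { new Perms(), new BlockSize(128L << 20) };
    // Before the patch, such a call site needed an explicit cast:
    //   BlockSize bs = (BlockSize) getOpt(BlockSize.class, opts);
    BlockSize bs = getOpt(BlockSize.class, opts);
    System.out.println(bs != null ? bs.value : -1L);
  }
}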