HADOOP-6537 Declare more detailed exceptions in FileContext and AbstractFileSystem

(Suresh Srinivas via Sanjay Radia)


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@918309 13f79535-47bb-0310-9956-ffa450edef68
Sanjay Radia 2010-03-03 02:48:49 +00:00
parent 6b9fb8c78b
commit 1ab5aa5279
10 changed files with 1166 additions and 359 deletions


@@ -185,6 +185,9 @@ Trunk (unreleased changes)
     HADOOP-6599 Split existing RpcMetrics into RpcMetrics & RpcDetailedMetrics.
     (Suresh Srinivas via Sanjay Radia)
 
+    HADOOP-6537 Declare more detailed exceptions in FileContext and AbstractFileSystem
+    (Suresh Srinivas via Sanjay Radia)
+
   OPTIMIZATIONS
 
     HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..).


@@ -0,0 +1,35 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop;
/**
* Indicates that a method has been passed an illegal or invalid argument. This
* exception is thrown instead of IllegalArgumentException to differentiate the
* exception thrown in the Hadoop implementation from the one thrown in the JDK.
*/
public class HadoopIllegalArgumentException extends IllegalArgumentException {
private static final long serialVersionUID = 1L;
/**
* Constructs exception with the specified detail message.
* @param message detailed message.
*/
public HadoopIllegalArgumentException(final String message) {
super(message);
}
}
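
Because HadoopIllegalArgumentException extends IllegalArgumentException, existing callers that catch the JDK type continue to work, while callers that care about where the error originated can catch the Hadoop subtype first. A minimal sketch of that pattern (the validateReplication helper below is hypothetical and not part of this commit):

    import org.apache.hadoop.HadoopIllegalArgumentException;

    public class ReplicationCheck {
      // Hypothetical Hadoop-style validation: argument checks in Hadoop code
      // throw the Hadoop subtype rather than a bare IllegalArgumentException.
      static void validateReplication(short replication) {
        if (replication <= 0) {
          throw new HadoopIllegalArgumentException(
              "replication must be positive: " + replication);
        }
      }

      public static void main(String[] args) {
        try {
          validateReplication((short) 0);
        } catch (HadoopIllegalArgumentException e) {
          System.err.println("Rejected by Hadoop validation: " + e.getMessage());
        } catch (IllegalArgumentException e) {
          System.err.println("Rejected by a JDK/library check: " + e.getMessage());
        }
      }
    }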


@@ -31,6 +31,7 @@ import java.util.concurrent.ConcurrentHashMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -38,29 +39,31 @@ import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.InvalidPathException;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.Progressable;
 /**
- * This class provides an interface for implementors of a Hadoop filesystem
+ * This class provides an interface for implementors of a Hadoop file system
  * (analogous to the VFS of Unix). Applications do not access this class;
- * instead they access files across all filesystems using {@link FileContext}.
+ * instead they access files across all file systems using {@link FileContext}.
  *
  * Pathnames passed to AbstractFileSystem can be fully qualified URI that
- * matches the "this" filesystem (ie same scheme and authority)
+ * matches the "this" file system (ie same scheme and authority)
  * or a Slash-relative name that is assumed to be relative
- * to the root of the "this" filesystem .
+ * to the root of the "this" file system .
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving /*Evolving for a release,to be changed to Stable */
 public abstract class AbstractFileSystem {
   static final Log LOG = LogFactory.getLog(AbstractFileSystem.class);
-  /** Recording statistics per a filesystem class. */
+  /** Recording statistics per a file system class. */
   private static final Map<Class<? extends AbstractFileSystem>, Statistics>
       STATISTICS_TABLE =
       new IdentityHashMap<Class<? extends AbstractFileSystem>, Statistics>();
-  /** Cache of constructors for each filesystem class. */
+  /** Cache of constructors for each file system class. */
   private static final Map<Class<?>, Constructor<?>> CONSTRUCTOR_CACHE =
     new ConcurrentHashMap<Class<?>, Constructor<?>>();
@@ -118,21 +121,25 @@ public abstract class AbstractFileSystem {
   }
   /**
-   * Create a file system instance for the specified uri using the conf.
-   * The conf is used to find the class name that implements the filesystem.
-   * The conf is also passed to the filesystem for its configuration.
-   * @param uri
-   * @param conf
-   * @return
-   * @throws IOException
+   * Create a file system instance for the specified uri using the conf. The
+   * conf is used to find the class name that implements the file system. The
+   * conf is also passed to the file system for its configuration.
+   *
+   * @param uri URI of the file system
+   * @param conf Configuration for the file system
+   *
+   * @return Returns the file system for the given URI
+   *
+   * @throws UnsupportedFileSystemException file system for <code>uri</code> is
+   *           not found
    */
-  private static AbstractFileSystem createFileSystem(URI uri,
-      Configuration conf) throws IOException {
+  private static AbstractFileSystem createFileSystem(URI uri, Configuration conf)
+      throws UnsupportedFileSystemException {
     Class<?> clazz = conf.getClass("fs.AbstractFileSystem." +
         uri.getScheme() + ".impl", null);
     if (clazz == null) {
-      throw new IOException("No AbstractFileSystem for scheme: "
-          + uri.getScheme());
+      throw new UnsupportedFileSystemException(
+          "No AbstractFileSystem for scheme: " + uri.getScheme());
     }
     return (AbstractFileSystem) newInstance(clazz, uri, conf);
   }
@@ -159,7 +166,7 @@ public abstract class AbstractFileSystem {
     }
   }
-  protected static synchronized void printStatistics() throws IOException {
+  protected static synchronized void printStatistics() {
     for (Map.Entry<Class<? extends AbstractFileSystem>, Statistics> pair:
         STATISTICS_TABLE.entrySet()) {
       System.out.println(" FileSystem " + pair.getKey().getName() +
@@ -167,20 +174,25 @@ public abstract class AbstractFileSystem {
     }
   }
   /**
-   * The main factory method for creating a filesystem.
-   * Get a filesystem for the URI's scheme and authority.
-   * The scheme of the URI determines a configuration property name,
-   * <tt>fs.AbstractFileSystem.<i>scheme</i>.impl</tt> whose value names
-   * the AbstractFileSystem class.
-   * The entire URI and conf is passed to the AbstractFileSystem factory
-   * method.
+   * The main factory method for creating a file system. Get a file system for
+   * the URI's scheme and authority. The scheme of the <code>uri</code>
+   * determines a configuration property name,
+   * <tt>fs.AbstractFileSystem.<i>scheme</i>.impl</tt> whose value names the
+   * AbstractFileSystem class.
+   *
+   * The entire URI and conf is passed to the AbstractFileSystem factory method.
+   *
    * @param uri for the file system to be created.
-   * @param conf which is passed to the filesystem impl.
+   * @param conf which is passed to the file system impl.
+   *
+   * @return file system for the given URI.
+   *
+   * @throws UnsupportedFileSystemException if the file system for
+   *           <code>uri</code> is not supported.
    */
   static AbstractFileSystem get(final URI uri, final Configuration conf)
-      throws IOException {
+      throws UnsupportedFileSystemException {
     return createFileSystem(uri, conf);
   }
@@ -191,10 +203,12 @@ public abstract class AbstractFileSystem {
    * @param supportedScheme the scheme supported by the implementor
    * @param authorityNeeded if true then theURI must have authority, if false
    *          then the URI must have null authority.
-   * @throws URISyntaxException
+   *
+   * @throws URISyntaxException <code>uri</code> has syntax error
    */
   protected AbstractFileSystem(final URI uri, final String supportedScheme,
-      final boolean authorityNeeded, final int defaultPort) throws URISyntaxException {
+      final boolean authorityNeeded, final int defaultPort)
+      throws URISyntaxException {
     myUri = getUri(uri, supportedScheme, authorityNeeded, defaultPort);
     statistics = getStatistics(supportedScheme, getClass());
   }
@@ -202,46 +216,48 @@ public abstract class AbstractFileSystem {
   protected void checkScheme(URI uri, String supportedScheme) {
     String scheme = uri.getScheme();
     if (scheme == null) {
-      throw new IllegalArgumentException("Uri without scheme: " + uri);
+      throw new HadoopIllegalArgumentException("Uri without scheme: " + uri);
     }
     if (!scheme.equals(supportedScheme)) {
-      throw new IllegalArgumentException("Uri scheme " + uri
+      throw new HadoopIllegalArgumentException("Uri scheme " + uri
           + " does not match the scheme " + supportedScheme);
     }
   }
   /**
    * Get the URI for the file system based on the given URI. The path, query
-   * part of the given URI is stripped out and default filesystem port is used
+   * part of the given URI is stripped out and default file system port is used
    * to form the URI.
    *
    * @param uri FileSystem URI.
    * @param authorityNeeded if true authority cannot be null in the URI. If
    *          false authority must be null.
    * @param defaultPort default port to use if port is not specified in the URI.
+   *
    * @return URI of the file system
-   * @throws URISyntaxException
+   *
+   * @throws URISyntaxException <code>uri</code> has syntax error
    */
   private URI getUri(URI uri, String supportedScheme,
       boolean authorityNeeded, int defaultPort) throws URISyntaxException {
     checkScheme(uri, supportedScheme);
-    // A filesystem implementation that requires authority must always
+    // A file system implementation that requires authority must always
     // specify default port
     if (defaultPort < 0 && authorityNeeded) {
-      throw new IllegalArgumentException(
+      throw new HadoopIllegalArgumentException(
          "FileSystem implementation error - default port " + defaultPort
          + " is not valid");
     }
     String authority = uri.getAuthority();
     if (!authorityNeeded) {
       if (authority != null) {
-        throw new IllegalArgumentException("Scheme with non-null authority: "
+        throw new HadoopIllegalArgumentException("Scheme with non-null authority: "
            + uri);
       }
       return new URI(supportedScheme + ":///");
     }
     if (authority == null) {
-      throw new IllegalArgumentException("Uri without authority: " + uri);
+      throw new HadoopIllegalArgumentException("Uri without authority: " + uri);
     }
     int port = uri.getPort();
     port = port == -1 ? defaultPort : port;
@@ -249,15 +265,17 @@ public abstract class AbstractFileSystem {
   }
   /**
-   * The default port of this filesystem.
-   * @return default port of this filesystem's Uri scheme
-   *         A uri with a port of -1 => default port;
+   * The default port of this file system.
+   *
+   * @return default port of this file system's Uri scheme
+   *         A uri with a port of -1 => default port;
    */
   protected abstract int getUriDefaultPort();
   /**
    * Returns a URI whose scheme and authority identify this FileSystem.
-   * @return the uri of this filesystem.
+   *
+   * @return the uri of this file system.
    */
   protected URI getUri() {
     return myUri;
@@ -269,6 +287,8 @@ public abstract class AbstractFileSystem {
    * If the path is fully qualified URI, then its scheme and authority
    * matches that of this file system. Otherwise the path must be
    * slash-relative name.
+   *
+   * @throws InvalidPathException if the path is invalid
    */
   protected void checkPath(Path path) {
     URI uri = path.toUri();
@@ -279,10 +299,10 @@ public abstract class AbstractFileSystem {
       if (path.isUriPathAbsolute()) {
         return;
       }
-      throw new IllegalArgumentException("relative paths not allowed:" +
+      throw new InvalidPathException("relative paths not allowed:" +
           path);
     } else {
-      throw new IllegalArgumentException(
+      throw new InvalidPathException(
           "Path without scheme with non-null autorhrity:" + path);
     }
   }
@@ -295,8 +315,8 @@ public abstract class AbstractFileSystem {
         (thisAuthority != null &&
          !thisAuthority.equalsIgnoreCase(thatAuthority)) ||
         (thisAuthority == null && thatAuthority != null)) {
-      throw new IllegalArgumentException("Wrong FS: " + path +
-          ", expected: "+this.getUri());
+      throw new InvalidPathException("Wrong FS: " + path + ", expected: "
+          + this.getUri());
     }
     int thisPort = this.getUri().getPort();
@@ -305,42 +325,46 @@ public abstract class AbstractFileSystem {
       thatPort = this.getUriDefaultPort();
     }
     if (thisPort != thatPort) {
-      throw new IllegalArgumentException("Wrong FS: "+path+
-          ", expected: "+this.getUri());
+      throw new InvalidPathException("Wrong FS: " + path + ", expected: "
+          + this.getUri());
     }
   }
   /**
-   * Get the path-part of a pathname. Checks that URI matches this filesystem
+   * Get the path-part of a pathname. Checks that URI matches this file system
    * and that the path-part is a valid name.
-   * @param p
+   *
+   * @param p path
+   *
    * @return path-part of the Path p
    */
   protected String getUriPath(final Path p) {
     checkPath(p);
     String s = p.toUri().getPath();
     if (!isValidName(s)) {
-      throw new IllegalArgumentException("Path part " + s + " from URI" +
-          p + " is not a valid filename.");
+      throw new InvalidPathException("Path part " + s + " from URI" + p
+          + " is not a valid filename.");
     }
     return s;
   }
   /**
    * Some file systems like LocalFileSystem have an initial workingDir
-   * that we use as the starting workingDir. For other file systems
+   * that is used as the starting workingDir. For other file systems
    * like HDFS there is no built in notion of an initial workingDir.
    *
-   * @return the initial workingDir if the filesystem if it has such a notion
+   * @return the initial workingDir if the file system has such a notion
    *         otherwise return a null.
    */
   protected Path getInitialWorkingDirectory() {
     return null;
   }
   /**
-   * Return the current user's home directory in this filesystem.
+   * Return the current user's home directory in this file system.
    * The default implementation returns "/user/$USER/".
+   *
+   * @return current user's home directory.
    */
   protected Path getHomeDirectory() {
     return new Path("/user/"+System.getProperty("user.name")).makeQualified(
@@ -349,8 +373,10 @@ public abstract class AbstractFileSystem {
   /**
    * Return a set of server default configuration values.
+   *
    * @return server default configuration values
-   * @throws IOException
+   *
+   * @throws IOException an I/O error occurred
    */
   protected abstract FsServerDefaults getServerDefaults() throws IOException;
@@ -361,8 +387,10 @@ public abstract class AbstractFileSystem {
    * (i.e. umask has been applied).
    */
   protected final FSDataOutputStream create(final Path f,
       final EnumSet<CreateFlag> createFlag, Options.CreateOpts... opts)
-      throws IOException, UnresolvedLinkException {
+      throws AccessControlException, FileAlreadyExistsException,
+      FileNotFoundException, ParentNotDirectoryException,
+      UnsupportedFileSystemException, UnresolvedLinkException, IOException {
     checkPath(f);
     int bufferSize = -1;
     short replication = -1;
@@ -375,46 +403,53 @@ public abstract class AbstractFileSystem {
     for (CreateOpts iOpt : opts) {
       if (CreateOpts.BlockSize.class.isInstance(iOpt)) {
         if (blockSize != -1) {
-          throw new IllegalArgumentException("multiple varargs of same kind");
+          throw new HadoopIllegalArgumentException(
+              "BlockSize option is set multiple times");
         }
         blockSize = ((CreateOpts.BlockSize) iOpt).getValue();
       } else if (CreateOpts.BufferSize.class.isInstance(iOpt)) {
         if (bufferSize != -1) {
-          throw new IllegalArgumentException("multiple varargs of same kind");
+          throw new HadoopIllegalArgumentException(
+              "BufferSize option is set multiple times");
         }
         bufferSize = ((CreateOpts.BufferSize) iOpt).getValue();
       } else if (CreateOpts.ReplicationFactor.class.isInstance(iOpt)) {
         if (replication != -1) {
-          throw new IllegalArgumentException("multiple varargs of same kind");
+          throw new HadoopIllegalArgumentException(
+              "ReplicationFactor option is set multiple times");
         }
         replication = ((CreateOpts.ReplicationFactor) iOpt).getValue();
       } else if (CreateOpts.BytesPerChecksum.class.isInstance(iOpt)) {
         if (bytesPerChecksum != -1) {
-          throw new IllegalArgumentException("multiple varargs of same kind");
+          throw new HadoopIllegalArgumentException(
+              "BytesPerChecksum option is set multiple times");
         }
         bytesPerChecksum = ((CreateOpts.BytesPerChecksum) iOpt).getValue();
       } else if (CreateOpts.Perms.class.isInstance(iOpt)) {
         if (permission != null) {
-          throw new IllegalArgumentException("multiple varargs of same kind");
+          throw new HadoopIllegalArgumentException(
+              "Perms option is set multiple times");
         }
         permission = ((CreateOpts.Perms) iOpt).getValue();
       } else if (CreateOpts.Progress.class.isInstance(iOpt)) {
         if (progress != null) {
-          throw new IllegalArgumentException("multiple varargs of same kind");
+          throw new HadoopIllegalArgumentException(
+              "Progress option is set multiple times");
         }
         progress = ((CreateOpts.Progress) iOpt).getValue();
       } else if (CreateOpts.CreateParent.class.isInstance(iOpt)) {
         if (createParent != null) {
-          throw new IllegalArgumentException("multiple varargs of same kind");
+          throw new HadoopIllegalArgumentException(
+              "CreateParent option is set multiple times");
         }
         createParent = ((CreateOpts.CreateParent) iOpt).getValue();
       } else {
-        throw new IllegalArgumentException("Unkown CreateOpts of type " +
+        throw new HadoopIllegalArgumentException("Unkown CreateOpts of type " +
            iOpt.getClass().getName());
       }
     }
     if (permission == null) {
-      throw new IllegalArgumentException("no permission supplied");
+      throw new HadoopIllegalArgumentException("no permission supplied");
     }
@@ -441,7 +476,7 @@ public abstract class AbstractFileSystem {
     }
     if (blockSize % bytesPerChecksum != 0) {
-      throw new IllegalArgumentException(
+      throw new HadoopIllegalArgumentException(
          "blockSize should be a multiple of checksumsize");
     }
@@ -455,64 +490,70 @@ public abstract class AbstractFileSystem {
    * have been declared explicitly.
    */
   protected abstract FSDataOutputStream createInternal(Path f,
-      EnumSet<CreateFlag> flag, FsPermission absolutePermission, int bufferSize,
-      short replication, long blockSize, Progressable progress,
+      EnumSet<CreateFlag> flag, FsPermission absolutePermission,
+      int bufferSize, short replication, long blockSize, Progressable progress,
       int bytesPerChecksum, boolean createParent)
-      throws IOException, UnresolvedLinkException;
+      throws AccessControlException, FileAlreadyExistsException,
+      FileNotFoundException, ParentNotDirectoryException,
+      UnsupportedFileSystemException, UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
-   * f must be fully qualified and the permission is absolute (ie umask has been
-   * applied).
+   * f must be fully qualified and the permission is absolute (i.e.
+   * umask has been applied).
    */
-  protected abstract void mkdir(final Path dir,
-      final FsPermission permission, final boolean createParent)
-      throws IOException, UnresolvedLinkException;
+  protected abstract void mkdir(final Path dir, final FsPermission permission,
+      final boolean createParent) throws AccessControlException,
+      FileAlreadyExistsException, FileNotFoundException,
+      UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#delete(Path, boolean)} except that Path f must be for
-   * this filesystem.
+   * this file system.
    */
   protected abstract boolean delete(final Path f, final boolean recursive)
-      throws IOException, UnresolvedLinkException;
+      throws AccessControlException, FileNotFoundException,
+      UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#open(Path)} except that Path f must be for this
-   * filesystem.
+   * file system.
    */
-  protected FSDataInputStream open(final Path f)
-      throws IOException, UnresolvedLinkException {
+  protected FSDataInputStream open(final Path f) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException {
     return open(f, getServerDefaults().getFileBufferSize());
   }
   /**
    * The specification of this method matches that of
    * {@link FileContext#open(Path, int)} except that Path f must be for this
-   * filesystem.
-   * @throws UnresolvedLinkException
+   * file system.
    */
   protected abstract FSDataInputStream open(final Path f, int bufferSize)
-      throws IOException, UnresolvedLinkException;
+      throws AccessControlException, FileNotFoundException,
+      UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#setReplication(Path, short)} except that Path f must be
-   * for this filesystem.
+   * for this file system.
    */
   protected abstract boolean setReplication(final Path f,
-      final short replication) throws IOException, UnresolvedLinkException;
+      final short replication) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
-   * f must be for this filesystem.
+   * f must be for this file system.
    */
   protected final void rename(final Path src, final Path dst,
-      final Options.Rename... options)
-      throws IOException, UnresolvedLinkException {
+      final Options.Rename... options) throws AccessControlException,
+      FileAlreadyExistsException, FileNotFoundException,
+      ParentNotDirectoryException, UnresolvedLinkException, IOException {
     boolean overwrite = false;
     if (null != options) {
       for (Rename option : options) {
@@ -527,22 +568,26 @@ public abstract class AbstractFileSystem {
   /**
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
-   * f must be for this filesystem and NO OVERWRITE is performed.
+   * f must be for this file system and NO OVERWRITE is performed.
    *
-   * Filesystems that do not have a built in overwrite need implement only this
+   * File systems that do not have a built in overwrite need implement only this
    * method and can take advantage of the default impl of the other
    * {@link #renameInternal(Path, Path, boolean)}
    */
   protected abstract void renameInternal(final Path src, final Path dst)
-      throws IOException, UnresolvedLinkException;
+      throws AccessControlException, FileAlreadyExistsException,
+      FileNotFoundException, ParentNotDirectoryException,
+      UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
-   * f must be for this filesystem.
+   * f must be for this file system.
    */
   protected void renameInternal(final Path src, final Path dst,
-      boolean overwrite) throws IOException, UnresolvedLinkException {
+      boolean overwrite) throws AccessControlException,
+      FileAlreadyExistsException, FileNotFoundException,
+      ParentNotDirectoryException, UnresolvedLinkException, IOException {
     // Default implementation deals with overwrite in a non-atomic way
     final FileStatus srcStatus = getFileLinkStatus(src);
     if (srcStatus == null) {
@@ -619,35 +664,38 @@ public abstract class AbstractFileSystem {
   /**
    * The specification of this method matches that of
    * {@link FileContext#setPermission(Path, FsPermission)} except that Path f
-   * must be for this filesystem.
+   * must be for this file system.
    */
   protected abstract void setPermission(final Path f,
-      final FsPermission permission)
-      throws IOException, UnresolvedLinkException;
+      final FsPermission permission) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#setOwner(Path, String, String)} except that Path f must
-   * be for this filesystem.
+   * be for this file system.
    */
   protected abstract void setOwner(final Path f, final String username,
-      final String groupname) throws IOException, UnresolvedLinkException;
+      final String groupname) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#setTimes(Path, long, long)} except that Path f must be
-   * for this filesystem.
+   * for this file system.
    */
   protected abstract void setTimes(final Path f, final long mtime,
-      final long atime) throws IOException, UnresolvedLinkException;
+      final long atime) throws AccessControlException, FileNotFoundException,
+      UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#getFileChecksum(Path)} except that Path f must be for
-   * this filesystem.
+   * this file system.
    */
   protected abstract FileChecksum getFileChecksum(final Path f)
-      throws IOException, UnresolvedLinkException;
+      throws AccessControlException, FileNotFoundException,
+      UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
@@ -655,8 +703,9 @@ public abstract class AbstractFileSystem {
    * except that an UnresolvedLinkException may be thrown if a symlink is
    * encountered in the path.
    */
   protected abstract FileStatus getFileStatus(final Path f)
-      throws IOException, UnresolvedLinkException;
+      throws AccessControlException, FileNotFoundException,
+      UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
@@ -667,26 +716,27 @@ public abstract class AbstractFileSystem {
    * equivalent to {@link AbstractFileSystem#getFileStatus(Path)}.
    */
   protected FileStatus getFileLinkStatus(final Path f)
-      throws IOException, UnresolvedLinkException {
+      throws AccessControlException, FileNotFoundException,
+      UnsupportedFileSystemException, IOException {
     return getFileStatus(f);
   }
   /**
    * The specification of this method matches that of
    * {@link FileContext#getFileBlockLocations(Path, long, long)} except that
-   * Path f must be for this filesystem.
+   * Path f must be for this file system.
    */
   protected abstract BlockLocation[] getFileBlockLocations(final Path f,
-      final long start, final long len)
-      throws IOException, UnresolvedLinkException;
+      final long start, final long len) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#getFsStatus(Path)} except that Path f must be for this
-   * filesystem.
+   * file system.
    */
-  protected FsStatus getFsStatus(final Path f)
-      throws IOException, UnresolvedLinkException {
+  protected FsStatus getFsStatus(final Path f) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException {
     // default impl gets FsStatus of root
     return getFsStatus();
   }
@@ -695,21 +745,23 @@ public abstract class AbstractFileSystem {
    * The specification of this method matches that of
    * {@link FileContext#getFsStatus(Path)}.
    */
-  protected abstract FsStatus getFsStatus() throws IOException;
+  protected abstract FsStatus getFsStatus() throws AccessControlException,
+      FileNotFoundException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#listStatus(Path)} except that Path f must be for this
-   * filesystem.
+   * file system.
    */
   protected abstract FileStatus[] listStatus(final Path f)
-      throws IOException, UnresolvedLinkException;
+      throws AccessControlException, FileNotFoundException,
+      UnresolvedLinkException, IOException;
   /**
    * The specification of this method matches that of
    * {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f
-   * must be for this filesystem.
+   * must be for this file system.
    */
   protected abstract void setVerifyChecksum(final boolean verifyChecksum)
-      throws IOException;
+      throws AccessControlException, IOException;
 }
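
With the exceptions above declared explicitly, callers of the create path can react to specific failures instead of a bare IOException, and repeating a CreateOpts vararg now fails with a message naming the offending option. A small usage sketch, assuming the FileContext#create(Path, EnumSet<CreateFlag>, CreateOpts...) API and the CreateOpts.blockSize/bufferSize factory methods of this era; the path and sizes are illustrative only:

    import java.io.IOException;
    import java.util.EnumSet;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileAlreadyExistsException;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Options.CreateOpts;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.AccessControlException;

    public class CreateExample {
      public static void main(String[] args) throws IOException {
        FileContext fc = FileContext.getFileContext(new Configuration());
        Path file = new Path("/tmp/example.txt");   // illustrative path
        try {
          // Each CreateOpts kind may appear at most once; repeating one now
          // fails with e.g. "BlockSize option is set multiple times".
          FSDataOutputStream out = fc.create(file,
              EnumSet.of(CreateFlag.CREATE),
              CreateOpts.blockSize(64 * 1024 * 1024),
              CreateOpts.bufferSize(4096));
          out.writeUTF("hello");
          out.close();
        } catch (FileAlreadyExistsException e) {
          // Declared explicitly by this change instead of surfacing as IOException.
          System.err.println("File already exists: " + e.getMessage());
        } catch (AccessControlException e) {
          System.err.println("Permission denied: " + e.getMessage());
        }
      }
    }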

File diff suppressed because it is too large.


@@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import org.apache.hadoop.HadoopIllegalArgumentException;
/**
* Path string is invalid either because it has invalid characters or due to
* other file system specific reasons.
*/
public class InvalidPathException extends HadoopIllegalArgumentException {
private static final long serialVersionUID = 1L;
/**
* Constructs exception with the specified detail message.
*
* @param path invalid path.
*/
public InvalidPathException(final String path) {
super("Invalid path name " + path);
}
/**
* Constructs exception with the specified detail message.
*
* @param path invalid path.
* @param reason Reason <code>path</code> is invalid
*/
public InvalidPathException(final String path, final String reason) {
super("Invalid path " + path
+ (reason == null ? "" : ". (" + reason + ")"));
}
}
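
InvalidPathException extends HadoopIllegalArgumentException, so it is unchecked; methods such as checkPath() and getUriPath() above can throw it without listing it in their throws clause, yet callers can still catch the path-specific failure. A minimal sketch (the exception is constructed directly here purely for illustration):

    import org.apache.hadoop.fs.InvalidPathException;

    public class PathCheck {
      public static void main(String[] args) {
        try {
          // In real use this would come out of a path check such as
          // AbstractFileSystem.checkPath(); constructed directly here.
          throw new InvalidPathException("foo/bar", "relative paths not allowed");
        } catch (InvalidPathException e) {
          // Prints: Invalid path foo/bar. (relative paths not allowed)
          System.err.println(e.getMessage());
        }
      }
    }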


@@ -0,0 +1,35 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.IOException;
/**
* File system for a given file system name/scheme is not supported
*/
public class UnsupportedFileSystemException extends IOException {
private static final long serialVersionUID = 1L;
/**
* Constructs exception with the specified detail message.
* @param message exception message.
*/
UnsupportedFileSystemException(final String message) {
super(message);
}
}
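
UnsupportedFileSystemException is the checked counterpart: it extends IOException and is what AbstractFileSystem.createFileSystem() now throws when no class is registered under fs.AbstractFileSystem.<scheme>.impl. A sketch of how it surfaces to an application, assuming the FileContext.getFileContext(URI, Configuration) factory of this era; the "nosuchfs" scheme is deliberately unregistered:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.UnsupportedFileSystemException;

    public class SchemeLookup {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        try {
          // The lookup key is fs.AbstractFileSystem.<scheme>.impl; with no
          // entry for "nosuchfs" the factory fails with this checked exception.
          FileContext.getFileContext(URI.create("nosuchfs:///"), conf);
        } catch (UnsupportedFileSystemException e) {
          // e.g. "No AbstractFileSystem for scheme: nosuchfs"
          System.err.println(e.getMessage());
        }
      }
    }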


@@ -0,0 +1,47 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ipc;
/**
* Indicates an exception in the RPC client
*/
public class RpcClientException extends RpcException {
private static final long serialVersionUID = 1L;
/**
* Constructs exception with the specified detail message.
*
* @param message detailed message.
*/
RpcClientException(final String message) {
super(message);
}
/**
* Constructs exception with the specified detail message and cause.
*
* @param message message.
* @param cause the cause (can be retrieved by the {@link #getCause()} method).
*          (A <tt>null</tt> value is permitted, and indicates that the cause
*          is nonexistent or unknown.)
*/
RpcClientException(final String message, final Throwable cause) {
super(message, cause);
}
}


@@ -0,0 +1,49 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ipc;
import java.io.IOException;
/**
* Indicates an exception during the execution of a remote procedure call.
*/
public class RpcException extends IOException {
private static final long serialVersionUID = 1L;
/**
* Constructs exception with the specified detail message.
*
* @param message detailed message.
*/
RpcException(final String message) {
super(message);
}
/**
* Constructs exception with the specified detail message and cause.
*
* @param message message.
* @param cause the cause (can be retrieved by the {@link #getCause()} method).
*          (A <tt>null</tt> value is permitted, and indicates that the cause
*          is nonexistent or unknown.)
*/
RpcException(final String message, final Throwable cause) {
super(message, cause);
}
}


@@ -0,0 +1,47 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ipc;
/**
* Indicates an exception on the RPC server
*/
public class RpcServerException extends RpcException {
private static final long serialVersionUID = 1L;
/**
* Constructs exception with the specified detail message.
*
* @param message detailed message.
*/
RpcServerException(final String message) {
super(message);
}
/**
* Constructs exception with the specified detail message and cause.
*
* @param message message.
* @param cause the cause (can be retrieved by the {@link #getCause()} method).
*          (A <tt>null</tt> value is permitted, and indicates that the cause
*          is nonexistent or unknown.)
*/
RpcServerException(final String message, final Throwable cause) {
super(message, cause);
}
}


@@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ipc;
/**
* Indicates that the RPC server encountered an undeclared exception from the
* service
*/
public class UnexpectedServerException extends RpcException {
private static final long serialVersionUID = 1L;
/**
* Constructs exception with the specified detail message.
*
* @param message detailed message.
*/
UnexpectedServerException(final String message) {
super(message);
}
/**
* Constructs exception with the specified detail message and cause.
*
* @param message message.
* @param cause the cause (can be retrieved by the {@link #getCause()} method).
*          (A <tt>null</tt> value is permitted, and indicates that the cause
*          is nonexistent or unknown.)
*/
UnexpectedServerException(final String message, final Throwable cause) {
super(message, cause);
}
}
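
Taken together, the new IPC exceptions form a small hierarchy under IOException: RpcClientException, RpcServerException and UnexpectedServerException all extend RpcException, and their constructors are package-private, so only the org.apache.hadoop.ipc layer creates them while callers are free to catch them. A hedged sketch of how a caller might tell the cases apart (RpcCall and handle() are hypothetical, not part of this commit):

    import java.io.IOException;
    import org.apache.hadoop.ipc.RpcClientException;
    import org.apache.hadoop.ipc.RpcException;
    import org.apache.hadoop.ipc.RpcServerException;
    import org.apache.hadoop.ipc.UnexpectedServerException;

    public class RpcErrorHandling {
      /** Hypothetical stand-in for invoking some RPC proxy method. */
      interface RpcCall {
        void invoke() throws IOException;
      }

      static void handle(RpcCall call) {
        try {
          call.invoke();
        } catch (RpcClientException e) {
          System.err.println("Failure on the client side of the call: " + e);
        } catch (RpcServerException e) {
          System.err.println("RPC server reported a failure: " + e);
        } catch (UnexpectedServerException e) {
          System.err.println("Server hit an undeclared exception: " + e);
        } catch (RpcException e) {
          System.err.println("Other RPC-layer failure: " + e);
        } catch (IOException e) {
          System.err.println("I/O failure outside the RPC layer: " + e);
        }
      }
    }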