From a5290c9eca69027cff2448d05fee6983cbb54cd7 Mon Sep 17 00:00:00 2001 From: Tsz-wo Sze Date: Tue, 10 May 2011 21:29:34 +0000 Subject: [PATCH] HADOOP-7271. Standardize shell command error messages. Contributed by Daryn Sharp git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1101653 13f79535-47bb-0310-9956-ffa450edef68 --- CHANGES.txt | 3 + src/java/org/apache/hadoop/fs/FsShell.java | 133 ++++++---------- .../apache/hadoop/fs/FsShellPermissions.java | 6 - .../org/apache/hadoop/fs/shell/Command.java | 18 +-- .../org/apache/hadoop/fs/shell/Count.java | 7 - .../org/apache/hadoop/fs/shell/Display.java | 20 +-- src/java/org/apache/hadoop/fs/shell/Ls.java | 6 - .../org/apache/hadoop/fs/shell/Mkdir.java | 9 +- .../org/apache/hadoop/fs/shell/PathData.java | 3 +- .../hadoop/fs/shell/PathExceptions.java | 147 ++++++++++++++++++ .../hadoop/fs/shell/SetReplication.java | 11 +- src/java/org/apache/hadoop/fs/shell/Tail.java | 9 +- .../hadoop/fs/TestFsShellReturnCode.java | 2 +- .../hadoop/fs/shell/TestPathExceptions.java | 56 +++++++ 14 files changed, 275 insertions(+), 155 deletions(-) create mode 100644 src/java/org/apache/hadoop/fs/shell/PathExceptions.java create mode 100644 src/test/core/org/apache/hadoop/fs/shell/TestPathExceptions.java diff --git a/CHANGES.txt b/CHANGES.txt index d141262a6d6..9bb4bb57a81 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -130,6 +130,9 @@ Trunk (unreleased changes) HADOOP-7238. Refactor the cat and text commands to conform to new FsCommand class. (Daryn Sharp via szetszwo) + HADOOP-7271. Standardize shell command error messages. 
(Daryn Sharp + via szetszwo) + OPTIMIZATIONS BUG FIXES diff --git a/src/java/org/apache/hadoop/fs/FsShell.java b/src/java/org/apache/hadoop/fs/FsShell.java index a5734a6801b..273f06f75b8 100644 --- a/src/java/org/apache/hadoop/fs/FsShell.java +++ b/src/java/org/apache/hadoop/fs/FsShell.java @@ -28,6 +28,8 @@ import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.TimeZone; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -38,6 +40,8 @@ import org.apache.hadoop.fs.shell.Command; import org.apache.hadoop.fs.shell.CommandFactory; import org.apache.hadoop.fs.shell.CommandFormat; import org.apache.hadoop.fs.shell.FsCommand; +import org.apache.hadoop.fs.shell.PathData; +import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; @@ -207,7 +211,7 @@ public class FsShell extends Configured implements Tool { } FileStatus[] srcs = srcFS.globStatus(srcpath); if (null == srcs) { - throw new IOException(srcpath + ": No such file or directory"); + throw new PathNotFoundException(srcstr); } boolean dstIsDir = dst.isDirectory(); if (srcs.length > 1 && !dstIsDir) { @@ -323,7 +327,7 @@ public class FsShell extends Configured implements Tool { final Path srcPath = new Path(path); final FileSystem srcFs = srcPath.getFileSystem(getConf()); if (! 
srcFs.exists(srcPath)) { - throw new FileNotFoundException("Cannot access "+srcPath.toString()); + throw new PathNotFoundException(path); } final FsStatus stats = srcFs.getStatus(srcPath); final int PercentUsed = (int)(100.0f * (float)stats.getUsed() / (float)stats.getCapacity()); @@ -379,8 +383,7 @@ public class FsShell extends Configured implements Tool { } if ((statusToPrint == null) || ((statusToPrint.length == 0) && (!srcFs.exists(srcPath)))){ - throw new FileNotFoundException("Cannot access " + src - + ": No such file or directory."); + throw new PathNotFoundException(src); } if (!summary) { @@ -463,15 +466,20 @@ public class FsShell extends Configured implements Tool { if (!argv[i].startsWith("-") || argv[i].length() > 2) throw new IOException("Not a flag: " + argv[i]); char flag = argv[i].toCharArray()[1]; - Path f = new Path(argv[++i]); - FileSystem srcFs = f.getFileSystem(getConf()); + PathData item = new PathData(argv[++i], getConf()); + + if ((flag != 'e') && !item.exists) { + // TODO: it's backwards compat, but why is this throwing an exception? + // it's not like the shell test cmd + throw new PathNotFoundException(item.toString()); + } switch(flag) { case 'e': - return srcFs.exists(f) ? 0 : 1; + return item.exists ? 0 : 1; case 'z': - return srcFs.getFileStatus(f).getLen() == 0 ? 0 : 1; + return (item.stat.getLen() == 0) ? 0 : 1; case 'd': - return srcFs.getFileStatus(f).isDirectory() ? 0 : 1; + return item.stat.isDirectory() ? 
0 : 1; default: throw new IOException("Unknown flag: " + flag); } @@ -492,7 +500,7 @@ public class FsShell extends Configured implements Tool { FileSystem srcFs = srcPath.getFileSystem(getConf()); FileStatus glob[] = srcFs.globStatus(srcPath); if (null == glob) - throw new IOException("cannot stat `" + src + "': No such file or directory"); + throw new PathNotFoundException(src); for (FileStatus f : glob) { StringBuilder buf = new StringBuilder(); for (int i = 0; i < fmt.length; ++i) { @@ -565,8 +573,7 @@ public class FsShell extends Configured implements Tool { try { srcFstatus = fs.getFileStatus(srcs[i]); } catch(FileNotFoundException e) { - throw new FileNotFoundException(srcs[i] + - ": No such file or directory"); + throw new PathNotFoundException(srcs[i].toString()); } try { dstFstatus = fs.getFileStatus(dst); @@ -637,10 +644,9 @@ public class FsShell extends Configured implements Tool { LOG.debug("Error renaming " + argv[i], e); // // IO exception encountered locally. - // + // exitCode = -1; - System.err.println(cmd.substring(1) + ": " + - e.getLocalizedMessage()); + displayError(cmd, e); } } return exitCode; @@ -705,30 +711,10 @@ public class FsShell extends Configured implements Tool { // issue the copy to the fs // copy(argv[i], dest, conf); - } catch (RemoteException e) { - LOG.debug("Error copying " + argv[i], e); - // - // This is a error returned by hadoop server. Print - // out the first line of the error mesage. - // - exitCode = -1; - try { - String[] content; - content = e.getLocalizedMessage().split("\n"); - System.err.println(cmd.substring(1) + ": " + - content[0]); - } catch (Exception ex) { - System.err.println(cmd.substring(1) + ": " + - ex.getLocalizedMessage()); - } } catch (IOException e) { LOG.debug("Error copying " + argv[i], e); - // - // IO exception encountered locally. 
- // exitCode = -1; - System.err.println(cmd.substring(1) + ": " + - e.getLocalizedMessage()); + displayError(cmd, e); } } return exitCode; @@ -767,8 +753,7 @@ public class FsShell extends Configured implements Tool { fs = srcFs.getFileStatus(src); } catch (FileNotFoundException fnfe) { // Have to re-throw so that console output is as expected - throw new FileNotFoundException("cannot remove " - + src + ": No such file or directory."); + throw new PathNotFoundException(src.toString()); } if (fs.isDirectory() && !recursive) { @@ -1061,34 +1046,10 @@ public class FsShell extends Configured implements Tool { } else if ("-touchz".equals(cmd)) { touchz(argv[i]); } - } catch (RemoteException e) { - LOG.debug("Error", e); - // - // This is a error returned by hadoop server. Print - // out the first line of the error message. - // - exitCode = -1; - try { - String[] content; - content = e.getLocalizedMessage().split("\n"); - System.err.println(cmd.substring(1) + ": " + - content[0]); - } catch (Exception ex) { - System.err.println(cmd.substring(1) + ": " + - ex.getLocalizedMessage()); - } } catch (IOException e) { LOG.debug("Error", e); - // - // IO exception encountered locally. - // exitCode = -1; - String content = e.getLocalizedMessage(); - if (content != null) { - content = content.split("\n")[0]; - } - System.err.println(cmd.substring(1) + ": " + - content); + displayError(cmd, e); } } return exitCode; @@ -1306,38 +1267,36 @@ public class FsShell extends Configured implements Tool { exitCode = -1; System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage()); printUsage(cmd); - } catch (RemoteException e) { - LOG.debug("Error", e); - // - // This is a error returned by hadoop server. Print - // out the first line of the error mesage, ignore the stack trace. 
- exitCode = -1; - try { - String[] content; - content = e.getLocalizedMessage().split("\n"); - System.err.println(cmd.substring(1) + ": " + - content[0]); - } catch (Exception ex) { - System.err.println(cmd.substring(1) + ": " + - ex.getLocalizedMessage()); - } - } catch (IOException e) { - LOG.debug("Error", e); - // - // IO exception encountered locally. - // - exitCode = -1; - System.err.println(cmd.substring(1) + ": " + - e.getLocalizedMessage()); } catch (Exception re) { LOG.debug("Error", re); exitCode = -1; - System.err.println(cmd.substring(1) + ": " + re.getLocalizedMessage()); + displayError(cmd, re); } finally { } return exitCode; } + // TODO: this is a quick workaround to accelerate the integration of + // redesigned commands. this will be removed this once all commands are + // converted. this change will avoid having to change the hdfs tests + // every time a command is converted to use path-based exceptions + private static Pattern[] fnfPatterns = { + Pattern.compile("File (.*) does not exist\\."), + Pattern.compile("File does not exist: (.*)"), + Pattern.compile("`(.*)': specified destination directory doest not exist") + }; + private void displayError(String cmd, Exception e) { + String message = e.getLocalizedMessage().split("\n")[0]; + for (Pattern pattern : fnfPatterns) { + Matcher matcher = pattern.matcher(message); + if (matcher.matches()) { + message = new PathNotFoundException(matcher.group(1)).getMessage(); + break; + } + } + System.err.println(cmd.substring(1) + ": " + message); + } + public void close() throws IOException { if (fs != null) { fs.close(); diff --git a/src/java/org/apache/hadoop/fs/FsShellPermissions.java b/src/java/org/apache/hadoop/fs/FsShellPermissions.java index f4c528181c5..839f954f92d 100644 --- a/src/java/org/apache/hadoop/fs/FsShellPermissions.java +++ b/src/java/org/apache/hadoop/fs/FsShellPermissions.java @@ -53,12 +53,6 @@ public class FsShellPermissions extends FsCommand { factory.addClass(Chgrp.class, 
"-chgrp"); } - @Override - protected String getFnfText(Path path) { - // TODO: printing the path twice is silly for backwards compatibility - return "could not get status for '"+path+"': File does not exist: "+path; - } - /** * The pattern is almost as flexible as mode allowed by chmod shell command. * The main restriction is that we recognize only rwxXt. To reduce errors we diff --git a/src/java/org/apache/hadoop/fs/shell/Command.java b/src/java/org/apache/hadoop/fs/shell/Command.java index aaeefea9c43..31dbb59e920 100644 --- a/src/java/org/apache/hadoop/fs/shell/Command.java +++ b/src/java/org/apache/hadoop/fs/shell/Command.java @@ -32,6 +32,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException; import org.apache.hadoop.util.StringUtils; /** @@ -199,7 +200,7 @@ abstract public class Command extends Configured { PathData[] items = PathData.expandAsGlob(arg, getConf()); if (items.length == 0) { // it's a glob that failed to match - throw new FileNotFoundException(getFnfText(new Path(arg))); + throw new PathNotFoundException(arg); } return Arrays.asList(items); } @@ -261,20 +262,7 @@ abstract public class Command extends Configured { * @throws IOException if anything else goes wrong... */ protected void processNonexistentPath(PathData item) throws IOException { - // TODO: this should be more posix-like: ex. "No such file or directory" - throw new FileNotFoundException(getFnfText(item.path)); - } - - /** - * TODO: A crutch until the text is standardized across commands... 
- * Eventually an exception that takes the path as an argument will - * replace custom text, until then, commands can supply custom text - * for backwards compatibility - * @param path the thing that doesn't exist - * @returns String in printf format - */ - protected String getFnfText(Path path) { - return path + ": No such file or directory"; + throw new PathNotFoundException(item.toString()); } /** diff --git a/src/java/org/apache/hadoop/fs/shell/Count.java b/src/java/org/apache/hadoop/fs/shell/Count.java index c63bf1d5526..6d945196bab 100644 --- a/src/java/org/apache/hadoop/fs/shell/Count.java +++ b/src/java/org/apache/hadoop/fs/shell/Count.java @@ -26,7 +26,6 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FsShell; -import org.apache.hadoop.fs.Path; /** * Count the number of directories, files, bytes, quota, and remaining quota. @@ -87,10 +86,4 @@ public class Count extends FsCommand { ContentSummary summary = src.fs.getContentSummary(src.path); out.println(summary.toString(showQuotas) + src.path); } - - // TODO: remove when the error is commonized... 
- @Override - protected String getFnfText(Path path) { - return "Can not find listing for " + path; - } } diff --git a/src/java/org/apache/hadoop/fs/shell/Display.java b/src/java/org/apache/hadoop/fs/shell/Display.java index a4263674c8a..312527b98e3 100644 --- a/src/java/org/apache/hadoop/fs/shell/Display.java +++ b/src/java/org/apache/hadoop/fs/shell/Display.java @@ -27,8 +27,8 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.IOUtils; @@ -51,20 +51,6 @@ class Display extends FsCommand { factory.addClass(Text.class, "-text"); } - @Override - protected String getFnfText(Path path) { - // TODO: this is a pretty inconsistent way to output the path...!! 
- // but, it's backwards compatible - try { - FileSystem fs = path.getFileSystem(getConf()); - path = fs.makeQualified(path); - } catch (IOException e) { - // shouldn't happen, so just use path as-is - displayWarning("can't fully qualify "+path); - } - return "File does not exist: " + path.toUri().getPath(); - } - /** * Displays file content to stdout */ @@ -87,6 +73,10 @@ class Display extends FsCommand { @Override protected void processPath(PathData item) throws IOException { + if (item.stat.isDirectory()) { + throw new PathIsDirectoryException(item.toString()); + } + item.fs.setVerifyChecksum(verifyChecksum); printToStdout(getInputStream(item)); } diff --git a/src/java/org/apache/hadoop/fs/shell/Ls.java b/src/java/org/apache/hadoop/fs/shell/Ls.java index 61e5d0c4e73..cb752e2ec03 100644 --- a/src/java/org/apache/hadoop/fs/shell/Ls.java +++ b/src/java/org/apache/hadoop/fs/shell/Ls.java @@ -126,12 +126,6 @@ class Ls extends FsCommand { return Math.max(n, (value != null) ? String.valueOf(value).length() : 0); } - // TODO: remove when the error is commonized... 
- @Override - protected String getFnfText(Path path) { - return "Cannot access " + path.toUri() + ": No such file or directory."; - } - @Override protected int exitCodeForError() { return -1; } diff --git a/src/java/org/apache/hadoop/fs/shell/Mkdir.java b/src/java/org/apache/hadoop/fs/shell/Mkdir.java index 0edb40bb40e..effbbfebb03 100644 --- a/src/java/org/apache/hadoop/fs/shell/Mkdir.java +++ b/src/java/org/apache/hadoop/fs/shell/Mkdir.java @@ -23,6 +23,9 @@ import java.util.LinkedList; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException; +import org.apache.hadoop.fs.shell.PathExceptions.PathIOException; +import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException; /** * Create the given dir @@ -49,16 +52,16 @@ class Mkdir extends FsCommand { @Override protected void processPath(PathData item) throws IOException { if (item.stat.isDirectory()) { - throw new IOException("cannot create directory " + item + ": File exists"); + throw new PathExistsException(item.toString()); } else { - throw new IOException(item + " exists but is not a directory"); + throw new PathIsNotDirectoryException(item.toString()); } } @Override protected void processNonexistentPath(PathData item) throws IOException { if (!item.fs.mkdirs(item.path)) { - throw new IOException("failed to create " + item); + throw new PathIOException(item.toString()); } } } diff --git a/src/java/org/apache/hadoop/fs/shell/PathData.java b/src/java/org/apache/hadoop/fs/shell/PathData.java index 6d88fffad8b..f0ac04ea532 100644 --- a/src/java/org/apache/hadoop/fs/shell/PathData.java +++ b/src/java/org/apache/hadoop/fs/shell/PathData.java @@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import 
org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException; /** * Encapsulates a Path (path), its FileStatus (stat), and its FileSystem (fs). @@ -140,7 +141,7 @@ public class PathData { */ public PathData[] getDirectoryContents() throws IOException { if (!stat.isDirectory()) { - throw new IOException(path + ": Not a directory"); + throw new PathIsNotDirectoryException(string); } FileStatus[] stats = fs.listStatus(path); diff --git a/src/java/org/apache/hadoop/fs/shell/PathExceptions.java b/src/java/org/apache/hadoop/fs/shell/PathExceptions.java new file mode 100644 index 00000000000..c9d540ffcf9 --- /dev/null +++ b/src/java/org/apache/hadoop/fs/shell/PathExceptions.java @@ -0,0 +1,147 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.fs.shell; + +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.fs.Path; + +/** + * Standardized posix/linux style exceptions for path related errors. + * Returns an IOException with the format "path: standard error string". 
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable + +public class PathExceptions { + + /** EIO */ + public static class PathIOException extends IOException { + static final long serialVersionUID = 0L; + private static final String EIO = "Input/output error"; + // NOTE: this really should be a Path, but a Path is buggy and won't + // return the exact string used to construct the path, and it mangles + // uris with no authority + private String path; + + /** + * Construct a generic I/O error exception + * @param path for the exception + */ + public PathIOException(String path) { + this(path, EIO, null); + } + + /** + * Appends the text of a Throwable to the default error message + * @param path for the exception + * @param cause a throwable to extract the error message + */ + public PathIOException(String path, Throwable cause) { + this(path, EIO, cause); + } + + /** + * Avoid using this method. Use a subclass of PathIOException if + * possible. + * @param path for the exception + * @param error custom string to use as the error text + */ + public PathIOException(String path, String error) { + this(path, error, null); + } + + protected PathIOException(String path, String error, Throwable cause) { + super(error, cause); + this.path = path; + } + + @Override + public String getMessage() { + String message = "`" + path + "': " + super.getMessage(); + if (getCause() != null) { + message += ": " + getCause().getMessage(); + } + return message; + } + + /** @return Path that generated the exception */ + public Path getPath() { return new Path(path); } + } + + /** ENOENT */ + public static class PathNotFoundException extends PathIOException { + static final long serialVersionUID = 0L; + /** @param path for the exception */ + public PathNotFoundException(String path) { + super(path, "No such file or directory"); + } + } + + /** EEXIST */ + public static class PathExistsException extends PathIOException { + static final long serialVersionUID = 0L; + /** @param 
path for the exception */ + public PathExistsException(String path) { + super(path, "File exists"); + } + + protected PathExistsException(String path, String error) { + super(path, error); + } + } + + /** EISDIR */ + public static class PathIsDirectoryException extends PathExistsException { + static final long serialVersionUID = 0L; + /** @param path for the exception */ + public PathIsDirectoryException(String path) { + super(path, "Is a directory"); + } + } + + /** ENOTDIR */ + public static class PathIsNotDirectoryException extends PathExistsException { + static final long serialVersionUID = 0L; + /** @param path for the exception */ + public PathIsNotDirectoryException(String path) { + super(path, "Is not a directory"); + } + } + + /** EACCES */ + public static class PathAccessDeniedException extends PathIOException { + static final long serialVersionUID = 0L; + /** @param path for the exception */ + public PathAccessDeniedException(String path) { + super(path, "Permission denied"); + } + } + + /** EPERM */ + public static class PathPermissionException extends PathIOException { + static final long serialVersionUID = 0L; + /** @param path for the exception */ + public PathPermissionException(String path) { + super(path, "Operation not permitted"); + } + } +} diff --git a/src/java/org/apache/hadoop/fs/shell/SetReplication.java b/src/java/org/apache/hadoop/fs/shell/SetReplication.java index 495a669008b..a7097170cae 100644 --- a/src/java/org/apache/hadoop/fs/shell/SetReplication.java +++ b/src/java/org/apache/hadoop/fs/shell/SetReplication.java @@ -25,7 +25,7 @@ import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.BlockLocation; -import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.shell.PathExceptions.PathIOException; /** * Modifies the replication factor @@ -76,7 +76,9 @@ public class SetReplication extends FsCommand { @Override protected 
void processPath(PathData item) throws IOException { - if (item.stat.isSymlink()) throw new IOException("Symlinks unsupported"); + if (item.stat.isSymlink()) { + throw new PathIOException(item.toString(), "Symlinks unsupported"); + } if (item.stat.isFile()) { if (!item.fs.setReplication(item.path, newRep)) { @@ -124,9 +126,4 @@ public class SetReplication extends FsCommand { out.println(" done"); } } - - @Override - protected String getFnfText(Path path) { - return "File does not exist: " + path; - } } \ No newline at end of file diff --git a/src/java/org/apache/hadoop/fs/shell/Tail.java b/src/java/org/apache/hadoop/fs/shell/Tail.java index d59b7b14796..fda7b99c522 100644 --- a/src/java/org/apache/hadoop/fs/shell/Tail.java +++ b/src/java/org/apache/hadoop/fs/shell/Tail.java @@ -25,7 +25,7 @@ import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException; import org.apache.hadoop.io.IOUtils; /** @@ -67,7 +67,7 @@ class Tail extends FsCommand { @Override protected void processPath(PathData item) throws IOException { if (item.stat.isDirectory()) { - throw new IOException("Source must be a file."); + throw new PathIsDirectoryException(item.toString()); } long offset = dumpFromOffset(item, startingOffset); @@ -100,9 +100,4 @@ class Tail extends FsCommand { } return offset; } - - @Override - protected String getFnfText(Path path) { - return "File does not exist: " + path; - } } diff --git a/src/test/core/org/apache/hadoop/fs/TestFsShellReturnCode.java b/src/test/core/org/apache/hadoop/fs/TestFsShellReturnCode.java index 3c2b1df3802..fc115969122 100644 --- a/src/test/core/org/apache/hadoop/fs/TestFsShellReturnCode.java +++ b/src/test/core/org/apache/hadoop/fs/TestFsShellReturnCode.java @@ -288,7 +288,7 @@ public class TestFsShellReturnCode 
{ results = bytes.toString(); assertTrue("Return code should be -1", run == -1); assertTrue(" Null is coming when source path is invalid. ",!results.contains("get: null")); - assertTrue(" Not displaying the intended message ",results.contains("get: "+args[1]+": No such file or directory")); + assertTrue(" Not displaying the intended message ",results.contains("get: `"+args[1]+"': No such file or directory")); } finally { IOUtils.closeStream(out); System.setErr(oldErr); diff --git a/src/test/core/org/apache/hadoop/fs/shell/TestPathExceptions.java b/src/test/core/org/apache/hadoop/fs/shell/TestPathExceptions.java new file mode 100644 index 00000000000..f3b7e88a41e --- /dev/null +++ b/src/test/core/org/apache/hadoop/fs/shell/TestPathExceptions.java @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.fs.shell; + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.shell.PathExceptions.PathIOException; +import org.junit.Test; + +public class TestPathExceptions { + + protected String path = "some/file"; + protected String error = "KABOOM"; + + @Test + public void testWithDefaultString() throws Exception { + PathIOException pe = new PathIOException(path); + assertEquals(new Path(path), pe.getPath()); + assertEquals("`" + path + "': Input/output error", pe.getMessage()); + } + + @Test + public void testWithThrowable() throws Exception { + IOException ioe = new IOException("KABOOM"); + PathIOException pe = new PathIOException(path, ioe); + assertEquals(new Path(path), pe.getPath()); + assertEquals("`" + path + "': Input/output error: " + error, pe.getMessage()); + } + + @Test + public void testWithCustomString() throws Exception { + PathIOException pe = new PathIOException(path, error); + assertEquals(new Path(path), pe.getPath()); + assertEquals("`" + path + "': " + error, pe.getMessage()); + } + +}