diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt index c9b84d9aa66..afd3afe92cb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt @@ -51,6 +51,9 @@ HDFS-4949 (Unreleased) HDFS-5119. Persist CacheManager state in the edit log. (Contributed by Andrew Wang) + HDFS-5190. Move cache pool related CLI commands to CacheAdmin. + (Contributed by Andrew Wang) + OPTIMIZATIONS BUG FIXES diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java index 8ea0939f9de..c37d86d7c51 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java @@ -2294,11 +2294,11 @@ public class DFSClient implements java.io.Closeable { } } - public void removePathBasedCacheDescriptor(PathBasedCacheDescriptor descriptor) + public void removePathBasedCacheDescriptor(long id) throws IOException { checkOpen(); try { - namenode.removePathBasedCacheDescriptor(descriptor.getEntryId()); + namenode.removePathBasedCacheDescriptor(id); } catch (RemoteException re) { throw re.unwrapRemoteException(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java index 11d5fb05bed..2ece7640a77 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java @@ -1602,7 +1602,7 @@ public class DistributedFileSystem extends FileSystem { */ public void removePathBasedCacheDescriptor(PathBasedCacheDescriptor descriptor) throws IOException { - dfs.removePathBasedCacheDescriptor(descriptor); + dfs.removePathBasedCacheDescriptor(descriptor.getEntryId()); } /** diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java index 9e1000934c3..bb5e07848b0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java @@ -469,7 +469,7 @@ public final class CacheManager { while (iter.hasNext()) { Entry entry = iter.next(); if (entry.getValue().getPool() == pool) { - entriesById.remove(entry.getValue().getEntryId()); + entriesByPath.remove(entry.getValue().getPath()); iter.remove(); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java index 0ba9023d923..f0a71c595b2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java @@ -21,24 +21,76 @@ import java.io.IOException; import java.util.LinkedList; import java.util.List; +import org.apache.commons.lang.WordUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import 
org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException;
+import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
 import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
+import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException;
+import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.tools.TableListing.Justification;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Tool;
+
+import com.google.common.base.Joiner;
 
 /**
  * This class implements command-line operations on the HDFS Cache.
  */
 @InterfaceAudience.Private
-public class CacheAdmin {
-  private static Configuration conf = new Configuration();
+public class CacheAdmin extends Configured implements Tool {
 
-  private static DistributedFileSystem getDFS() throws IOException {
+  /**
+   * Maximum length for printed lines
+   */
+  private static final int MAX_LINE_WIDTH = 80;
+
+  public CacheAdmin() {
+    this(null);
+  }
+
+  public CacheAdmin(Configuration conf) {
+    super(conf);
+  }
+
+  @Override
+  public int run(String[] args) throws IOException {
+    if (args.length == 0) {
+      printUsage(false);
+      return 1;
+    }
+    Command command = determineCommand(args[0]);
+    if (command == null) {
+      System.err.println("Can't understand command '" + args[0] + "'");
+      if (!args[0].startsWith("-")) {
+        System.err.println("Command names must start with dashes.");
+      }
+      printUsage(false);
+      return 1;
+    }
+    List<String> argsList = new LinkedList<String>();
+    for (int j = 1; j < args.length; j++) {
+      argsList.add(args[j]);
+    }
+    return command.run(getConf(), argsList);
+  }
+
+  public static void main(String[] argsArray) throws IOException {
+    CacheAdmin cacheAdmin = new CacheAdmin(new Configuration());
+    System.exit(cacheAdmin.run(argsArray));
+  }
+
+  private static DistributedFileSystem getDFS(Configuration conf)
+      throws IOException {
     FileSystem fs = FileSystem.get(conf);
     if (!(fs instanceof DistributedFileSystem)) {
       throw new IllegalArgumentException("FileSystem " + fs.getUri() +
@@ -47,37 +99,55 @@ public class CacheAdmin {
     return (DistributedFileSystem)fs;
   }
 
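Note: this hunk converts CacheAdmin from a bare static main() into the
standard Configured/Tool pattern, which is what lets the new CLI test
harness below drive it programmatically and lets ToolRunner handle generic
options. A minimal sketch of that pattern (the class name is illustrative,
not part of the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    // ToolRunner.run() strips generic options (-conf, -D, -fs, ...) into the
    // Configuration before the remaining arguments reach run().
    public class ToolSkeleton extends Configured implements Tool {
      @Override
      public int run(String[] args) throws Exception {
        // getConf() returns the Configuration injected by ToolRunner.
        System.out.println("fs.defaultFS = " + getConf().get("fs.defaultFS"));
        return 0;
      }

      public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new ToolSkeleton(), args));
      }
    }
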
+  /**
+   * NN exceptions contain the stack trace as part of the exception message.
+   * When it's a known error, pretty-print the error and squish the stack trace.
+   */
+  private static String prettifyException(Exception e) {
+    return e.getClass().getSimpleName() + ": " +
+        e.getLocalizedMessage().split("\n")[0];
+  }
+
+  private static TableListing getOptionDescriptionListing() {
+    TableListing listing = new TableListing.Builder()
+        .addField("").addField("", true)
+        .wrapWidth(MAX_LINE_WIDTH).hideHeaders().build();
+    return listing;
+  }
+
   interface Command {
     String getName();
     String getShortUsage();
     String getLongUsage();
-    int run(List<String> args) throws IOException;
+    int run(Configuration conf, List<String> args) throws IOException;
   }
 
   private static class AddPathBasedCacheDirectiveCommand implements Command {
     @Override
     public String getName() {
-      return "-addPath";
+      return "-addDirective";
     }
 
     @Override
     public String getShortUsage() {
-      return "[-addPath -path <path> -pool <pool-name>]\n";
+      return "[" + getName() + " -path <path> -pool <pool-name>]\n";
     }
 
     @Override
     public String getLongUsage() {
-      return getShortUsage() +
-        "Adds a new PathBasedCache directive.\n" +
-        "<path>  The new path to cache.\n" +
-        "        Paths may be either directories or files.\n" +
-        "<pool-name>  The pool which this directive will reside in.\n" +
-        "        You must have write permission on the cache pool in order\n" +
-        "        to add new entries to it.\n";
+      TableListing listing = getOptionDescriptionListing();
+      listing.addRow("<path>", "A path to cache. The path can be " +
+          "a directory or a file.");
+      listing.addRow("<pool-name>", "The pool to which the directive will be " +
+          "added. You must have write permission on the cache pool " +
+          "in order to add new directives.");
+      return getShortUsage() + "\n" +
+          "Add a new PathBasedCache directive.\n\n" +
+          listing.toString();
     }
 
     @Override
-    public int run(List<String> args) throws IOException {
+    public int run(Configuration conf, List<String> args) throws IOException {
       String path = StringUtils.popOptionWithArgument("-path", args);
       if (path == null) {
         System.err.println("You must specify a path with -path.");
@@ -93,14 +163,20 @@ public class CacheAdmin {
         return 1;
       }
-      DistributedFileSystem dfs = getDFS();
+      DistributedFileSystem dfs = getDFS(conf);
       PathBasedCacheDirective directive =
           new PathBasedCacheDirective(path, poolName);
-      PathBasedCacheDescriptor descriptor =
-          dfs.addPathBasedCacheDirective(directive);
-      System.out.println("Added PathBasedCache entry "
-          + descriptor.getEntryId());
+      try {
+        PathBasedCacheDescriptor descriptor =
+            dfs.addPathBasedCacheDirective(directive);
+        System.out.println("Added PathBasedCache entry " +
+            descriptor.getEntryId());
+      } catch (AddPathBasedCacheDirectiveException e) {
+        System.err.println(prettifyException(e));
+        return 2;
+      }
+      return 0;
     }
   }
 
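Note: for reference, a sketch of the client calls that -addDirective wraps;
the /foo path and pool1 pool are made-up values, and this assumes a running
cluster reachable through the default Configuration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;

    public class AddDirectiveExample {
      public static void main(String[] args) throws Exception {
        // Programmatic equivalent of:
        //   hdfs cacheadmin -addDirective -path /foo -pool pool1
        Configuration conf = new Configuration();
        DistributedFileSystem dfs =
            (DistributedFileSystem) FileSystem.get(conf);
        PathBasedCacheDirective directive =
            new PathBasedCacheDirective("/foo", "pool1");
        // The returned descriptor carries the entry ID that -removeDirective
        // later takes as its argument.
        PathBasedCacheDescriptor descriptor =
            dfs.addPathBasedCacheDirective(directive);
        System.out.println("Added entry " + descriptor.getEntryId());
      }
    }
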
@@ -108,32 +184,41 @@ public class CacheAdmin {
   private static class RemovePathBasedCacheDirectiveCommand implements Command {
     @Override
     public String getName() {
-      return "-removePath";
+      return "-removeDirective";
     }
 
     @Override
     public String getShortUsage() {
-      return "[-removePath <id>]\n";
+      return "[" + getName() + " <id>]\n";
     }
 
     @Override
     public String getLongUsage() {
-      return getShortUsage() +
-        "Remove a cache directive.\n" +
-        "<id>    The id of the cache directive to remove.\n" +
-        "        You must have write permission on the pool where the\n" +
-        "        directive resides in order to remove it. To see a list\n" +
-        "        of PathBasedCache directive IDs, use the -list command.\n";
+      TableListing listing = getOptionDescriptionListing();
+      listing.addRow("<id>", "The id of the cache directive to remove. " +
+          "You must have write permission on the pool of the " +
+          "directive in order to remove it. To see a list " +
+          "of PathBasedCache directive IDs, use the -list command.");
+      return getShortUsage() + "\n" +
+          "Remove a cache directive.\n\n" +
+          listing.toString();
     }
 
     @Override
-    public int run(List<String> args) throws IOException {
+    public int run(Configuration conf, List<String> args) throws IOException {
       String idString= StringUtils.popFirstNonOption(args);
       if (idString == null) {
         System.err.println("You must specify a directive ID to remove.");
         return 1;
       }
-      long id = Long.valueOf(idString);
+      long id;
+      try {
+        id = Long.valueOf(idString);
+      } catch (NumberFormatException e) {
+        System.err.println("Invalid directive ID " + idString + ": expected " +
+            "a numeric value.");
+        return 1;
+      }
       if (id <= 0) {
         System.err.println("Invalid directive ID " + id + ": ids must " +
             "be greater than 0.");
@@ -141,12 +226,17 @@
       }
       if (!args.isEmpty()) {
         System.err.println("Can't understand argument: " + args.get(0));
+        System.err.println("Usage is " + getShortUsage());
         return 1;
       }
-      DistributedFileSystem dfs = getDFS();
-      dfs.removePathBasedCacheDescriptor(new PathBasedCacheDescriptor(id, null,
-          null));
-      System.out.println("Removed PathBasedCache directive " + id);
+      DistributedFileSystem dfs = getDFS(conf);
+      try {
+        dfs.getClient().removePathBasedCacheDescriptor(id);
+        System.out.println("Removed PathBasedCache directive " + id);
+      } catch (RemovePathBasedCacheDescriptorException e) {
+        System.err.println(prettifyException(e));
+        return 2;
+      }
       return 0;
     }
   }
 
@@ -154,31 +244,30 @@ private static class ListPathBasedCacheDirectiveCommand implements Command {
     @Override
     public String getName() {
-      return "-listPaths";
+      return "-listDirectives";
     }
 
     @Override
     public String getShortUsage() {
-      return "[-listPaths [-path <path>] [-pool <pool>]]\n";
+      return "[" + getName() + " [-path <path>] [-pool <pool>]]\n";
     }
 
     @Override
     public String getLongUsage() {
-      return getShortUsage() +
-        "List PathBasedCache directives.\n" +
-        "  If a -path argument is given, we will list only\n" +
-        "  PathBasedCache entries with this path.\n" +
-        "  Note that if there is a PathBasedCache directive for <path>\n" +
-        "  in a cache pool that we don't have read access for, it\n" +
-        "  will not be listed. If there are unreadable cache pools, a\n" +
-        "  message will be printed. The listing\n" +
-        "  may be incomplete.\n" +
-        "  If a -pool argument is given, we will list only path\n" +
-        "  cache entries in that pool.\n";
+      TableListing listing = getOptionDescriptionListing();
+      listing.addRow("<path>", "List only " +
+          "PathBasedCache directives with this path. " +
+          "Note that if there is a PathBasedCache directive for <path> " +
+          "in a cache pool that we don't have read access for, it " +
+          "will not be listed.");
+      listing.addRow("<pool>", "List only path cache directives in that pool.");
+      return getShortUsage() + "\n" +
+          "List PathBasedCache directives.\n\n" +
+          listing.toString();
     }
 
     @Override
-    public int run(List<String> args) throws IOException {
+    public int run(Configuration conf, List<String> args) throws IOException {
       String pathFilter = StringUtils.popOptionWithArgument("-path", args);
       String poolFilter = StringUtils.popOptionWithArgument("-pool", args);
       if (!args.isEmpty()) {
@@ -186,11 +275,11 @@
         return 1;
       }
       TableListing tableListing = new TableListing.Builder().
-          addField("ID", Justification.RIGHT).
+          addField("ID", Justification.LEFT).
           addField("POOL", Justification.LEFT).
           addField("PATH", Justification.LEFT).
          build();
-      DistributedFileSystem dfs = getDFS();
+      DistributedFileSystem dfs = getDFS(conf);
       RemoteIterator<PathBasedCacheDescriptor> iter =
           dfs.listPathBasedCacheDescriptors(poolFilter, pathFilter);
       int numEntries = 0;
@@ -205,12 +294,325 @@
       System.out.print(String.format("Found %d entr%s\n", numEntries,
           numEntries == 1 ? "y" : "ies"));
       if (numEntries > 0) {
-        System.out.print(tableListing.build());
+        System.out.print(tableListing);
       }
       return 0;
     }
   }
 
+  private static class AddCachePoolCommand implements Command {
+
+    private static final String NAME = "-addPool";
+
+    @Override
+    public String getName() {
+      return NAME;
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[" + NAME + " <name> [-owner <owner>] " +
+          "[-group <group>] [-mode <mode>] [-weight <weight>]]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      TableListing listing = getOptionDescriptionListing();
+
+      listing.addRow("<name>", "Name of the new pool.");
+      listing.addRow("<owner>", "Username of the owner of the pool. " +
+          "Defaults to the current user.");
+      listing.addRow("<group>", "Group of the pool. " +
+          "Defaults to the primary group name of the current user.");
+      listing.addRow("<mode>", "UNIX-style permissions for the pool. " +
+          "Permissions are specified in octal, e.g. 0755. " +
+          "By default, this is set to " + String.format("0%03o",
+          FsPermission.getCachePoolDefault().toShort()));
+      listing.addRow("<weight>", "Weight of the pool. " +
+          "This is a relative measure of the importance of the pool used " +
+          "during cache resource management. By default, it is set to " +
+          CachePool.DEFAULT_WEIGHT);
+
+      return getShortUsage() + "\n" +
+          "Add a new cache pool.\n\n" +
+          listing.toString();
+    }
+
+    @Override
+    public int run(Configuration conf, List<String> args) throws IOException {
+      String owner = StringUtils.popOptionWithArgument("-owner", args);
+      if (owner == null) {
+        owner = UserGroupInformation.getCurrentUser().getShortUserName();
+      }
+      String group = StringUtils.popOptionWithArgument("-group", args);
+      if (group == null) {
+        group = UserGroupInformation.getCurrentUser().getGroupNames()[0];
+      }
+      String modeString = StringUtils.popOptionWithArgument("-mode", args);
+      int mode;
+      if (modeString == null) {
+        mode = FsPermission.getCachePoolDefault().toShort();
+      } else {
+        mode = Integer.parseInt(modeString, 8);
+      }
+      String weightString = StringUtils.popOptionWithArgument("-weight", args);
+      int weight;
+      if (weightString == null) {
+        weight = CachePool.DEFAULT_WEIGHT;
+      } else {
+        weight = Integer.parseInt(weightString);
+      }
+      String name = StringUtils.popFirstNonOption(args);
+      if (name == null) {
+        System.err.println("You must specify a name when creating a " +
+            "cache pool.");
+        return 1;
+      }
+      if (!args.isEmpty()) {
+        System.err.print("Can't understand arguments: " +
+            Joiner.on(" ").join(args) + "\n");
+        System.err.println("Usage is " + getShortUsage());
+        return 1;
+      }
+      DistributedFileSystem dfs = getDFS(conf);
+      CachePoolInfo info = new CachePoolInfo(name).
+          setOwnerName(owner).
+          setGroupName(group).
+          setMode(new FsPermission((short)mode)).
+          setWeight(weight);
+      try {
+        dfs.addCachePool(info);
+      } catch (IOException e) {
+        throw new RemoteException(e.getClass().getName(), e.getMessage());
+      }
+      System.out.println("Successfully added cache pool " + name + ".");
+      return 0;
+    }
+  }
+
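Note: the -mode argument above is parsed with radix 8
(Integer.parseInt(modeString, 8)), so it behaves like a chmod-style octal
mode. A standalone sketch, with an illustrative value:

    import org.apache.hadoop.fs.permission.FsPermission;

    public class OctalModeExample {
      public static void main(String[] args) {
        // "0755" parsed base-8 is 493 decimal; the short value feeds
        // straight into FsPermission, which prints it back symbolically.
        int mode = Integer.parseInt("0755", 8);
        FsPermission perm = new FsPermission((short) mode);
        System.out.println(perm);  // rwxr-xr-x
      }
    }
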
+  private static class ModifyCachePoolCommand implements Command {
+
+    @Override
+    public String getName() {
+      return "-modifyPool";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[" + getName() + " <name> [-owner <owner>] " +
+          "[-group <group>] [-mode <mode>] [-weight <weight>]]\n";
+    }
+
+    @Override
+    public String getLongUsage() {
+      TableListing listing = getOptionDescriptionListing();
+
+      listing.addRow("<name>", "Name of the pool to modify.");
+      listing.addRow("<owner>", "Username of the owner of the pool");
+      listing.addRow("<group>", "Groupname of the group of the pool.");
+      listing.addRow("<mode>", "Unix-style permissions of the pool in octal.");
+      listing.addRow("<weight>", "Weight of the pool.");
+
+      return getShortUsage() + "\n" +
+          WordUtils.wrap("Modifies the metadata of an existing cache pool. " +
+              "See usage of " + AddCachePoolCommand.NAME + " for more details",
+              MAX_LINE_WIDTH) + "\n\n" +
+          listing.toString();
+    }
+
+    @Override
+    public int run(Configuration conf, List<String> args) throws IOException {
+      String owner = StringUtils.popOptionWithArgument("-owner", args);
+      String group = StringUtils.popOptionWithArgument("-group", args);
+      String modeString = StringUtils.popOptionWithArgument("-mode", args);
+      Integer mode = (modeString == null) ?
+          null : Integer.parseInt(modeString, 8);
+      String weightString = StringUtils.popOptionWithArgument("-weight", args);
+      Integer weight = (weightString == null) ?
+          null : Integer.parseInt(weightString);
+      String name = StringUtils.popFirstNonOption(args);
+      if (name == null) {
+        System.err.println("You must specify a name when creating a " +
+            "cache pool.");
+        return 1;
+      }
+      if (!args.isEmpty()) {
+        System.err.print("Can't understand arguments: " +
+            Joiner.on(" ").join(args) + "\n");
+        System.err.println("Usage is " + getShortUsage());
+        return 1;
+      }
+      boolean changed = false;
+      CachePoolInfo info = new CachePoolInfo(name);
+      if (owner != null) {
+        info.setOwnerName(owner);
+        changed = true;
+      }
+      if (group != null) {
+        info.setGroupName(group);
+        changed = true;
+      }
+      if (mode != null) {
+        info.setMode(new FsPermission(mode.shortValue()));
+        changed = true;
+      }
+      if (weight != null) {
+        info.setWeight(weight);
+        changed = true;
+      }
+      if (!changed) {
+        System.err.println("You must specify at least one attribute to " +
+            "change in the cache pool.");
+        return 1;
+      }
+      DistributedFileSystem dfs = getDFS(conf);
+      try {
+        dfs.modifyCachePool(info);
+      } catch (IOException e) {
+        throw new RemoteException(e.getClass().getName(), e.getMessage());
+      }
+      System.out.print("Successfully modified cache pool " + name);
+      String prefix = " to have ";
+      if (owner != null) {
+        System.out.print(prefix + "owner name " + owner);
+        prefix = " and ";
+      }
+      if (group != null) {
+        System.out.print(prefix + "group name " + group);
+        prefix = " and ";
+      }
+      if (mode != null) {
+        System.out.print(prefix + "mode " + new FsPermission(mode.shortValue()));
+        prefix = " and ";
+      }
+      if (weight != null) {
+        System.out.print(prefix + "weight " + weight);
+        prefix = " and ";
+      }
+      System.out.print("\n");
+      return 0;
+    }
+  }
+
+  private static class RemoveCachePoolCommand implements Command {
+
+    @Override
+    public String getName() {
+      return "-removePool";
+    }
+
+    @Override
+    public String getShortUsage() {
+      return "[" + getName() + " <name>]\n";
+    }
+
+    @Override
+    public String
getLongUsage() { + return getShortUsage() + "\n" + + WordUtils.wrap("Remove a cache pool. This also uncaches paths " + + "associated with the pool.\n\n", MAX_LINE_WIDTH) + + " Name of the cache pool to remove.\n"; + } + + @Override + public int run(Configuration conf, List args) throws IOException { + String name = StringUtils.popFirstNonOption(args); + if (name == null) { + System.err.println("You must specify a name when deleting a " + + "cache pool."); + return 1; + } + if (!args.isEmpty()) { + System.err.print("Can't understand arguments: " + + Joiner.on(" ").join(args) + "\n"); + System.err.println("Usage is " + getShortUsage()); + return 1; + } + DistributedFileSystem dfs = getDFS(conf); + try { + dfs.removeCachePool(name); + } catch (IOException e) { + throw new RemoteException(e.getClass().getName(), e.getMessage()); + } + System.out.println("Successfully removed cache pool " + name + "."); + return 0; + } + } + + private static class ListCachePoolsCommand implements Command { + + @Override + public String getName() { + return "-listPools"; + } + + @Override + public String getShortUsage() { + return "[" + getName() + " [name]]\n"; + } + + @Override + public String getLongUsage() { + TableListing listing = getOptionDescriptionListing(); + listing.addRow("[name]", "If specified, list only the named cache pool."); + + return getShortUsage() + "\n" + + WordUtils.wrap("Display information about one or more cache pools, " + + "e.g. name, owner, group, permissions, etc.", MAX_LINE_WIDTH) + + "\n\n" + + listing.toString(); + } + + @Override + public int run(Configuration conf, List args) throws IOException { + String name = StringUtils.popFirstNonOption(args); + if (!args.isEmpty()) { + System.err.print("Can't understand arguments: " + + Joiner.on(" ").join(args) + "\n"); + System.err.println("Usage is " + getShortUsage()); + return 1; + } + DistributedFileSystem dfs = getDFS(conf); + TableListing listing = new TableListing.Builder(). + addField("NAME", Justification.LEFT). + addField("OWNER", Justification.LEFT). + addField("GROUP", Justification.LEFT). + addField("MODE", Justification.LEFT). + addField("WEIGHT", Justification.LEFT). + build(); + int numResults = 0; + try { + RemoteIterator iter = dfs.listCachePools(); + while (iter.hasNext()) { + CachePoolInfo info = iter.next(); + if (name == null || info.getPoolName().equals(name)) { + listing.addRow(new String[] { + info.getPoolName(), + info.getOwnerName(), + info.getGroupName(), + info.getMode().toString(), + info.getWeight().toString(), + }); + ++numResults; + if (name != null) { + break; + } + } + } + } catch (IOException e) { + throw new RemoteException(e.getClass().getName(), e.getMessage()); + } + System.out.print(String.format("Found %d result%s.\n", numResults, + (numResults == 1 ? "" : "s"))); + if (numResults > 0) { + System.out.print(listing); + } + // If there are no results, we return 1 (failure exit code); + // otherwise we return 0 (success exit code). + return (numResults == 0) ? 1 : 0; + } + } + private static class HelpCommand implements Command { @Override public String getName() { @@ -224,15 +626,17 @@ public class CacheAdmin { @Override public String getLongUsage() { - return getShortUsage() + - "Get detailed help about a command.\n" + - " The command to get detailed help for. If no " + - " command-name is specified, we will print detailed help " + - " about all commands"; + TableListing listing = getOptionDescriptionListing(); + listing.addRow("", "The command for which to get " + + "detailed help. 
If no command is specified, print detailed help for " + + "all commands"); + return getShortUsage() + "\n" + + "Get detailed help about a command.\n\n" + + listing.toString(); } @Override - public int run(List args) throws IOException { + public int run(Configuration conf, List args) throws IOException { if (args.size() == 0) { for (Command command : COMMANDS) { System.err.println(command.getLongUsage()); @@ -255,6 +659,7 @@ public class CacheAdmin { System.err.print(separator + c.getName()); separator = ", "; } + System.err.print("\n"); return 1; } System.err.print(command.getLongUsage()); @@ -266,6 +671,10 @@ public class CacheAdmin { new AddPathBasedCacheDirectiveCommand(), new RemovePathBasedCacheDirectiveCommand(), new ListPathBasedCacheDirectiveCommand(), + new AddCachePoolCommand(), + new ModifyCachePoolCommand(), + new RemoveCachePoolCommand(), + new ListCachePoolsCommand(), new HelpCommand(), }; @@ -290,25 +699,4 @@ public class CacheAdmin { } return null; } - - public static void main(String[] argsArray) throws IOException { - if (argsArray.length == 0) { - printUsage(false); - System.exit(1); - } - Command command = determineCommand(argsArray[0]); - if (command == null) { - System.err.println("Can't understand command '" + argsArray[0] + "'"); - if (!argsArray[0].startsWith("-")) { - System.err.println("Command names must start with dashes."); - } - printUsage(false); - System.exit(1); - } - List args = new LinkedList(); - for (int j = 1; j < argsArray.length; j++) { - args.add(argsArray[j]); - } - System.exit(command.run(args)); - } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java index e4b3b8cb056..98691df6a57 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java @@ -24,7 +24,6 @@ import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.LinkedList; import java.util.List; import java.util.TreeSet; @@ -37,8 +36,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.FsStatus; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.RemoteIterator; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.shell.Command; import org.apache.hadoop.fs.shell.CommandFormat; import org.apache.hadoop.hdfs.DFSConfigKeys; @@ -47,17 +44,14 @@ import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.HAUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.NameNodeProxies; -import org.apache.hadoop.hdfs.protocol.CachePoolInfo; import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.SnapshotException; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction; -import org.apache.hadoop.hdfs.server.namenode.CachePool; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.TransferFsImage; -import org.apache.hadoop.hdfs.tools.TableListing.Justification; import org.apache.hadoop.ipc.RPC; import 
org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.NetUtils; @@ -68,8 +62,6 @@ import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; -import com.google.common.base.Joiner; - /** * This class provides some DFS administrative access shell commands. */ @@ -463,230 +455,6 @@ public class DFSAdmin extends FsShell { return exitCode; } - final private static String ADD_CACHE_POOL_USAGE = - "-addCachePool [-owner ] " + - "[-group ] [-mode ] [-weight ]"; - - public int addCachePool(String argsArray[], int idx) throws IOException { - List args= new LinkedList(); - for (int i = idx; i < argsArray.length; i++) { - args.add(argsArray[i]); - } - String owner = StringUtils.popOptionWithArgument("-owner", args); - if (owner == null) { - owner = UserGroupInformation.getCurrentUser().getShortUserName(); - } - String group = StringUtils.popOptionWithArgument("-group", args); - if (group == null) { - group = UserGroupInformation.getCurrentUser().getGroupNames()[0]; - } - String modeString = StringUtils.popOptionWithArgument("-mode", args); - int mode; - if (modeString == null) { - mode = FsPermission.getCachePoolDefault().toShort(); - } else { - mode = Integer.parseInt(modeString, 8); - } - String weightString = StringUtils.popOptionWithArgument("-weight", args); - int weight; - if (weightString == null) { - weight = CachePool.DEFAULT_WEIGHT; - } else { - weight = Integer.parseInt(weightString); - } - String name = StringUtils.popFirstNonOption(args); - if (name == null) { - System.err.println("You must specify a name when creating a " + - "cache pool."); - return 1; - } - if (!args.isEmpty()) { - System.err.print("Can't understand arguments: " + - Joiner.on(" ").join(args) + "\n"); - System.err.println("Usage is " + ADD_CACHE_POOL_USAGE); - return 1; - } - DistributedFileSystem dfs = getDFS(); - CachePoolInfo info = new CachePoolInfo(name). - setOwnerName(owner). - setGroupName(group). - setMode(new FsPermission((short)mode)). - setWeight(weight); - try { - dfs.addCachePool(info); - } catch (IOException e) { - throw new RemoteException(e.getClass().getName(), e.getMessage()); - } - System.out.println("Successfully added cache pool " + name + "."); - return 0; - } - - final private static String MODIFY_CACHE_POOL_USAGE = - "-modifyCachePool [-owner ] " + - "[-group ] [-mode ] [-weight ]"; - - public int modifyCachePool(String argsArray[], int idx) throws IOException { - List args = new LinkedList(); - for (int i = idx; i < argsArray.length; i++) { - args.add(argsArray[i]); - } - String owner = StringUtils.popOptionWithArgument("-owner", args); - String group = StringUtils.popOptionWithArgument("-group", args); - String modeString = StringUtils.popOptionWithArgument("-mode", args); - Integer mode = (modeString == null) ? - null : Integer.parseInt(modeString, 8); - String weightString = StringUtils.popOptionWithArgument("-weight", args); - Integer weight = (weightString == null) ? 
- null : Integer.parseInt(weightString); - String name = StringUtils.popFirstNonOption(args); - if (name == null) { - System.err.println("You must specify a name when creating a " + - "cache pool."); - return 1; - } - if (!args.isEmpty()) { - System.err.print("Can't understand arguments: " + - Joiner.on(" ").join(args) + "\n"); - System.err.println("usage is " + MODIFY_CACHE_POOL_USAGE); - return 1; - } - boolean changed = false; - CachePoolInfo info = new CachePoolInfo(name); - if (owner != null) { - info.setOwnerName(owner); - changed = true; - } - if (group != null) { - info.setGroupName(group); - changed = true; - } - if (mode != null) { - info.setMode(new FsPermission(mode.shortValue())); - changed = true; - } - if (weight != null) { - info.setWeight(weight); - changed = true; - } - if (!changed) { - System.err.println("You must specify at least one attribute to " + - "change in the cache pool."); - return 1; - } - DistributedFileSystem dfs = getDFS(); - try { - dfs.modifyCachePool(info); - } catch (IOException e) { - throw new RemoteException(e.getClass().getName(), e.getMessage()); - } - System.out.print("Successfully modified cache pool " + name); - String prefix = " to have "; - if (owner != null) { - System.out.print(prefix + "owner name " + owner); - prefix = "and "; - } - if (group != null) { - System.out.print(prefix + "group name " + group); - prefix = "and "; - } - if (mode != null) { - System.out.print(prefix + "mode " + new FsPermission(mode.shortValue())); - prefix = "and "; - } - if (weight != null) { - System.out.print(prefix + "weight " + weight); - prefix = "and "; - } - System.out.print("\n"); - return 0; - } - - final private static String REMOVE_CACHE_POOL_USAGE = - "-removeCachePool "; - - public int removeCachePool(String argsArray[], int idx) throws IOException { - List args = new LinkedList(); - for (int i = idx; i < argsArray.length; i++) { - args.add(argsArray[i]); - } - String name = StringUtils.popFirstNonOption(args); - if (name == null) { - System.err.println("You must specify a name when deleting a " + - "cache pool."); - return 1; - } - if (!args.isEmpty()) { - System.err.print("Can't understand arguments: " + - Joiner.on(" ").join(args) + "\n"); - System.err.println("Usage is " + REMOVE_CACHE_POOL_USAGE); - return 1; - } - DistributedFileSystem dfs = getDFS(); - try { - dfs.removeCachePool(name); - } catch (IOException e) { - dfs.removeCachePool(name); - throw new RemoteException(e.getClass().getName(), e.getMessage()); - } - System.out.println("Successfully removed cache pool " + name + "."); - return 0; - } - - final private static String LIST_CACHE_POOLS_USAGE = - "-listCachePools] [-verbose] [name]"; - - public int listCachePools(String argsArray[], int idx) throws IOException { - List args = new LinkedList(); - for (int i = idx; i < argsArray.length; i++) { - args.add(argsArray[i]); - } - String name = StringUtils.popFirstNonOption(args); - if (!args.isEmpty()) { - System.err.print("Can't understand arguments: " + - Joiner.on(" ").join(args) + "\n"); - System.err.println("usage is " + LIST_CACHE_POOLS_USAGE); - return 1; - } - DistributedFileSystem dfs = getDFS(); - TableListing listing = new TableListing.Builder(). - addField("NAME", Justification.LEFT). - addField("OWNER", Justification.LEFT). - addField("GROUP", Justification.LEFT). - addField("MODE", Justification.LEFT). - addField("WEIGHT", Justification.RIGHT). 
- build(); - int numResults = 0; - try { - RemoteIterator iter = dfs.listCachePools(); - while (iter.hasNext()) { - CachePoolInfo info = iter.next(); - if (name == null || info.getPoolName().equals(name)) { - listing.addRow(new String[] { - info.getPoolName(), - info.getOwnerName(), - info.getGroupName(), - info.getMode().toString(), - info.getWeight().toString(), - }); - ++numResults; - if (name != null) { - break; - } - } - } - } catch (IOException e) { - throw new RemoteException(e.getClass().getName(), e.getMessage()); - } - System.out.print(String.format("Found %d result%s.\n", numResults, - (numResults == 1 ? "" : "s"))); - if (numResults > 0) { - System.out.print(listing.build()); - } - // If there are no results, we return 1 (failure exit code); - // otherwise we return 0 (success exit code). - return (numResults == 0) ? 1 : 0; - } - public int rollEdits() throws IOException { DistributedFileSystem dfs = getDFS(); long txid = dfs.rollEdits(); @@ -814,10 +582,6 @@ public class DFSAdmin extends FsShell { "\t[-fetchImage ]\n" + "\t[-allowSnapshot ]\n" + "\t[-disallowSnapshot ]\n" + - "\t[" + ADD_CACHE_POOL_USAGE + "]\n" + - "\t[" + MODIFY_CACHE_POOL_USAGE + "]\n" + - "\t[" + REMOVE_CACHE_POOL_USAGE + "]\n" + - "\t[" + LIST_CACHE_POOLS_USAGE + "]\n" + "\t[-help [cmd]]\n"; String report ="-report: \tReports basic filesystem information and statistics.\n"; @@ -915,42 +679,6 @@ public class DFSAdmin extends FsShell { String disallowSnapshot = "-disallowSnapshot :\n" + "\tDo not allow snapshots to be taken on a directory any more.\n"; - String addCachePool = ADD_CACHE_POOL_USAGE + ": \n" + - "\tAdd a new cache pool.\n" + - "\t is the name of the new pool. It must not already be used.\n" + - "\t is the owner of the pool. It defaults to the current\n" + - "\tuser name.\n" + - "\t is the group of the pool. It defaults to the primary\n" + - "\tgroup name of the current user.\n" + - "\t is the mode of the pool. This is a UNIX-style numeric mode\n" + - "\targument, supplied as an octal number. For example, mode 0755\n" + - "\tgrants the owner all permissions, and grants everyone else\n" + - "\tonly read and list permissions.\n" + - "\tThe mode defaults to " + - String.format("0%03o", - FsPermission.getCachePoolDefault().toShort()) + "\n" + - "\t is the weight of the pool. This determines what share \n" + - "\tof cluster resources the pool will get. It defaults to " + - CachePool.DEFAULT_WEIGHT + "\n"; - - String modifyCachePool = MODIFY_CACHE_POOL_USAGE + ": \n" + - "\tAdd a new cache pool with the given name.\n" + - "\t is the name of the pool to modify.\n" + - "\t is the new owner of the pool.\n" + - "\t is the new group of the pool.\n" + - "\t is the new mode of the pool.\n" + - "\t is the new weight of the pool.\n"; - - String removeCachePool = REMOVE_CACHE_POOL_USAGE + ": \n" + - "\tRemove a cache pool.\n" + - "\t is the name of the pool to remove.\n"; - - String listCachePools = " -listCachePools [-name ] [-verbose]\n" + - "\tList cache pools.\n" + - "\tIf is specified, we will list only the cache pool with\n" + - "\tthat name. 
If is specified, we will list detailed\n" + - "\tinformation about each pool\n"; - String help = "-help [cmd]: \tDisplays help for the given command or all commands if none\n" + "\t\tis specified.\n"; @@ -998,14 +726,6 @@ public class DFSAdmin extends FsShell { System.out.println(allowSnapshot); } else if ("disallowSnapshot".equalsIgnoreCase(cmd)) { System.out.println(disallowSnapshot); - } else if ("addCachePool".equalsIgnoreCase(cmd)) { - System.out.println(addCachePool); - } else if ("modifyCachePool".equalsIgnoreCase(cmd)) { - System.out.println(modifyCachePool); - } else if ("removeCachePool".equalsIgnoreCase(cmd)) { - System.out.println(removeCachePool); - } else if ("listCachePools".equalsIgnoreCase(cmd)) { - System.out.println(listCachePools); } else if ("help".equals(cmd)) { System.out.println(help); } else { @@ -1032,13 +752,6 @@ public class DFSAdmin extends FsShell { System.out.println(fetchImage); System.out.println(allowSnapshot); System.out.println(disallowSnapshot); - System.out.println(addCachePool); - System.out.println(modifyCachePool); - System.out.println(removeCachePool); - System.out.println(listCachePools); - - System.out.println(disallowSnapshot); - System.out.println(help); System.out.println(); ToolRunner.printGenericCommandUsage(System.out); @@ -1275,18 +988,6 @@ public class DFSAdmin extends FsShell { } else if ("-fetchImage".equals(cmd)) { System.err.println("Usage: java DFSAdmin" + " [-fetchImage ]"); - } else if ("-addCachePool".equals(cmd)) { - System.err.println("Usage: java DFSAdmin" - + " [" + ADD_CACHE_POOL_USAGE + "]"); - } else if ("-modifyCachePool".equals(cmd)) { - System.err.println("Usage: java DFSAdmin" - + " [" + MODIFY_CACHE_POOL_USAGE + "]"); - } else if ("-removeCachePool".equals(cmd)) { - System.err.println("Usage: java DFSAdmin" - + " [" + REMOVE_CACHE_POOL_USAGE + "]"); - } else if ("-listCachePools".equals(cmd)) { - System.err.println("Usage: java DFSAdmin" - + " [" + LIST_CACHE_POOLS_USAGE + "]"); } else { System.err.println("Usage: java DFSAdmin"); System.err.println("Note: Administrative commands can only be run as the HDFS superuser."); @@ -1312,10 +1013,6 @@ public class DFSAdmin extends FsShell { System.err.println(" ["+ClearSpaceQuotaCommand.USAGE+"]"); System.err.println(" [-setBalancerBandwidth ]"); System.err.println(" [-fetchImage ]"); - System.err.println(" [" + ADD_CACHE_POOL_USAGE + "]"); - System.err.println(" [" + MODIFY_CACHE_POOL_USAGE + "]"); - System.err.println(" [" + REMOVE_CACHE_POOL_USAGE + "]"); - System.err.println(" [" + LIST_CACHE_POOLS_USAGE + "]"); System.err.println(" [-help [cmd]]"); System.err.println(); ToolRunner.printGenericCommandUsage(System.err); @@ -1488,14 +1185,6 @@ public class DFSAdmin extends FsShell { exitCode = setBalancerBandwidth(argv, i); } else if ("-fetchImage".equals(cmd)) { exitCode = fetchImage(argv, i); - } else if ("-addCachePool".equals(cmd)) { - exitCode = addCachePool(argv, i); - } else if ("-modifyCachePool".equals(cmd)) { - exitCode = modifyCachePool(argv, i); - } else if ("-removeCachePool".equals(cmd)) { - exitCode = removeCachePool(argv, i); - } else if ("-listCachePools".equals(cmd)) { - exitCode = listCachePools(argv, i); } else if ("-help".equals(cmd)) { if (i < argv.length) { printHelp(argv[i]); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java index aded360a428..857111d551e 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/TableListing.java @@ -17,13 +17,23 @@ */ package org.apache.hadoop.hdfs.tools; +import java.util.ArrayList; import java.util.LinkedList; import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.WordUtils; import org.apache.hadoop.classification.InterfaceAudience; /** * This class implements a "table listing" with column headers. + * + * Example: + * + * NAME OWNER GROUP MODE WEIGHT + * pool1 andrew andrew rwxr-xr-x 100 + * pool2 andrew andrew rwxr-xr-x 100 + * pool3 andrew andrew rwxr-xr-x 100 + * */ @InterfaceAudience.Private public class TableListing { @@ -33,39 +43,80 @@ public class TableListing { } private static class Column { - private final LinkedList rows; + private final ArrayList rows; private final Justification justification; - private int maxLength; + private final boolean wrap; - Column(String title, Justification justification) { - this.rows = new LinkedList(); + private int wrapWidth = Integer.MAX_VALUE; + private int maxWidth; + + Column(String title, Justification justification, boolean wrap) { + this.rows = new ArrayList(); this.justification = justification; - this.maxLength = 0; + this.wrap = wrap; + this.maxWidth = 0; addRow(title); } private void addRow(String val) { - if ((val.length() + 1) > maxLength) { - maxLength = val.length() + 1; + if ((val.length() + 1) > maxWidth) { + maxWidth = val.length() + 1; + } + // Ceiling at wrapWidth, because it'll get wrapped + if (maxWidth > wrapWidth) { + maxWidth = wrapWidth; } rows.add(val); } - String getRow(int i) { - String raw = rows.get(i); - int paddingLength = maxLength - raw.length(); - String padding = (paddingLength <= 0) ? "" : - StringUtils.repeat(" ", paddingLength); - if (justification == Justification.LEFT) { - return raw + padding; - } else { - return padding + raw; + private int getMaxWidth() { + return maxWidth; + } + + private void setWrapWidth(int width) { + wrapWidth = width; + // Ceiling the maxLength at wrapWidth + if (maxWidth > wrapWidth) { + maxWidth = wrapWidth; } + // Else we need to traverse through and find the real maxWidth + else { + maxWidth = 0; + for (int i=0; i maxWidth) { + maxWidth = length; + } + } + } + } + + /** + * Return the ith row of the column as a set of wrapped strings, each at + * most wrapWidth in length. + */ + String[] getRow(int idx) { + String raw = rows.get(idx); + // Line-wrap if it's too long + String[] lines = new String[] {raw}; + if (wrap) { + lines = WordUtils.wrap(lines[0], wrapWidth, "\n", true).split("\n"); + } + for (int i=0; i columns = new LinkedList(); + private boolean showHeader = true; + private int wrapWidth = Integer.MAX_VALUE; /** * Create a new Builder. @@ -74,14 +125,63 @@ public class TableListing { } /** - * Add a new field to the Table under construction. - * - * @param title Field title. - * @param leftJustified Whether or not the field is left justified. - * @return this. 
+ * See {@link #addField(String, Justification, boolean) + */ + public Builder addField(String title) { + return addField(title, Justification.LEFT, false); + } + + /** + * See {@link #addField(String, Justification, boolean) */ public Builder addField(String title, Justification justification) { - columns.add(new Column(title, justification)); + return addField(title, justification, false); + } + + /** + * See {@link #addField(String, Justification, boolean) + */ + public Builder addField(String title, boolean wrap) { + return addField(title, Justification.LEFT, wrap); + } + + /** + * Add a new field to the Table under construction. + * + * @param title Field title. + * @param justification Right or left justification. Defaults to left. + * @Param wrapWidth Width at which to auto-wrap the content of the cell. + * Defaults to Integer.MAX_VALUE. + * @return This Builder object + */ + public Builder addField(String title, Justification justification, + boolean wrap) { + columns.add(new Column(title, justification, wrap)); + return this; + } + + /** + * Whether to hide column headers in table output + */ + public Builder hideHeaders() { + this.showHeader = false; + return this; + } + + /** + * Whether to show column headers in table output. This is the default. + */ + public Builder showHeaders() { + this.showHeader = true; + return this; + } + + /** + * Set the maximum width of a row in the TableListing. Must have one or + * more wrappable fields for this to take effect. + */ + public Builder wrapWidth(int width) { + this.wrapWidth = width; return this; } @@ -89,17 +189,22 @@ public class TableListing { * Create a new TableListing. */ public TableListing build() { - return new TableListing(columns.toArray(new Column[0])); + return new TableListing(columns.toArray(new Column[0]), showHeader, + wrapWidth); } } private final Column columns[]; private int numRows; + private boolean showHeader; + private int wrapWidth; - TableListing(Column columns[]) { + TableListing(Column columns[], boolean showHeader, int wrapWidth) { this.columns = columns; this.numRows = 0; + this.showHeader = showHeader; + this.wrapWidth = wrapWidth; } /** @@ -107,7 +212,7 @@ public class TableListing { * * @param row The row of objects to add-- one per column. */ - public void addRow(String row[]) { + public void addRow(String... row) { if (row.length != columns.length) { throw new RuntimeException("trying to add a row with " + row.length + " columns, but we have " + columns.length + " columns."); @@ -118,19 +223,67 @@ public class TableListing { numRows++; } - /** - * Convert the table to a string. 
- */ - public String build() { + @Override + public String toString() { StringBuilder builder = new StringBuilder(); - for (int i = 0; i < numRows + 1; i++) { - String prefix = ""; - for (int j = 0; j < columns.length; j++) { - builder.append(prefix); - prefix = " "; - builder.append(columns[j].getRow(i)); + // Calculate the widths of each column based on their maxWidths and + // the wrapWidth for the entire table + int width = (columns.length-1)*2; // inter-column padding + for (int i=0; i wrapWidth) { + boolean modified = false; + for (int i=0; i 4) { + column.setWrapWidth(maxWidth-1); + modified = true; + width -= 1; + if (width <= wrapWidth) { + break; + } + } + } + } + if (!modified) { + break; + } + } + + int startrow = 0; + if (!showHeader) { + startrow = 1; + } + String[][] columnLines = new String[columns.length][]; + for (int i = startrow; i < numRows + 1; i++) { + int maxColumnLines = 0; + for (int j = 0; j < columns.length; j++) { + columnLines[j] = columns[j].getRow(i); + if (columnLines[j].length > maxColumnLines) { + maxColumnLines = columnLines[j].length; + } + } + + for (int c = 0; c < maxColumnLines; c++) { + // First column gets no left-padding + String prefix = ""; + for (int j = 0; j < columns.length; j++) { + // Prepend padding + builder.append(prefix); + prefix = " "; + if (columnLines[j].length > c) { + builder.append(columnLines[j][c]); + } else { + builder.append(StringUtils.repeat(" ", columns[j].maxWidth)); + } + } + builder.append("\n"); } - builder.append("\n"); } return builder.toString(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java new file mode 100644 index 00000000000..f25c4fe01ab --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java @@ -0,0 +1,141 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.cli; + +import static org.junit.Assert.assertTrue; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.cli.util.CLICommand; +import org.apache.hadoop.cli.util.CLICommandCacheAdmin; +import org.apache.hadoop.cli.util.CLICommandTypes; +import org.apache.hadoop.cli.util.CLITestCmd; +import org.apache.hadoop.cli.util.CacheAdminCmdExecutor; +import org.apache.hadoop.cli.util.CommandExecutor; +import org.apache.hadoop.cli.util.CommandExecutor.Result; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.hdfs.DFSConfigKeys; +import org.apache.hadoop.hdfs.DistributedFileSystem; +import org.apache.hadoop.hdfs.HDFSPolicyProvider; +import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.apache.hadoop.hdfs.tools.CacheAdmin; +import org.apache.hadoop.security.authorize.PolicyProvider; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.xml.sax.SAXException; + +public class TestCacheAdminCLI extends CLITestHelper { + + public static final Log LOG = LogFactory.getLog(TestCacheAdminCLI.class); + + protected MiniDFSCluster dfsCluster = null; + protected FileSystem fs = null; + protected String namenode = null; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + conf.setClass(PolicyProvider.POLICY_PROVIDER_CONFIG, + HDFSPolicyProvider.class, PolicyProvider.class); + + // Many of the tests expect a replication value of 1 in the output + conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1); + + dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build(); + + dfsCluster.waitClusterUp(); + namenode = conf.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///"); + username = System.getProperty("user.name"); + + fs = dfsCluster.getFileSystem(); + assertTrue("Not a HDFS: "+fs.getUri(), + fs instanceof DistributedFileSystem); + } + + @After + @Override + public void tearDown() throws Exception { + if (fs != null) { + fs.close(); + } + if (dfsCluster != null) { + dfsCluster.shutdown(); + } + Thread.sleep(2000); + super.tearDown(); + } + + @Override + protected String getTestFile() { + return "testCacheAdminConf.xml"; + } + + @Override + protected TestConfigFileParser getConfigParser() { + return new TestConfigFileParserCacheAdmin(); + } + + private class TestConfigFileParserCacheAdmin extends + CLITestHelper.TestConfigFileParser { + @Override + public void endElement(String uri, String localName, String qName) + throws SAXException { + if (qName.equals("cache-admin-command")) { + if (testCommands != null) { + testCommands.add(new CLITestCmdCacheAdmin(charString, + new CLICommandCacheAdmin())); + } else if (cleanupCommands != null) { + cleanupCommands.add(new CLITestCmdCacheAdmin(charString, + new CLICommandCacheAdmin())); + } + } else { + super.endElement(uri, localName, qName); + } + } + } + + private class CLITestCmdCacheAdmin extends CLITestCmd { + + public CLITestCmdCacheAdmin(String str, CLICommandTypes type) { + super(str, type); + } + + @Override + public CommandExecutor getExecutor(String tag) + throws IllegalArgumentException { + if (getType() instanceof CLICommandCacheAdmin) { + return new CacheAdminCmdExecutor(tag, new CacheAdmin(conf)); + } + return super.getExecutor(tag); + } + } + + @Override + protected Result execute(CLICommand cmd) throws Exception { + return cmd.getExecutor("").executeCommand(cmd.getCmd()); + } + + @Test + @Override + public void testAll () { + super.testAll(); + } +} diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java new file mode 100644 index 00000000000..e9bf182c992 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CLICommandCacheAdmin.java @@ -0,0 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.cli.util; + +public class CLICommandCacheAdmin implements CLICommandTypes { +} diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java new file mode 100644 index 00000000000..922020faf84 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/util/CacheAdminCmdExecutor.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.cli.util; + +import org.apache.hadoop.hdfs.tools.CacheAdmin; +import org.apache.hadoop.util.ToolRunner; + +public class CacheAdminCmdExecutor extends CommandExecutor { + protected String namenode = null; + protected CacheAdmin admin = null; + + public CacheAdminCmdExecutor(String namenode, CacheAdmin admin) { + this.namenode = namenode; + this.admin = admin; + } + + @Override + protected void execute(final String cmd) throws Exception { + String[] args = getCommandAsArgs(cmd, "NAMENODE", this.namenode); + ToolRunner.run(admin, args); + } +} diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml new file mode 100644 index 00000000000..0153b72d7d7 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml @@ -0,0 +1,211 @@ + + + + + + + + test + + + + + + Testing basic usage + + + + + + + + SubstringComparator + Usage: bin/hdfs cacheadmin [COMMAND] + + + + + + Testing listing no cache pools + + -listPools + + + + + + SubstringComparator + Found 0 results. + + + + + + Testing adding a cache pool + + -addPool foo + + + -removePool foo + + + + SubstringComparator + Successfully added cache pool foo. + + + + + + Testing modifying a cache pool + + -addPool poolparty -owner alice -group alicegroup -mode 0000 -weight 50 + -modifyPool poolparty -owner bob -group bobgroup -mode 0777 -weight 51 + -listPools + + + -removePool poolparty + + + + SubstringComparator + poolparty bob bobgroup rwxrwxrwx 51 + + + + + + Testing deleting a cache pool + + -addPool foo + -removePool foo + + + + + + SubstringComparator + Successfully removed cache pool foo. 
+ + + + + + Testing listing all cache pools + + -addPool foo -owner bob -group bob -mode 0664 + -addPool bar -owner alice -group alicegroup -mode 0755 + -listPools + + + -removePool foo + -removePool bar + + + + SubstringComparator + Found 2 results. + + + SubstringComparator + bar alice alicegroup rwxr-xr-x 100 + + + SubstringComparator + foo bob bob rw-rw-r-- 100 + + + + + + Testing listing a single cache pool + + -addPool foo -owner bob -group bob -mode 0664 + -addPool bar -owner alice -group alicegroup -mode 0755 + -listPools foo + + + -removePool foo + -removePool bar + + + + SubstringComparator + Found 1 result. + + + SubstringComparator + foo bob bob rw-rw-r-- 100 + + + + + + Testing creating cache paths + + -addPool pool1 + -addPath -path /foo -pool pool1 + -addPath -path /bar -pool pool1 + -listPaths -pool pool1 + + + -removePool pool1 + + + + SubstringComparator + Found 2 entries + + + SubstringComparator + 1 pool1 /foo + + + SubstringComparator + 2 pool1 /bar + + + + + + Testing removing cache paths + + -addPool pool1 + -addPath -path /foo -pool pool1 + -addPath -path /bar -pool pool1 + -removePool pool1 + -listPaths -pool pool1 + + + + + + SubstringComparator + Found 0 entries + + + + + + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml index 92cabe270bf..490885b6c23 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml @@ -16521,70 +16521,5 @@ - - - Testing listing no cache pools - - -fs NAMENODE -listCachePools - - - - - - SubstringComparator - Found 0 results. - - - - - - Testing adding a cache pool - - -fs NAMENODE -addCachePool foo - - - -fs NAMENODE -removeCachePool foo - - - - SubstringComparator - Successfully added cache pool foo. - - - - - - Testing deleting a cache pool - - -fs NAMENODE -addCachePool foo - -fs NAMENODE -removeCachePool foo - - - - - - SubstringComparator - Successfully removed cache pool foo. - - - - - - Testing listing a cache pool - - -fs NAMENODE -addCachePool foo -owner bob -group bob -mode 0664 - -fs NAMENODE -listCachePools foo - - - -fs NAMENODE -removeCachePool foo - - - - SubstringComparator - bob bob rw-rw-r-- 100 - - - -
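Note: the testCacheAdminConf.xml hunk above has lost its XML element markup
in extraction. Going by the element name that TestConfigFileParserCacheAdmin
handles ("cache-admin-command") and the CLITestHelper schema that
testHDFSConf.xml uses, a single test case presumably looked like this
reconstruction (a sketch, not the verbatim file):

    <test> <!-- Tested -->
      <description>Testing adding a cache pool</description>
      <test-commands>
        <cache-admin-command>-addPool foo</cache-admin-command>
      </test-commands>
      <cleanup-commands>
        <cache-admin-command>-removePool foo</cache-admin-command>
      </cleanup-commands>
      <comparators>
        <comparator>
          <type>SubstringComparator</type>
          <expected-output>Successfully added cache pool foo.</expected-output>
        </comparator>
      </comparators>
    </test>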