From c8ba7039b7973edb133e0eb89856f40fad1085d6 Mon Sep 17 00:00:00 2001
From: Andrew Wang <wang@apache.org>
Date: Fri, 16 May 2014 01:19:17 +0000
Subject: [PATCH] HDFS-6345. DFS.listCacheDirectives() should allow filtering
 based on cache directive ID. (wang)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1595087 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +
 .../hdfs/protocol/CacheDirectiveIterator.java | 70 ++++++++++++++++++-
 .../hdfs/server/namenode/CacheManager.java    | 26 +++++--
 .../apache/hadoop/hdfs/tools/CacheAdmin.java  | 10 ++-
 .../server/namenode/TestCacheDirectives.java  |  6 ++
 .../src/test/resources/testCacheAdminConf.xml | 24 +++++++
 6 files changed, 131 insertions(+), 8 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c5be00f8400..c9a3a31c189 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -112,6 +112,9 @@ Release 2.5.0 - UNRELEASED
     HDFS-5683. Better audit log messages for caching operations.
     (Abhiraj Butala via wang)
 
+    HDFS-6345. DFS.listCacheDirectives() should allow filtering based on
+    cache directive ID. (wang)
+
   OPTIMIZATIONS
 
     HDFS-6214. Webhdfs has poor throughput for files >2GB (daryn)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java
index 1ef5c538e45..676106de10f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java
@@ -23,6 +23,10 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.BatchedRemoteIterator;
+import org.apache.hadoop.fs.InvalidRequestException;
+import org.apache.hadoop.ipc.RemoteException;
+
+import com.google.common.base.Preconditions;
 
 /**
  * CacheDirectiveIterator is a remote iterator that iterates cache directives.
@@ -33,7 +37,7 @@ import org.apache.hadoop.fs.BatchedRemoteIterator;
 public class CacheDirectiveIterator
     extends BatchedRemoteIterator<Long, CacheDirectiveEntry> {
 
-  private final CacheDirectiveInfo filter;
+  private CacheDirectiveInfo filter;
   private final ClientProtocol namenode;
 
   public CacheDirectiveIterator(ClientProtocol namenode,
@@ -43,10 +47,72 @@ public class CacheDirectiveIterator
     this.filter = filter;
   }
 
+  private static CacheDirectiveInfo removeIdFromFilter(CacheDirectiveInfo filter) {
+    CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder(filter);
+    builder.setId(null);
+    return builder.build();
+  }
+
+  /**
+   * Used for compatibility when communicating with a server version that
+   * does not support filtering directives by ID.
+   */
+  private static class SingleEntry implements
+      BatchedEntries<CacheDirectiveEntry> {
+
+    private final CacheDirectiveEntry entry;
+
+    public SingleEntry(final CacheDirectiveEntry entry) {
+      this.entry = entry;
+    }
+
+    @Override
+    public CacheDirectiveEntry get(int i) {
+      if (i > 0) {
+        return null;
+      }
+      return entry;
+    }
+
+    @Override
+    public int size() {
+      return 1;
+    }
+
+    @Override
+    public boolean hasMore() {
+      return false;
+    }
+  }
+
   @Override
   public BatchedEntries<CacheDirectiveEntry> makeRequest(Long prevKey)
       throws IOException {
-    return namenode.listCacheDirectives(prevKey, filter);
+    BatchedEntries<CacheDirectiveEntry> entries = null;
+    try {
+      entries = namenode.listCacheDirectives(prevKey, filter);
+    } catch (IOException e) {
+      if (e.getMessage().contains("Filtering by ID is unsupported")) {
+        // Retry case for old servers, do the filtering client-side
+        long id = filter.getId();
+        filter = removeIdFromFilter(filter);
+        // Using id - 1 as prevId should get us a window containing the id
+        // This is somewhat brittle, since it depends on directives being
+        // returned in order of ascending ID.
+        entries = namenode.listCacheDirectives(id - 1, filter);
+        for (int i=0; i<entries.size(); i++) {
+          CacheDirectiveEntry entry = entries.get(i);
+          if (entry.getInfo().getId().equals((Long)id)) {
+            return new SingleEntry(entry);
+          }
+        }
+        throw new RemoteException(InvalidRequestException.class.getName(),
+            "Did not find requested id " + id);
+      }
+      throw e;
+    }
+    Preconditions.checkNotNull(entries);
+    return entries;
   }
 
   @Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
@@ -691,15 +691,25 @@ public final class CacheManager {
     assert namesystem.hasReadLock();
     final int NUM_PRE_ALLOCATED_ENTRIES = 16;
     String filterPath = null;
-    if (filter.getId() != null) {
-      throw new IOException("Filtering by ID is unsupported.");
-    }
     if (filter.getPath() != null) {
       filterPath = validatePath(filter);
     }
     if (filter.getReplication() != null) {
-      throw new IOException("Filtering by replication is unsupported.");
+      throw new InvalidRequestException(
+          "Filtering by replication is unsupported.");
     }
+
+    // Querying for a single ID
+    final Long id = filter.getId();
+    if (id != null) {
+      if (!directivesById.containsKey(id)) {
+        throw new InvalidRequestException("Did not find requested id " + id);
+      }
+      // Since we use a tailMap on directivesById, setting prev to id-1 gets
+      // us the directive with the id (if present)
+      prevId = id - 1;
+    }
+
     ArrayList<CacheDirectiveEntry> replies =
         new ArrayList<CacheDirectiveEntry>(NUM_PRE_ALLOCATED_ENTRIES);
     int numReplies = 0;
@@ -711,6 +721,14 @@ public final class CacheManager {
       }
       CacheDirective curDirective = cur.getValue();
       CacheDirectiveInfo info = cur.getValue().toInfo();
+
+      // If the requested ID is present, it should be the first item.
+      // Hitting this case means the ID is not present, or we're on the second
+      // item and should break out.
+      if (id != null &&
+          !(info.getId().equals(id))) {
+        break;
+      }
       if (filter.getPool() != null &&
           !info.getPool().equals(filter.getPool())) {
         continue;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
index b7ac968dede..3325570c7d0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
@@ -503,19 +503,21 @@ public class CacheAdmin extends Configured implements Tool {
 
     @Override
     public String getShortUsage() {
-      return "[" + getName() + " [-stats] [-path <path>] [-pool <pool>]]\n";
+      return "[" + getName()
+          + " [-stats] [-path <path>] [-pool <pool>] [-id <id>]\n";
     }
 
     @Override
     public String getLongUsage() {
       TableListing listing = getOptionDescriptionListing();
+      listing.addRow("-stats", "List path-based cache directive statistics.");
       listing.addRow("<path>", "List only " +
           "cache directives with this path. " +
" + "Note that if there is a cache directive for " + "in a cache pool that we don't have read access for, it " + "will not be listed."); listing.addRow("", "List only path cache directives in that pool."); - listing.addRow("-stats", "List path-based cache directive statistics."); + listing.addRow("", "List the cache directive with this id."); return getShortUsage() + "\n" + "List cache directives.\n\n" + listing.toString(); @@ -534,6 +536,10 @@ public class CacheAdmin extends Configured implements Tool { builder.setPool(poolFilter); } boolean printStats = StringUtils.popOption("-stats", args); + String idFilter = StringUtils.popOptionWithArgument("-id", args); + if (idFilter != null) { + builder.setId(Long.parseLong(idFilter)); + } if (!args.isEmpty()) { System.err.println("Can't understand argument: " + args.get(0)); return 1; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java index ecc738aadcd..ed124603f3d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java @@ -477,6 +477,12 @@ public class TestCacheDirectives { iter = dfs.listCacheDirectives( new CacheDirectiveInfo.Builder().setPool("pool2").build()); validateListAll(iter, betaId); + iter = dfs.listCacheDirectives( + new CacheDirectiveInfo.Builder().setId(alphaId2).build()); + validateListAll(iter, alphaId2); + iter = dfs.listCacheDirectives( + new CacheDirectiveInfo.Builder().setId(relativeId).build()); + validateListAll(iter, relativeId); dfs.removeCacheDirective(betaId); iter = dfs.listCacheDirectives( diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml index 13f1b9ae14d..058eec59aa5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml @@ -519,5 +519,29 @@ + + + Testing listing a single cache directive + + -addPool pool1 + -addDirective -path /foo -pool pool1 -ttl 2d + -addDirective -path /bar -pool pool1 -ttl 24h + -addDirective -path /baz -replication 2 -pool pool1 -ttl 60m + -listDirectives -stats -id 30 + + + -removePool pool1 + + + + SubstringComparator + Found 1 entry + + + SubstringComparator + 30 pool1 1 + + +