From 8111c3af6b06d6a814e235ad90af5860632d2c25 Mon Sep 17 00:00:00 2001
From: Chris Nauroth
Date: Fri, 11 Oct 2013 19:44:20 +0000
Subject: [PATCH] HDFS-5224. Refactor PathBasedCache* methods to use a Path
 rather than a String. Contributed by Chris Nauroth.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4949@1531406 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-hdfs/CHANGES-HDFS-4949.txt         |  3 +
 .../hadoop/hdfs/DistributedFileSystem.java    | 27 +++++-
 .../AddPathBasedCacheDirectiveException.java  |  8 +-
 .../protocol/PathBasedCacheDescriptor.java    |  3 +-
 .../protocol/PathBasedCacheDirective.java     | 69 ++++++++++++---
 .../hdfs/protocol/PathBasedCacheEntry.java    |  4 +-
 ...amenodeProtocolServerSideTranslatorPB.java | 15 +++-
 .../ClientNamenodeProtocolTranslatorPB.java   |  5 +-
 .../hdfs/server/namenode/CacheManager.java    | 16 ++--
 .../hdfs/server/namenode/FSEditLog.java       |  2 +-
 .../hdfs/server/namenode/FSEditLogLoader.java |  7 +-
 .../apache/hadoop/hdfs/tools/CacheAdmin.java  | 13 ++-
 ...amenodeProtocolServerSideTranslatorPB.java | 56 +++++++++++++
 .../TestCacheReplicationManager.java          | 18 ++--
 .../namenode/OfflineEditsViewerHelper.java    |  5 +-
 .../namenode/TestPathBasedCacheRequests.java  | 84 +++++++++++--------
 16 files changed, 248 insertions(+), 87 deletions(-)
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestClientNamenodeProtocolServerSideTranslatorPB.java

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
index 54e697a3fd5..6107b64e068 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
@@ -57,6 +57,9 @@ HDFS-4949 (Unreleased)
     HDFS-5304. Expose if a block replica is cached in getFileBlockLocations.
     (Contributed by Andrew Wang)
 
+    HDFS-5224. Refactor PathBasedCache* methods to use a Path rather than a
+    String. (cnauroth)
+
   OPTIMIZATIONS
 
   BUG FIXES

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index 2ece7640a77..90c9ebca23f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -1591,7 +1591,12 @@ public class DistributedFileSystem extends FileSystem {
    */
   public PathBasedCacheDescriptor addPathBasedCacheDirective(
       PathBasedCacheDirective directive) throws IOException {
-    return dfs.addPathBasedCacheDirective(directive);
+    Path path = new Path(getPathName(fixRelativePart(directive.getPath()))).
+        makeQualified(getUri(), getWorkingDirectory());
+    return dfs.addPathBasedCacheDirective(new PathBasedCacheDirective.Builder().
+        setPath(path).
+        setPool(directive.getPool()).
+        build());
   }
 
   /**
@@ -1614,8 +1619,24 @@
    * @return A RemoteIterator which returns PathBasedCacheDescriptor objects.
    */
   public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(
-      String pool, String path) throws IOException {
-    return dfs.listPathBasedCacheDescriptors(pool, path);
+      String pool, final Path path) throws IOException {
+    String pathName = path != null ? getPathName(fixRelativePart(path)) : null;
+    final RemoteIterator<PathBasedCacheDescriptor> iter =
+        dfs.listPathBasedCacheDescriptors(pool, pathName);
+    return new RemoteIterator<PathBasedCacheDescriptor>() {
+      @Override
+      public boolean hasNext() throws IOException {
+        return iter.hasNext();
+      }
+
+      @Override
+      public PathBasedCacheDescriptor next() throws IOException {
+        PathBasedCacheDescriptor desc = iter.next();
+        Path qualPath = desc.getPath().makeQualified(getUri(), path);
+        return new PathBasedCacheDescriptor(desc.getEntryId(), qualPath,
+            desc.getPool());
+      }
+    };
   }
 
   /**
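Both wrappers above now accept and return org.apache.hadoop.fs.Path rather than String. A short sketch of the qualification step they rely on, with a hypothetical cluster URI of hdfs://nn:8020 and working directory /user/alice:

    import java.net.URI;
    import org.apache.hadoop.fs.Path;

    // makeQualified(URI, Path) resolves a bare path against the default
    // file system URI and the current working directory
    Path relative = new Path("data/hot");
    Path qualified = relative.makeQualified(
        URI.create("hdfs://nn:8020"), new Path("/user/alice"));
    // qualified.toString() -> "hdfs://nn:8020/user/alice/data/hot"

The RemoteIterator wrapper applies the same makeQualified call in next(), so callers always see fully qualified Paths even though the NameNode stores bare path names.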
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java
index c077f9c90be..a59463dae95 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/AddPathBasedCacheDirectiveException.java
@@ -33,12 +33,8 @@ public abstract class AddPathBasedCacheDirectiveException extends IOException {
       extends AddPathBasedCacheDirectiveException {
     private static final long serialVersionUID = 1L;
 
-    public EmptyPathError(String msg) {
-      super(msg);
-    }
-
-    public EmptyPathError(PathBasedCacheDirective directive) {
-      this("empty path in directive " + directive);
+    public EmptyPathError() {
+      super("empty path in directive");
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDescriptor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDescriptor.java
index 2d27942c373..26c1eaa12ee 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDescriptor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDescriptor.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.protocol;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.Path;
 
 import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
@@ -32,7 +33,7 @@ import com.google.common.base.Preconditions;
 public final class PathBasedCacheDescriptor extends PathBasedCacheDirective {
   private final long entryId;
 
-  public PathBasedCacheDescriptor(long entryId, String path, String pool) {
+  public PathBasedCacheDescriptor(long entryId, Path path, String pool) {
     super(path, pool);
     Preconditions.checkArgument(entryId > 0);
     this.entryId = entryId;
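EmptyPathError loses its detail-carrying constructors because an empty path can no longer reach this exception through the typed API: org.apache.hadoop.fs.Path rejects the empty string at construction time, so the no-argument form with a fixed message is all that is needed for the one place that still sees raw strings, the protobuf translator (below). A one-line illustration:

    // Path cannot represent an empty path name at all:
    new Path("");  // throws IllegalArgumentException
                   // ("Can not create a Path from an empty string")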
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java
index 1f60616fc19..15b5bbd3960 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheDirective.java
@@ -25,8 +25,8 @@
 import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
 import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
 import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
@@ -36,21 +36,54 @@ import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.Inval
 @InterfaceStability.Evolving
 @InterfaceAudience.Public
 public class PathBasedCacheDirective {
-  private final String path;
-  private final String pool;
+  /**
+   * A builder for creating new PathBasedCacheDirective instances.
+   */
+  public static class Builder {
 
-  public PathBasedCacheDirective(String path, String pool) {
-    Preconditions.checkNotNull(path);
-    Preconditions.checkNotNull(pool);
-    this.path = path;
-    this.pool = pool;
+    private Path path;
+    private String pool;
+
+    /**
+     * Builds a new PathBasedCacheDirective populated with the set properties.
+     *
+     * @return New PathBasedCacheDirective.
+     */
+    public PathBasedCacheDirective build() {
+      return new PathBasedCacheDirective(path, pool);
+    }
+
+    /**
+     * Sets the path used in this request.
+     *
+     * @param path The path used in this request.
+     * @return This builder, for call chaining.
+     */
+    public Builder setPath(Path path) {
+      this.path = path;
+      return this;
+    }
+
+    /**
+     * Sets the pool used in this request.
+     *
+     * @param pool The pool used in this request.
+     * @return This builder, for call chaining.
+     */
+    public Builder setPool(String pool) {
+      this.pool = pool;
+      return this;
+    }
   }
 
+  private final Path path;
+  private final String pool;
+
   /**
    * @return The path used in this request.
    */
-  public String getPath() {
+  public Path getPath() {
     return path;
   }
 
   /**
@@ -68,10 +101,7 @@ public class PathBasedCacheDirective {
    * If this PathBasedCacheDirective is not valid.
    */
   public void validate() throws IOException {
-    if (path.isEmpty()) {
-      throw new EmptyPathError(this);
-    }
-    if (!DFSUtil.isValidName(path)) {
+    if (!DFSUtil.isValidName(path.toUri().getPath())) {
       throw new InvalidPathNameError(this);
     }
     if (pool.isEmpty()) {
@@ -108,4 +138,17 @@
     append(" }");
     return builder.toString();
   }
+
+  /**
+   * Protected constructor. Callers use Builder to create new instances.
+   *
+   * @param path The path used in this request.
+   * @param pool The pool used in this request.
+   */
+  protected PathBasedCacheDirective(Path path, String pool) {
+    Preconditions.checkNotNull(path);
+    Preconditions.checkNotNull(pool);
+    this.path = path;
+    this.pool = pool;
+  }
 };
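The public two-argument constructor is replaced by a nested Builder in front of a protected constructor that keeps the null checks. A minimal usage sketch (the path and pool name here are made up):

    PathBasedCacheDirective directive = new PathBasedCacheDirective.Builder().
        setPath(new Path("/datasets/hot")).
        setPool("researchPool").
        build();

The builder keeps call sites stable if optional fields are added later, and the constructor stays reachable for the PathBasedCacheDescriptor subclass, which still calls super(path, pool).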
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java
index b4bd1545e3c..2c40885da3c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/PathBasedCacheEntry.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.protocol;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.namenode.CachePool;
 
 import com.google.common.base.Preconditions;
@@ -65,6 +66,7 @@ public final class PathBasedCacheEntry {
   }
 
   public PathBasedCacheDescriptor getDescriptor() {
-    return new PathBasedCacheDescriptor(entryId, path, pool.getPoolName());
+    return new PathBasedCacheDescriptor(entryId, new Path(path),
+        pool.getPoolName());
   }
 };

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
index 272286572a8..8c4b6441cba 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
@@ -25,8 +25,10 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Options.Rename;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
@@ -176,6 +178,8 @@
 import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto;
 import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto;
 import org.apache.hadoop.security.token.Token;
 
+import org.apache.commons.lang.StringUtils;
+
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
 
@@ -1035,8 +1039,13 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
       throws ServiceException {
     try {
       PathBasedCacheDirectiveProto proto = request.getDirective();
-      PathBasedCacheDirective directive =
-          new PathBasedCacheDirective(proto.getPath(), proto.getPool());
+      if (StringUtils.isEmpty(proto.getPath())) {
+        throw new EmptyPathError();
+      }
+      PathBasedCacheDirective directive = new PathBasedCacheDirective.Builder().
+          setPath(new Path(proto.getPath())).
+          setPool(proto.getPool()).
+          build();
       PathBasedCacheDescriptor descriptor =
           server.addPathBasedCacheDirective(directive);
       AddPathBasedCacheDirectiveResponseProto.Builder builder =
@@ -1080,7 +1089,7 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
         builder.addElements(
             ListPathBasedCacheDescriptorsElementProto.newBuilder().
               setId(directive.getEntryId()).
-              setPath(directive.getPath()).
+              setPath(directive.getPath().toUri().getPath()).
               setPool(directive.getPool()));
         prevId = directive.getEntryId();
       }
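PathBasedCacheDirectiveProto still carries the path as a plain string; only the Java API deals in Path. A hedged sketch of the round trip the two translators perform between them (the path and pool are hypothetical):

    PathBasedCacheDirective sent = new PathBasedCacheDirective.Builder().
        setPath(new Path("hdfs://nn:8020/data/logs")).
        setPool("pool1").
        build();
    // client-side translator: only the URI path component crosses the wire
    String wire = sent.getPath().toUri().getPath();   // "/data/logs"
    // server-side translator: rebuild a Path from the bare string
    PathBasedCacheDirective received = new PathBasedCacheDirective.Builder().
        setPath(new Path(wire)).
        setPool(sent.getPool()).
        build();

Because scheme and authority are dropped here, DistributedFileSystem re-qualifies descriptors on the way back to the caller, and the empty-string guard must run before new Path() is called, since Path cannot represent an empty string.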
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
index 6c84c0460ce..ea3dfc19ce7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -1009,7 +1010,7 @@
     AddPathBasedCacheDirectiveRequestProto.Builder builder =
         AddPathBasedCacheDirectiveRequestProto.newBuilder();
     builder.setDirective(PathBasedCacheDirectiveProto.newBuilder()
-        .setPath(directive.getPath())
+        .setPath(directive.getPath().toUri().getPath())
         .setPool(directive.getPool())
         .build());
     AddPathBasedCacheDirectiveResponseProto result =
@@ -1047,7 +1048,7 @@
       ListPathBasedCacheDescriptorsElementProto elementProto =
           response.getElements(i);
       return new PathBasedCacheDescriptor(elementProto.getId(),
-          elementProto.getPath(), elementProto.getPool());
+          new Path(elementProto.getPath()), elementProto.getPool());
     }
 
     @Override

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
index a7d9f0698a1..c639bf2e492 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
@@ -36,6 +36,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
@@ -138,7 +139,7 @@
   private synchronized PathBasedCacheEntry
       findEntry(PathBasedCacheDirective directive) {
     List<PathBasedCacheEntry> existing =
-        entriesByPath.get(directive.getPath());
+        entriesByPath.get(directive.getPath().toUri().getPath());
     if (existing == null) {
       return null;
     }
@@ -246,8 +247,8 @@
     CachePool pool = cachePools.get(directive.getPool());
     // Add a new entry with the next available ID.
     PathBasedCacheEntry entry;
-    entry = new PathBasedCacheEntry(getNextEntryId(), directive.getPath(),
-        pool);
+    entry = new PathBasedCacheEntry(getNextEntryId(),
+        directive.getPath().toUri().getPath(), pool);
 
     unprotectedAddEntry(entry);
 
@@ -303,7 +304,7 @@
     assert namesystem.hasWriteLock();
     PathBasedCacheEntry existing = entriesById.get(id);
     // Remove the corresponding entry in entriesByPath.
-    String path = existing.getDescriptor().getPath();
+    String path = existing.getDescriptor().getPath().toUri().getPath();
     List<PathBasedCacheEntry> entries = entriesByPath.get(path);
     if (entries == null || !entries.remove(existing)) {
       throw new UnexpectedRemovePathBasedCacheDescriptorException(id);
@@ -315,10 +316,11 @@
 
     // Set the path as uncached in the namesystem
     try {
-      INode node = dir.getINode(existing.getDescriptor().getPath());
+      INode node = dir.getINode(existing.getDescriptor().getPath().toUri().
+          getPath());
       if (node != null && node.isFile()) {
-        namesystem.setCacheReplicationInt(existing.getDescriptor().getPath(),
-            (short) 0);
+        namesystem.setCacheReplicationInt(existing.getDescriptor().getPath().
+            toUri().getPath(), (short) 0);
       }
     } catch (IOException e) {
       LOG.warn("removeDescriptor " + id + ": failure while setting cache"
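CacheManager keeps keying entriesByPath by the bare path string, so every incoming Path is first stripped with toUri().getPath(). That normalization is what lets a fully qualified Path and a plain one resolve to the same entry, for example:

    // both expressions produce the same map key, "/data/hot"
    new Path("hdfs://nn:8020/data/hot").toUri().getPath();
    new Path("/data/hot").toUri().getPath();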
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index 10aad74e03a..9ae790efba1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -958,7 +958,7 @@
       boolean toLogRpcIds) {
     AddPathBasedCacheDirectiveOp op = AddPathBasedCacheDirectiveOp.getInstance(
         cache.get())
-        .setPath(directive.getPath())
+        .setPath(directive.getPath().toUri().getPath())
         .setPool(directive.getPool());
     logRpcIds(op, toLogRpcIds);
     logEdit(op);

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
index bd13ca4af79..61cc2d0ba4f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
@@ -30,6 +30,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -641,8 +642,10 @@
     }
     case OP_ADD_PATH_BASED_CACHE_DIRECTIVE: {
       AddPathBasedCacheDirectiveOp addOp = (AddPathBasedCacheDirectiveOp) op;
-      PathBasedCacheDirective d = new PathBasedCacheDirective(addOp.path,
-          addOp.pool);
+      PathBasedCacheDirective d = new PathBasedCacheDirective.Builder().
+          setPath(new Path(addOp.path)).
+          setPool(addOp.pool).
+          build();
       PathBasedCacheDescriptor descriptor =
           fsNamesys.getCacheManager().unprotectedAddDirective(d);
 
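The edit log still persists the path as a plain string, so the on-disk format is untouched by this refactor; a Path is rebuilt only at replay time to satisfy the new in-memory API. In outline (the names follow the patch's op fields):

    // write side (FSEditLog): store the bare path string
    String logged = directive.getPath().toUri().getPath();
    // read side (FSEditLogLoader): rebuild the directive from logged strings
    PathBasedCacheDirective d = new PathBasedCacheDirective.Builder().
        setPath(new Path(logged)).
        setPool(directive.getPool()).
        build();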
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
index f0a71c595b2..c4633c137c5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -164,8 +165,10 @@
     }
 
     DistributedFileSystem dfs = getDFS(conf);
-    PathBasedCacheDirective directive =
-        new PathBasedCacheDirective(path, poolName);
+    PathBasedCacheDirective directive = new PathBasedCacheDirective.Builder().
+        setPath(new Path(path)).
+        setPool(poolName).
+        build();
 
     try {
       PathBasedCacheDescriptor descriptor =
@@ -281,12 +284,14 @@
           build();
       DistributedFileSystem dfs = getDFS(conf);
       RemoteIterator<PathBasedCacheDescriptor> iter =
-          dfs.listPathBasedCacheDescriptors(poolFilter, pathFilter);
+          dfs.listPathBasedCacheDescriptors(poolFilter, pathFilter != null ?
+              new Path(pathFilter) : null);
       int numEntries = 0;
       while (iter.hasNext()) {
         PathBasedCacheDescriptor entry = iter.next();
         String row[] = new String[] {
-            "" + entry.getEntryId(), entry.getPool(), entry.getPath(),
+            "" + entry.getEntryId(), entry.getPool(),
+            entry.getPath().toUri().getPath(),
         };
         tableListing.addRow(row);
         numEntries++;
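With CacheAdmin now wrapping its string arguments in new Path(...), the listing loop above is also the idiomatic client pattern for consuming descriptors. A sketch, assuming dfs is a connected DistributedFileSystem and the pool name and filter path are hypothetical:

    RemoteIterator<PathBasedCacheDescriptor> it =
        dfs.listPathBasedCacheDescriptors("pool1", new Path("/data"));
    while (it.hasNext()) {
      PathBasedCacheDescriptor d = it.next();
      System.out.println(d.getEntryId() + "\t" + d.getPool() + "\t"
          + d.getPath().toUri().getPath());
    }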
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestClientNamenodeProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestClientNamenodeProtocolServerSideTranslatorPB.java
new file mode 100644
index 00000000000..4cf53253c18
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestClientNamenodeProtocolServerSideTranslatorPB.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocolPB;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+
+import org.junit.Test;
+
+import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AddPathBasedCacheDirectiveRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.PathBasedCacheDirectiveProto;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class TestClientNamenodeProtocolServerSideTranslatorPB {
+
+  @Test
+  public void testAddPathBasedCacheDirectiveEmptyPathError() throws Exception {
+    ClientProtocol server = mock(ClientProtocol.class);
+    RpcController controller = mock(RpcController.class);
+    AddPathBasedCacheDirectiveRequestProto request =
+        AddPathBasedCacheDirectiveRequestProto.newBuilder().
+            setDirective(PathBasedCacheDirectiveProto.newBuilder().
+                setPath("").
+                setPool("pool").
+                build()).
+            build();
+    ClientNamenodeProtocolServerSideTranslatorPB translator =
+        new ClientNamenodeProtocolServerSideTranslatorPB(server);
+    try {
+      translator.addPathBasedCacheDirective(controller, request);
+      fail("Expected ServiceException");
+    } catch (ServiceException e) {
+      assertNotNull(e.getCause());
+      assertTrue(e.getCause() instanceof EmptyPathError);
+    }
+  }
+}

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java
index ee9cc96adf5..369cc376b58 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCacheReplicationManager.java
@@ -152,12 +152,14 @@ public class TestCacheReplicationManager {
     waitForExpectedNumCachedBlocks(expected);
     // Cache and check each path in sequence
     for (int i=0; i<numFiles; i++) {
-      PathBasedCacheDirective directive = new PathBasedCacheDirective(
-          paths.get(i), pool);
+      PathBasedCacheDirective directive = new PathBasedCacheDirective.Builder().
+          setPath(new Path(paths.get(i))).
+          setPool(pool).
+          build();
       PathBasedCacheDescriptor descriptor =
           nnRpc.addPathBasedCacheDirective(directive);
       expected += numBlocksPerFile;
       waitForExpectedNumCachedBlocks(expected);
     }
     RemoteIterator<PathBasedCacheDescriptor> dit =
         dfs.listPathBasedCacheDescriptors(null, null);
@@ -219,7 +223,7 @@ public class TestCacheReplicationManager {
       assertTrue("Unexpected # of cache entries: " + i, dit.hasNext());
       PathBasedCacheDescriptor cd = dit.next();
       assertEquals(i+1, cd.getEntryId());
-      assertEquals(entryPrefix + i, cd.getPath());
+      assertEquals(entryPrefix + i, cd.getPath().toUri().getPath());
       assertEquals(pool, cd.getPool());
     }
     assertFalse("Unexpected # of cache descriptors found", dit.hasNext());
@@ -243,7 +247,7 @@ public class TestCacheReplicationManager {
       assertTrue("Unexpected # of cache entries: " + i, dit.hasNext());
       PathBasedCacheDescriptor cd = dit.next();
       assertEquals(i+1, cd.getEntryId());
-      assertEquals(entryPrefix + i, cd.getPath());
+      assertEquals(entryPrefix + i, cd.getPath().toUri().getPath());
       assertEquals(pool, cd.getPool());
     }
     assertFalse("Unexpected # of cache descriptors found", dit.hasNext());
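The assertions above compare entryPrefix + i against cd.getPath().toUri().getPath() rather than cd.getPath(), because descriptors now come back fully qualified. A small illustration of the difference (the cluster URI and working directory are made up):

    Path qualified = new Path("/entry-0").makeQualified(
        URI.create("hdfs://nn:8020"), new Path("/user/test"));
    qualified.toString();         // "hdfs://nn:8020/entry-0"
    qualified.toUri().getPath();  // "/entry-0", matches the original string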
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java
index b753b8d05ff..cd08e98f258 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java
@@ -243,7 +243,10 @@
         .setWeight(1989));
     // OP_ADD_PATH_BASED_CACHE_DIRECTIVE 33
     PathBasedCacheDescriptor descriptor =
-        dfs.addPathBasedCacheDirective(new PathBasedCacheDirective("/bar", pool));
+        dfs.addPathBasedCacheDirective(new PathBasedCacheDirective.Builder().
+            setPath(new Path("/bar")).
+            setPool(pool).
+            build());
     // OP_REMOVE_PATH_BASED_CACHE_DESCRIPTOR 34
     dfs.removePathBasedCacheDescriptor(descriptor);
     // OP_REMOVE_CACHE_POOL 37
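The helper records op codes 33 and 34 back to back: the descriptor returned by the add call is the handle later passed to the remove call. The lifecycle in isolation (the pool name is hypothetical):

    PathBasedCacheDescriptor descriptor =
        dfs.addPathBasedCacheDirective(new PathBasedCacheDirective.Builder().
            setPath(new Path("/bar")).
            setPool("pool1").
            build());
    // the descriptor carries the server-assigned entry id
    dfs.removePathBasedCacheDescriptor(descriptor);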
+ build()); fail("expected an error when adding to a non-existent pool."); } catch (IOException ioe) { assertTrue(ioe instanceof InvalidPoolNameError); } try { - addAsUnprivileged(new PathBasedCacheDirective("/blackhole", "pool4")); + addAsUnprivileged(new PathBasedCacheDirective.Builder(). + setPath(new Path("/blackhole")). + setPool("pool4"). + build()); fail("expected an error when adding to a pool with " + "mode 0 (no permissions for anyone)."); } catch (IOException ioe) { @@ -348,43 +353,49 @@ public class TestPathBasedCacheRequests { } try { - addAsUnprivileged(new PathBasedCacheDirective("//illegal/path/", "pool1")); + addAsUnprivileged(new PathBasedCacheDirective.Builder(). + setPath(new Path("/illegal:path/")). + setPool("pool1"). + build()); fail("expected an error when adding a malformed path " + "to the cache directives."); - } catch (IOException ioe) { - assertTrue(ioe instanceof InvalidPathNameError); + } catch (IllegalArgumentException e) { + // expected } try { - addAsUnprivileged(new PathBasedCacheDirective("/emptypoolname", "")); + addAsUnprivileged(new PathBasedCacheDirective.Builder(). + setPath(new Path("/emptypoolname")). + setPool(""). + build()); Assert.fail("expected an error when adding a PathBasedCache " + "directive with an empty pool name."); } catch (IOException ioe) { Assert.assertTrue(ioe instanceof InvalidPoolNameError); } - try { - addAsUnprivileged(new PathBasedCacheDirective("bogus", "pool1")); - Assert.fail("expected an error when adding a PathBasedCache " + - "directive with a non-absolute path name."); - } catch (IOException ioe) { - Assert.assertTrue(ioe instanceof InvalidPathNameError); - } - PathBasedCacheDescriptor deltaD = addAsUnprivileged(delta); + // We expect the following to succeed, because DistributedFileSystem + // qualifies the path. + PathBasedCacheDescriptor relativeD = addAsUnprivileged( + new PathBasedCacheDirective.Builder(). + setPath(new Path("relative")). + setPool("pool1"). + build()); + RemoteIterator iter; - iter = proto.listPathBasedCacheDescriptors(0, null, null); - validateListAll(iter, alphaD, betaD, deltaD); - iter = proto.listPathBasedCacheDescriptors(0, "pool3", null); + iter = dfs.listPathBasedCacheDescriptors(null, null); + validateListAll(iter, alphaD, betaD, deltaD, relativeD); + iter = dfs.listPathBasedCacheDescriptors("pool3", null); Assert.assertFalse(iter.hasNext()); - iter = proto.listPathBasedCacheDescriptors(0, "pool1", null); - validateListAll(iter, alphaD, deltaD); - iter = proto.listPathBasedCacheDescriptors(0, "pool2", null); + iter = dfs.listPathBasedCacheDescriptors("pool1", null); + validateListAll(iter, alphaD, deltaD, relativeD); + iter = dfs.listPathBasedCacheDescriptors("pool2", null); validateListAll(iter, betaD); dfs.removePathBasedCacheDescriptor(betaD); - iter = proto.listPathBasedCacheDescriptors(0, "pool2", null); + iter = dfs.listPathBasedCacheDescriptors("pool2", null); Assert.assertFalse(iter.hasNext()); try { @@ -409,7 +420,8 @@ public class TestPathBasedCacheRequests { dfs.removePathBasedCacheDescriptor(alphaD); dfs.removePathBasedCacheDescriptor(deltaD); - iter = proto.listPathBasedCacheDescriptors(0, null, null); + dfs.removePathBasedCacheDescriptor(relativeD); + iter = dfs.listPathBasedCacheDescriptors(null, null); assertFalse(iter.hasNext()); } }