HDFS-5213. Separate PathBasedCacheEntry and PathBasedCacheDirectiveWithId. Contributed by Colin Patrick McCabe.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4949@1524561 13f79535-47bb-0310-9956-ffa450edef68
Andrew Wang 2013-09-18 20:43:40 +00:00
parent 85c2036029
commit e202d4d154
18 changed files with 483 additions and 309 deletions

View File

@@ -39,6 +39,9 @@ HDFS-4949 (Unreleased)
HDFS-5197. Document dfs.cachereport.intervalMsec in hdfs-default.xml.
(cnauroth)
HDFS-5213. Separate PathBasedCacheEntry and PathBasedCacheDirectiveWithId.
(Contributed by Colin Patrick McCabe)
OPTIMIZATIONS
BUG FIXES

View File

@@ -200,9 +200,9 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
public static final String DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES =
"dfs.namenode.list.cache.pools.num.responses";
public static final int DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT = 100;
public static final String DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES =
"dfs.namenode.list.cache.directives.num.responses";
public static final int DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT = 100;
public static final String DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES =
"dfs.namenode.list.cache.descriptors.num.responses";
public static final int DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES_DEFAULT = 100;
// Whether to enable datanode's stale state detection and usage for reads
public static final String DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY = "dfs.namenode.avoid.read.stale.datanode";
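The rename reaches configuration as well: dfs.namenode.list.cache.directives.num.responses becomes dfs.namenode.list.cache.descriptors.num.responses, keeping the default of 100. A minimal sketch of overriding the new limit programmatically, assuming only the constants added above; the value 500 is purely illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class DescriptorLimitExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Raise the per-RPC batch size used by listPathBasedCacheDescriptors
        // from the default of 100 to 500 (illustrative value).
        conf.setInt(
            DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES, 500);
        System.out.println(conf.getInt(
            DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES,
            DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES_DEFAULT));
      }
    }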

View File

@@ -68,7 +68,7 @@
import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
@@ -1591,7 +1591,7 @@ public Boolean next(final FileSystem fs, final Path p)
* PathBasedCache entry, or an IOException describing why the directive
* could not be added.
*/
public List<Fallible<PathBasedCacheEntry>>
public List<Fallible<PathBasedCacheDescriptor>>
addPathBasedCacheDirective(List<PathBasedCacheDirective> directives)
throws IOException {
return dfs.namenode.addPathBasedCacheDirectives(directives);
@@ -1605,8 +1605,8 @@ public Boolean next(final FileSystem fs, final Path p)
* ID, or an IOException describing why the ID could not be removed.
*/
public List<Fallible<Long>>
removePathBasedCacheEntries(List<Long> ids) throws IOException {
return dfs.namenode.removePathBasedCacheEntries(ids);
removePathBasedCacheDescriptors(List<Long> ids) throws IOException {
return dfs.namenode.removePathBasedCacheDescriptors(ids);
}
/**
@@ -1615,11 +1615,11 @@ public Boolean next(final FileSystem fs, final Path p)
*
* @param pool The cache pool to list, or null to list all pools.
* @param path The path name to list, or null to list all paths.
* @return A RemoteIterator which returns PathBasedCacheEntry objects.
* @return A RemoteIterator which returns PathBasedCacheDescriptor objects.
*/
public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(
public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(
String pool, String path) throws IOException {
return dfs.namenode.listPathBasedCacheEntries(0, pool, path);
return dfs.namenode.listPathBasedCacheDescriptors(0, pool, path);
}
/**
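Taken together, the renamed DistributedFileSystem methods give clients a descriptor-based round trip: add directives, get back descriptors with IDs, then list or remove by ID. A hedged sketch, assuming fs.defaultFS points at HDFS and a cache pool named pool1 already exists; the /warm/data path is made up:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.RemoteIterator;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
    import org.apache.hadoop.util.Fallible;

    public class CacheDescriptorRoundTrip {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);

        // Submit one directive; each result is a Fallible that either
        // holds a descriptor or rethrows the per-directive IOException.
        List<Fallible<PathBasedCacheDescriptor>> added =
            dfs.addPathBasedCacheDirective(Arrays.asList(
                new PathBasedCacheDirective("/warm/data", "pool1")));
        PathBasedCacheDescriptor descriptor = added.get(0).get();
        System.out.println("added descriptor id " + descriptor.getEntryId());

        // List everything in pool1, then remove what we just added by ID.
        RemoteIterator<PathBasedCacheDescriptor> it =
            dfs.listPathBasedCacheDescriptors("pool1", null);
        while (it.hasNext()) {
          System.out.println(it.next());
        }
        dfs.removePathBasedCacheDescriptors(
            Arrays.asList(descriptor.getEntryId())).get(0).get();
      }
    }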

View File

@@ -75,6 +75,16 @@ public PoolWritePermissionDeniedError(PathBasedCacheDirective directive) {
}
}
public static class PathAlreadyExistsInPoolError
extends AddPathBasedCacheDirectiveException {
private static final long serialVersionUID = 1L;
public PathAlreadyExistsInPoolError(PathBasedCacheDirective directive) {
super("path " + directive.getPath() + " already exists in pool " +
directive.getPool(), directive);
}
}
public static class UnexpectedAddPathBasedCacheDirectiveException
extends AddPathBasedCacheDirectiveException {
private static final long serialVersionUID = 1L;
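The new PathAlreadyExistsInPoolError rounds out the add-side error hierarchy. A sketch of how it would surface on a client through the per-directive Fallible; note that the CacheManager hunk later in this commit actually resolves a duplicate add by returning the existing descriptor, so this catch is a defensive path:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PathAlreadyExistsInPoolError;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
    import org.apache.hadoop.util.Fallible;

    public class DuplicateAddHandling {
      static void addOnce(DistributedFileSystem dfs,
          PathBasedCacheDirective directive) throws IOException {
        List<Fallible<PathBasedCacheDescriptor>> results =
            dfs.addPathBasedCacheDirective(Arrays.asList(directive));
        try {
          PathBasedCacheDescriptor d = results.get(0).get();
          System.out.println("cached as id " + d.getEntryId());
        } catch (PathAlreadyExistsInPoolError e) {
          // The (path, pool) pair is already cached; nothing to do.
          System.out.println(e.getMessage());
        }
      }
    }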

View File

@@ -1106,7 +1106,7 @@ public SnapshotDiffReport getSnapshotDiffReport(String snapshotRoot,
* could not be added.
*/
@AtMostOnce
public List<Fallible<PathBasedCacheEntry>>
public List<Fallible<PathBasedCacheDescriptor>>
addPathBasedCacheDirectives(List<PathBasedCacheDirective> directives)
throws IOException;
@@ -1118,7 +1118,7 @@ public SnapshotDiffReport getSnapshotDiffReport(String snapshotRoot,
* ID, or an IOException describing why the ID could not be removed.
*/
@AtMostOnce
public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
public List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids)
throws IOException;
/**
@@ -1126,13 +1126,13 @@ public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
* from the server.
*
* @param prevId The last listed entry ID, or -1 if this is the first call to
* listPathBasedCacheEntries.
* listPathBasedCacheDescriptors.
* @param pool The cache pool to list, or null to list all pools.
* @param path The path name to list, or null to list all paths.
* @return A RemoteIterator which returns PathBasedCacheEntry objects.
* @return A RemoteIterator which returns PathBasedCacheDescriptor objects.
*/
@Idempotent
public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(long prevId,
String pool, String path) throws IOException;
/**
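On the wire, the listing stays resumable: each call passes the last ID already seen, and the NameNode returns descriptors with strictly larger IDs. A small sketch against the protocol interface, assuming a connected ClientProtocol proxy; null filters list every pool and path:

    import java.io.IOException;
    import org.apache.hadoop.fs.RemoteIterator;
    import org.apache.hadoop.hdfs.protocol.ClientProtocol;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;

    public class DescriptorListing {
      static void listFrom(ClientProtocol namenode, long prevId)
          throws IOException {
        // In the PB translator implementation, the returned iterator
        // re-issues batched RPCs as needed; prevId selects where the
        // listing resumes (-1 or 0 both start at the beginning, since
        // descriptor IDs are always positive).
        RemoteIterator<PathBasedCacheDescriptor> it =
            namenode.listPathBasedCacheDescriptors(prevId, null, null);
        while (it.hasNext()) {
          System.out.println(it.next());
        }
      }
    }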

View File

@@ -0,0 +1,77 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocol;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import com.google.common.base.Preconditions;
/**
* A directive in a cache pool that includes an identifying ID number.
*/
@InterfaceStability.Evolving
@InterfaceAudience.Public
public final class PathBasedCacheDescriptor extends PathBasedCacheDirective {
private final long entryId;
public PathBasedCacheDescriptor(long entryId, String path, String pool) {
super(path, pool);
Preconditions.checkArgument(entryId > 0);
this.entryId = entryId;
}
public long getEntryId() {
return entryId;
}
@Override
public boolean equals(Object o) {
if (o == null) {
return false;
}
if (getClass() != o.getClass()) {
return false;
}
PathBasedCacheDescriptor other = (PathBasedCacheDescriptor)o;
return new EqualsBuilder().append(entryId, other.entryId).
append(getPath(), other.getPath()).
append(getPool(), other.getPool()).
isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(entryId).
append(getPath()).
append(getPool()).
hashCode();
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("{ entryId:").append(entryId).
append(", path:").append(getPath()).
append(", pool:").append(getPool()).
append(" }");
return builder.toString();
}
};
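Because equals and hashCode cover all three fields, a descriptor behaves as a plain value object: two instances built from the same ID, path, and pool are interchangeable, so descriptors can be used as map or set keys. A tiny demonstration (the id, path, and pool values are made up):

    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;

    public class DescriptorEquality {
      public static void main(String[] args) {
        PathBasedCacheDescriptor a =
            new PathBasedCacheDescriptor(42, "/warm/data", "pool1");
        PathBasedCacheDescriptor b =
            new PathBasedCacheDescriptor(42, "/warm/data", "pool1");
        // Value semantics: same id, path, and pool compare equal
        // and hash identically.
        System.out.println(a.equals(b));                  // true
        System.out.println(a.hashCode() == b.hashCode()); // true
        System.out.println(a); // { entryId:42, path:/warm/data, pool:pool1 }
      }
    }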

View File

@@ -20,9 +20,11 @@
import java.io.IOException;
import com.google.common.base.Preconditions;
import com.google.common.collect.ComparisonChain;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
@@ -31,7 +33,9 @@
/**
* A directive to add a path to a cache pool.
*/
public class PathBasedCacheDirective implements Comparable<PathBasedCacheDirective> {
@InterfaceStability.Evolving
@InterfaceAudience.Public
public class PathBasedCacheDirective {
private final String path;
private final String pool;
@@ -76,26 +80,24 @@ public void validate() throws IOException {
}
@Override
public int compareTo(PathBasedCacheDirective rhs) {
return ComparisonChain.start().
compare(pool, rhs.getPool()).
compare(path, rhs.getPath()).
result();
public boolean equals(Object o) {
if (o == null) {
return false;
}
if (getClass() != o.getClass()) {
return false;
}
PathBasedCacheDirective other = (PathBasedCacheDirective)o;
return new EqualsBuilder().append(getPath(), other.getPath()).
append(getPool(), other.getPool()).
isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(path).append(pool).hashCode();
}
@Override
public boolean equals(Object o) {
try {
PathBasedCacheDirective other = (PathBasedCacheDirective)o;
return other.compareTo(this) == 0;
} catch (ClassCastException e) {
return false;
}
return new HashCodeBuilder().append(getPath()).
append(getPool()).
hashCode();
}
@Override

View File

@@ -17,59 +17,54 @@
*/
package org.apache.hadoop.hdfs.protocol;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.server.namenode.CachePool;
import com.google.common.base.Preconditions;
/**
* An entry in the NameNode's PathBasedCache.
* Represents an entry in the PathBasedCache on the NameNode.
*
* This is an implementation class, not part of the public API.
*/
@InterfaceAudience.Private
public final class PathBasedCacheEntry {
private final long entryId;
private final PathBasedCacheDirective directive;
private final String path;
private final CachePool pool;
public PathBasedCacheEntry(long entryId, PathBasedCacheDirective directive) {
public PathBasedCacheEntry(long entryId, String path, CachePool pool) {
Preconditions.checkArgument(entryId > 0);
this.entryId = entryId;
this.directive = directive;
Preconditions.checkNotNull(path);
this.path = path;
Preconditions.checkNotNull(pool);
this.pool = pool;
}
public long getEntryId() {
return entryId;
}
public PathBasedCacheDirective getDirective() {
return directive;
public String getPath() {
return path;
}
@Override
public boolean equals(Object o) {
try {
PathBasedCacheEntry other = (PathBasedCacheEntry)o;
return new EqualsBuilder().
append(this.entryId, other.entryId).
append(this.directive, other.directive).
isEquals();
} catch (ClassCastException e) {
return false;
}
}
@Override
public int hashCode() {
return new HashCodeBuilder().
append(entryId).
append(directive).
hashCode();
public CachePool getPool() {
return pool;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("{ entryId:").append(entryId).
append(", directive:").append(directive.toString()).
append(" }");
append(", path:").append(path).
append(", pool:").append(pool).
append(" }");
return builder.toString();
}
public PathBasedCacheDescriptor getDescriptor() {
return new PathBasedCacheDescriptor(entryId, path, pool.getName());
}
};

View File

@@ -0,0 +1,79 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocol;
import java.io.IOException;
import com.google.common.base.Preconditions;
/**
* An exception which occurred when trying to remove a PathBasedCache entry.
*/
public abstract class RemovePathBasedCacheDescriptorException extends IOException {
private static final long serialVersionUID = 1L;
private final long entryId;
public RemovePathBasedCacheDescriptorException(String description, long entryId) {
super(description);
this.entryId = entryId;
}
public long getEntryId() {
return this.entryId;
}
public final static class InvalidIdException
extends RemovePathBasedCacheDescriptorException {
private static final long serialVersionUID = 1L;
public InvalidIdException(long entryId) {
super("invalid PathBasedCacheDescriptor id " + entryId, entryId);
}
}
public final static class RemovePermissionDeniedException
extends RemovePathBasedCacheDescriptorException {
private static final long serialVersionUID = 1L;
public RemovePermissionDeniedException(long entryId) {
super("permission denied when trying to remove " +
"PathBasedCacheDescriptor id " + entryId, entryId);
}
}
public final static class NoSuchIdException
extends RemovePathBasedCacheDescriptorException {
private static final long serialVersionUID = 1L;
public NoSuchIdException(long entryId) {
super("there is no PathBasedCacheDescriptor with id " + entryId,
entryId);
}
}
public final static class UnexpectedRemovePathBasedCacheDescriptorException
extends RemovePathBasedCacheDescriptorException {
private static final long serialVersionUID = 1L;
public UnexpectedRemovePathBasedCacheDescriptorException(long id) {
super("encountered an unexpected error when trying to " +
"remove PathBasedCacheDescriptor with id " + id, id);
}
}
}
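Each typed subclass maps to one of the negative error codes in RemovePathBasedCacheDescriptorErrorProto, so a client can branch on the exception type after unwrapping a Fallible. A hedged sketch of that per-ID handling:

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.InvalidIdException;
    import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;
    import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.RemovePermissionDeniedException;
    import org.apache.hadoop.util.Fallible;

    public class RemoveErrorHandling {
      static void removeAll(DistributedFileSystem dfs, List<Long> ids)
          throws IOException {
        List<Fallible<Long>> results = dfs.removePathBasedCacheDescriptors(ids);
        for (Fallible<Long> result : results) {
          try {
            System.out.println("removed " + result.get());
          } catch (InvalidIdException e) {              // id <= 0
            System.err.println(e.getMessage());
          } catch (NoSuchIdException e) {               // id unknown to the NameNode
            System.err.println(e.getMessage());
          } catch (RemovePermissionDeniedException e) { // no WRITE on the pool
            System.err.println(e.getMessage());
          }
        }
      }
    }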

View File

@@ -32,6 +32,7 @@
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PathAlreadyExistsInPoolError;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
@@ -40,10 +41,10 @@
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.InvalidIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.RemovePermissionDeniedException;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AbandonBlockRequestProto;
@@ -115,9 +116,9 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesElementProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsElementProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsRequestProto;
@@ -131,9 +132,9 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntryErrorProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorErrorProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2ResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
@@ -1048,13 +1049,13 @@ public AddPathBasedCacheDirectivesResponseProto addPathBasedCacheDirectives(RpcC
PathBasedCacheDirectiveProto proto = request.getElements(i);
input.add(new PathBasedCacheDirective(proto.getPath(), proto.getPool()));
}
List<Fallible<PathBasedCacheEntry>> output = server.addPathBasedCacheDirectives(input);
List<Fallible<PathBasedCacheDescriptor>> output = server.addPathBasedCacheDirectives(input);
AddPathBasedCacheDirectivesResponseProto.Builder builder =
AddPathBasedCacheDirectivesResponseProto.newBuilder();
for (int idx = 0; idx < output.size(); idx++) {
try {
PathBasedCacheEntry entry = output.get(idx).get();
builder.addResults(entry.getEntryId());
PathBasedCacheDescriptor directive = output.get(idx).get();
builder.addResults(directive.getEntryId());
} catch (IOException ioe) {
if (ioe.getCause() instanceof EmptyPathError) {
builder.addResults(AddPathBasedCacheDirectiveErrorProto.
@@ -1068,6 +1069,9 @@ public AddPathBasedCacheDirectivesResponseProto addPathBasedCacheDirectives(RpcC
} else if (ioe.getCause() instanceof PoolWritePermissionDeniedError) {
builder.addResults(AddPathBasedCacheDirectiveErrorProto.
ADD_PERMISSION_DENIED_ERROR_VALUE);
} else if (ioe.getCause() instanceof PathAlreadyExistsInPoolError) {
builder.addResults(AddPathBasedCacheDirectiveErrorProto.
PATH_ALREADY_EXISTS_IN_POOL_ERROR_VALUE);
} else {
builder.addResults(AddPathBasedCacheDirectiveErrorProto.
UNEXPECTED_ADD_ERROR_VALUE);
@@ -1081,29 +1085,29 @@ public AddPathBasedCacheDirectivesResponseProto addPathBasedCacheDirectives(RpcC
}
@Override
public RemovePathBasedCacheEntriesResponseProto removePathBasedCacheEntries(
RpcController controller, RemovePathBasedCacheEntriesRequestProto request)
public RemovePathBasedCacheDescriptorsResponseProto removePathBasedCacheDescriptors(
RpcController controller, RemovePathBasedCacheDescriptorsRequestProto request)
throws ServiceException {
try {
List<Fallible<Long>> output =
server.removePathBasedCacheEntries(request.getElementsList());
RemovePathBasedCacheEntriesResponseProto.Builder builder =
RemovePathBasedCacheEntriesResponseProto.newBuilder();
server.removePathBasedCacheDescriptors(request.getElementsList());
RemovePathBasedCacheDescriptorsResponseProto.Builder builder =
RemovePathBasedCacheDescriptorsResponseProto.newBuilder();
for (int idx = 0; idx < output.size(); idx++) {
try {
long id = output.get(idx).get();
builder.addResults(id);
} catch (InvalidIdException ioe) {
builder.addResults(RemovePathBasedCacheEntryErrorProto.
builder.addResults(RemovePathBasedCacheDescriptorErrorProto.
INVALID_CACHED_PATH_ID_ERROR_VALUE);
} catch (NoSuchIdException ioe) {
builder.addResults(RemovePathBasedCacheEntryErrorProto.
builder.addResults(RemovePathBasedCacheDescriptorErrorProto.
NO_SUCH_CACHED_PATH_ID_ERROR_VALUE);
} catch (RemovePermissionDeniedException ioe) {
builder.addResults(RemovePathBasedCacheEntryErrorProto.
builder.addResults(RemovePathBasedCacheDescriptorErrorProto.
REMOVE_PERMISSION_DENIED_ERROR_VALUE);
} catch (IOException ioe) {
builder.addResults(RemovePathBasedCacheEntryErrorProto.
builder.addResults(RemovePathBasedCacheDescriptorErrorProto.
UNEXPECTED_REMOVE_ERROR_VALUE);
}
}
@@ -1114,31 +1118,32 @@ public RemovePathBasedCacheEntriesResponseProto removePathBasedCacheEntries(
}
@Override
public ListPathBasedCacheEntriesResponseProto listPathBasedCacheEntries(
RpcController controller, ListPathBasedCacheEntriesRequestProto request)
public ListPathBasedCacheDescriptorsResponseProto listPathBasedCacheDescriptors(
RpcController controller, ListPathBasedCacheDescriptorsRequestProto request)
throws ServiceException {
try {
RemoteIterator<PathBasedCacheEntry> iter =
server.listPathBasedCacheEntries(request.getPrevId(),
RemoteIterator<PathBasedCacheDescriptor> iter =
server.listPathBasedCacheDescriptors(request.getPrevId(),
request.hasPool() ? request.getPool() : null,
request.hasPath() ? request.getPath() : null);
ListPathBasedCacheEntriesResponseProto.Builder builder =
ListPathBasedCacheEntriesResponseProto.newBuilder();
ListPathBasedCacheDescriptorsResponseProto.Builder builder =
ListPathBasedCacheDescriptorsResponseProto.newBuilder();
long prevId = 0;
while (iter.hasNext()) {
PathBasedCacheEntry entry = iter.next();
PathBasedCacheDescriptor directive = iter.next();
builder.addElements(
ListPathBasedCacheEntriesElementProto.newBuilder().
setId(entry.getEntryId()).
setPath(entry.getDirective().getPath()).
setPool(entry.getDirective().getPool()));
prevId = entry.getEntryId();
ListPathBasedCacheDescriptorsElementProto.newBuilder().
setId(directive.getEntryId()).
setPath(directive.getPath()).
setPool(directive.getPool()));
prevId = directive.getEntryId();
}
if (prevId == 0) {
builder.setHasMore(false);
} else {
iter = server.listPathBasedCacheEntries(prevId, request.getPool(),
request.getPath());
iter = server.listPathBasedCacheDescriptors(prevId,
request.hasPool() ? request.getPool() : null,
request.hasPath() ? request.getPath() : null);
builder.setHasMore(iter.hasNext());
}
return builder.build();
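Both the add and remove responses fold success and failure into one repeated int64 field: a positive value is a descriptor ID, a negative value is drawn from the matching *ErrorProto enum. A self-contained sketch of that decode convention, with constants copied from the .proto hunk at the end of this commit:

    public class ResultCodes {
      // Mirrors the wire convention used above: each int64 "result" is
      // either a positive descriptor ID or a negative error value from
      // RemovePathBasedCacheDescriptorErrorProto.
      static final long UNEXPECTED_REMOVE_ERROR = -1;
      static final long INVALID_CACHED_PATH_ID_ERROR = -2;
      static final long NO_SUCH_CACHED_PATH_ID_ERROR = -3;
      static final long REMOVE_PERMISSION_DENIED_ERROR = -4;

      static String describe(long code) {
        if (code > 0) {
          return "removed descriptor " + code;
        } else if (code == INVALID_CACHED_PATH_ID_ERROR) {
          return "invalid id";
        } else if (code == NO_SUCH_CACHED_PATH_ID_ERROR) {
          return "no such id";
        } else if (code == REMOVE_PERMISSION_DENIED_ERROR) {
          return "permission denied";
        } else {
          return "unexpected error";
        }
      }

      public static void main(String[] args) {
        System.out.println(describe(42)); // removed descriptor 42
        System.out.println(describe(-3)); // no such id
      }
    }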

View File

@@ -39,15 +39,17 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.EmptyPathError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PathAlreadyExistsInPoolError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.UnexpectedAddPathBasedCacheDirectiveException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.UnexpectedRemovePathBasedCacheEntryException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.InvalidIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.RemovePermissionDeniedException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.UnexpectedRemovePathBasedCacheDescriptorException;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
@@ -108,10 +110,10 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetSnapshottableDirListingResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesElementProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheEntriesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsElementProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListPathBasedCacheDescriptorsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseElementProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCachePoolsResponseProto;
@@ -121,9 +123,9 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ModifyCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RecoverLeaseRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RefreshNodesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntriesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheEntryErrorProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemovePathBasedCacheDescriptorErrorProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RemoveCachePoolRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.Rename2RequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.RenameRequestProto;
@@ -145,6 +147,7 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineRequestProto;
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.namenode.CacheManager;
import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
import org.apache.hadoop.io.EnumSetWritable;
@@ -1027,13 +1030,19 @@ private static IOException addPathBasedCacheDirectivesError(long code,
} else if (code == AddPathBasedCacheDirectiveErrorProto.
INVALID_POOL_NAME_ERROR_VALUE) {
return new InvalidPoolNameError(directive);
} else if (code == AddPathBasedCacheDirectiveErrorProto.
ADD_PERMISSION_DENIED_ERROR_VALUE) {
return new PoolWritePermissionDeniedError(directive);
} else if (code == AddPathBasedCacheDirectiveErrorProto.
PATH_ALREADY_EXISTS_IN_POOL_ERROR_VALUE) {
return new PathAlreadyExistsInPoolError(directive);
} else {
return new UnexpectedAddPathBasedCacheDirectiveException(directive);
}
}
@Override
public List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
public List<Fallible<PathBasedCacheDescriptor>> addPathBasedCacheDirectives(
List<PathBasedCacheDirective> directives) throws IOException {
try {
AddPathBasedCacheDirectivesRequestProto.Builder builder =
@@ -1047,17 +1056,18 @@ public List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
AddPathBasedCacheDirectivesResponseProto result =
rpcProxy.addPathBasedCacheDirectives(null, builder.build());
int resultsCount = result.getResultsCount();
ArrayList<Fallible<PathBasedCacheEntry>> results =
new ArrayList<Fallible<PathBasedCacheEntry>>(resultsCount);
ArrayList<Fallible<PathBasedCacheDescriptor>> results =
new ArrayList<Fallible<PathBasedCacheDescriptor>>(resultsCount);
for (int i = 0; i < resultsCount; i++) {
PathBasedCacheDirective directive = directives.get(i);
long code = result.getResults(i);
if (code > 0) {
results.add(new Fallible<PathBasedCacheEntry>(
new PathBasedCacheEntry(code, directive)));
results.add(new Fallible<PathBasedCacheDescriptor>(
new PathBasedCacheDescriptor(code,
directive.getPath(), directive.getPool())));
} else {
results.add(new Fallible<PathBasedCacheEntry>(
addPathBasedCacheDirectivesError(code, directive)));
results.add(new Fallible<PathBasedCacheDescriptor>(
addPathBasedCacheDirectivesError(code, directive)));
}
}
return results;
@@ -1066,32 +1076,32 @@ public List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
}
}
private static IOException removePathBasedCacheEntriesError(long code, long id) {
if (code == RemovePathBasedCacheEntryErrorProto.
private static IOException removePathBasedCacheDescriptorsError(long code, long id) {
if (code == RemovePathBasedCacheDescriptorErrorProto.
INVALID_CACHED_PATH_ID_ERROR_VALUE) {
return new InvalidIdException(id);
} else if (code == RemovePathBasedCacheEntryErrorProto.
} else if (code == RemovePathBasedCacheDescriptorErrorProto.
NO_SUCH_CACHED_PATH_ID_ERROR_VALUE) {
return new NoSuchIdException(id);
} else if (code == RemovePathBasedCacheEntryErrorProto.
} else if (code == RemovePathBasedCacheDescriptorErrorProto.
REMOVE_PERMISSION_DENIED_ERROR_VALUE) {
return new RemovePermissionDeniedException(id);
} else {
return new UnexpectedRemovePathBasedCacheEntryException(id);
return new UnexpectedRemovePathBasedCacheDescriptorException(id);
}
}
@Override
public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
public List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids)
throws IOException {
try {
RemovePathBasedCacheEntriesRequestProto.Builder builder =
RemovePathBasedCacheEntriesRequestProto.newBuilder();
RemovePathBasedCacheDescriptorsRequestProto.Builder builder =
RemovePathBasedCacheDescriptorsRequestProto.newBuilder();
for (Long id : ids) {
builder.addElements(id);
}
RemovePathBasedCacheEntriesResponseProto result =
rpcProxy.removePathBasedCacheEntries(null, builder.build());
RemovePathBasedCacheDescriptorsResponseProto result =
rpcProxy.removePathBasedCacheDescriptors(null, builder.build());
int resultsCount = result.getResultsCount();
ArrayList<Fallible<Long>> results =
new ArrayList<Fallible<Long>>(resultsCount);
@@ -1101,7 +1111,7 @@ public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
results.add(new Fallible<Long>(code));
} else {
results.add(new Fallible<Long>(
removePathBasedCacheEntriesError(code, ids.get(i))));
removePathBasedCacheDescriptorsError(code, ids.get(i))));
}
}
return results;
@@ -1111,20 +1121,19 @@ public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
}
private static class BatchedPathBasedCacheEntries
implements BatchedEntries<PathBasedCacheEntry> {
private ListPathBasedCacheEntriesResponseProto response;
implements BatchedEntries<PathBasedCacheDescriptor> {
private ListPathBasedCacheDescriptorsResponseProto response;
BatchedPathBasedCacheEntries(ListPathBasedCacheEntriesResponseProto response) {
BatchedPathBasedCacheEntries(ListPathBasedCacheDescriptorsResponseProto response) {
this.response = response;
}
@Override
public PathBasedCacheEntry get(int i) {
ListPathBasedCacheEntriesElementProto elementProto =
public PathBasedCacheDescriptor get(int i) {
ListPathBasedCacheDescriptorsElementProto elementProto =
response.getElements(i);
return new PathBasedCacheEntry(elementProto.getId(),
new PathBasedCacheDirective(elementProto.getPath(),
elementProto.getPool()));
return new PathBasedCacheDescriptor(elementProto.getId(),
elementProto.getPath(), elementProto.getPool());
}
@Override
@@ -1139,7 +1148,7 @@ public boolean hasMore() {
}
private class PathBasedCacheEntriesIterator
extends BatchedRemoteIterator<Long, PathBasedCacheEntry> {
extends BatchedRemoteIterator<Long, PathBasedCacheDescriptor> {
private final String pool;
private final String path;
@@ -1150,20 +1159,20 @@ public PathBasedCacheEntriesIterator(long prevKey, String pool, String path) {
}
@Override
public BatchedEntries<PathBasedCacheEntry> makeRequest(
public BatchedEntries<PathBasedCacheDescriptor> makeRequest(
Long nextKey) throws IOException {
ListPathBasedCacheEntriesResponseProto response;
ListPathBasedCacheDescriptorsResponseProto response;
try {
ListPathBasedCacheEntriesRequestProto.Builder builder =
ListPathBasedCacheEntriesRequestProto.newBuilder().setPrevId(nextKey);
ListPathBasedCacheDescriptorsRequestProto.Builder builder =
ListPathBasedCacheDescriptorsRequestProto.newBuilder().setPrevId(nextKey);
if (pool != null) {
builder.setPool(pool);
}
if (path != null) {
builder.setPath(path);
}
ListPathBasedCacheEntriesRequestProto req = builder.build();
response = rpcProxy.listPathBasedCacheEntries(null, req);
ListPathBasedCacheDescriptorsRequestProto req = builder.build();
response = rpcProxy.listPathBasedCacheDescriptors(null, req);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
@@ -1171,13 +1180,13 @@ public BatchedEntries<PathBasedCacheEntry> makeRequest(
}
@Override
public Long elementToPrevKey(PathBasedCacheEntry element) {
public Long elementToPrevKey(PathBasedCacheDescriptor element) {
return element.getEntryId();
}
}
@Override
public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(long prevId,
String pool, String path) throws IOException {
return new PathBasedCacheEntriesIterator(prevId, pool, path);
}

View File

@@ -19,8 +19,8 @@
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES_DEFAULT;
import java.io.IOException;
import java.util.ArrayList;
@@ -38,14 +38,15 @@
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPoolNameError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.UnexpectedAddPathBasedCacheDirectiveException;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.UnexpectedRemovePathBasedCacheEntryException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.RemovePermissionDeniedException;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.InvalidIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.UnexpectedRemovePathBasedCacheDescriptorException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.RemovePermissionDeniedException;
import org.apache.hadoop.util.Fallible;
/**
@@ -57,18 +58,12 @@ public final class CacheManager {
/**
* Cache entries, sorted by ID.
*
* listPathBasedCacheEntries relies on the ordering of elements in this map
* listPathBasedCacheDescriptors relies on the ordering of elements in this map
* to track what has already been listed by the client.
*/
private final TreeMap<Long, PathBasedCacheEntry> entriesById =
new TreeMap<Long, PathBasedCacheEntry>();
/**
* Cache entries, sorted by directive.
*/
private final TreeMap<PathBasedCacheDirective, PathBasedCacheEntry> entriesByDirective =
new TreeMap<PathBasedCacheDirective, PathBasedCacheEntry>();
/**
* Cache entries, sorted by path
*/
@@ -94,7 +89,7 @@ public final class CacheManager {
/**
* Maximum number of cache pool directives to list in one operation.
*/
private final int maxListCacheDirectivesResponses;
private final int maxListCacheDescriptorsResponses;
final private FSNamesystem namesystem;
final private FSDirectory dir;
@@ -107,14 +102,13 @@ public final class CacheManager {
maxListCachePoolsResponses = conf.getInt(
DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES,
DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT);
maxListCacheDirectivesResponses = conf.getInt(
DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES,
DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT);
maxListCacheDescriptorsResponses = conf.getInt(
DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES,
DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES_DEFAULT);
}
synchronized void clear() {
entriesById.clear();
entriesByDirective.clear();
entriesByPath.clear();
cachePools.clear();
nextEntryId = 1;
@@ -127,17 +121,32 @@ synchronized long getNextEntryId() throws IOException {
return nextEntryId++;
}
private synchronized Fallible<PathBasedCacheEntry> addDirective(
private synchronized PathBasedCacheEntry
findEntry(PathBasedCacheDirective directive) {
List<PathBasedCacheEntry> existing =
entriesByPath.get(directive.getPath());
if (existing == null) {
return null;
}
for (PathBasedCacheEntry entry : existing) {
if (entry.getPool().getName().equals(directive.getPool())) {
return entry;
}
}
return null;
}
private synchronized Fallible<PathBasedCacheDescriptor> addDirective(
PathBasedCacheDirective directive, FSPermissionChecker pc) {
CachePool pool = cachePools.get(directive.getPool());
if (pool == null) {
LOG.info("addDirective " + directive + ": pool not found.");
return new Fallible<PathBasedCacheEntry>(
return new Fallible<PathBasedCacheDescriptor>(
new InvalidPoolNameError(directive));
}
if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
LOG.info("addDirective " + directive + ": write permission denied.");
return new Fallible<PathBasedCacheEntry>(
return new Fallible<PathBasedCacheDescriptor>(
new PoolWritePermissionDeniedError(directive));
}
try {
@@ -145,22 +154,24 @@ private synchronized Fallible<PathBasedCacheEntry> addDirective(
} catch (IOException ioe) {
LOG.info("addDirective " + directive + ": validation failed: "
+ ioe.getClass().getName() + ": " + ioe.getMessage());
return new Fallible<PathBasedCacheEntry>(ioe);
return new Fallible<PathBasedCacheDescriptor>(ioe);
}
// Check if we already have this entry.
PathBasedCacheEntry existing = entriesByDirective.get(directive);
PathBasedCacheEntry existing = findEntry(directive);
if (existing != null) {
// Entry already exists: return existing entry.
LOG.info("addDirective " + directive + ": there is an " +
"existing directive " + existing);
return new Fallible<PathBasedCacheEntry>(existing);
"existing directive " + existing + " in this pool.");
return new Fallible<PathBasedCacheDescriptor>(
existing.getDescriptor());
}
// Add a new entry with the next available ID.
PathBasedCacheEntry entry;
try {
entry = new PathBasedCacheEntry(getNextEntryId(), directive);
entry = new PathBasedCacheEntry(getNextEntryId(),
directive.getPath(), pool);
} catch (IOException ioe) {
return new Fallible<PathBasedCacheEntry>(
return new Fallible<PathBasedCacheDescriptor>(
new UnexpectedAddPathBasedCacheDirectiveException(directive));
}
LOG.info("addDirective " + directive + ": added cache directive "
@@ -168,7 +179,6 @@ private synchronized Fallible<PathBasedCacheEntry> addDirective(
// Success!
// First, add it to the various maps
entriesByDirective.put(directive, entry);
entriesById.put(entry.getEntryId(), entry);
String path = directive.getPath();
List<PathBasedCacheEntry> entryList = entriesByPath.get(path);
@@ -181,7 +191,7 @@ private synchronized Fallible<PathBasedCacheEntry> addDirective(
// Next, set the path as cached in the namesystem
try {
INode node = dir.getINode(directive.getPath());
if (node.isFile()) {
if (node != null && node.isFile()) {
INodeFile file = node.asFile();
// TODO: adjustable cache replication factor
namesystem.setCacheReplicationInt(directive.getPath(),
@@ -192,96 +202,90 @@ private synchronized Fallible<PathBasedCacheEntry> addDirective(
} catch (IOException ioe) {
LOG.info("addDirective " + directive +": failed to cache file: " +
ioe.getClass().getName() +": " + ioe.getMessage());
return new Fallible<PathBasedCacheEntry>(ioe);
return new Fallible<PathBasedCacheDescriptor>(ioe);
}
return new Fallible<PathBasedCacheEntry>(entry);
return new Fallible<PathBasedCacheDescriptor>(
entry.getDescriptor());
}
public synchronized List<Fallible<PathBasedCacheEntry>> addDirectives(
public synchronized List<Fallible<PathBasedCacheDescriptor>> addDirectives(
List<PathBasedCacheDirective> directives, FSPermissionChecker pc) {
ArrayList<Fallible<PathBasedCacheEntry>> results =
new ArrayList<Fallible<PathBasedCacheEntry>>(directives.size());
ArrayList<Fallible<PathBasedCacheDescriptor>> results =
new ArrayList<Fallible<PathBasedCacheDescriptor>>(directives.size());
for (PathBasedCacheDirective directive: directives) {
results.add(addDirective(directive, pc));
}
return results;
}
private synchronized Fallible<Long> removeEntry(long entryId,
private synchronized Fallible<Long> removeDescriptor(long id,
FSPermissionChecker pc) {
// Check for invalid IDs.
if (entryId <= 0) {
LOG.info("removeEntry " + entryId + ": invalid non-positive entry ID.");
return new Fallible<Long>(new InvalidIdException(entryId));
if (id <= 0) {
LOG.info("removeDescriptor " + id + ": invalid non-positive " +
"descriptor ID.");
return new Fallible<Long>(new InvalidIdException(id));
}
// Find the entry.
PathBasedCacheEntry existing = entriesById.get(entryId);
PathBasedCacheEntry existing = entriesById.get(id);
if (existing == null) {
LOG.info("removeEntry " + entryId + ": entry not found.");
return new Fallible<Long>(new NoSuchIdException(entryId));
LOG.info("removeDescriptor " + id + ": entry not found.");
return new Fallible<Long>(new NoSuchIdException(id));
}
CachePool pool = cachePools.get(existing.getDirective().getPool());
CachePool pool = cachePools.get(existing.getDescriptor().getPool());
if (pool == null) {
LOG.info("removeEntry " + entryId + ": pool not found for directive " +
existing.getDirective());
LOG.info("removeDescriptor " + id + ": pool not found for directive " +
existing.getDescriptor());
return new Fallible<Long>(
new UnexpectedRemovePathBasedCacheEntryException(entryId));
new UnexpectedRemovePathBasedCacheDescriptorException(id));
}
if ((pc != null) && (!pc.checkPermission(pool, FsAction.WRITE))) {
LOG.info("removeEntry " + entryId + ": write permission denied to " +
LOG.info("removeDescriptor " + id + ": write permission denied to " +
"pool " + pool + " for entry " + existing);
return new Fallible<Long>(
new RemovePermissionDeniedException(entryId));
new RemovePermissionDeniedException(id));
}
// Remove the corresponding entry in entriesByDirective.
if (entriesByDirective.remove(existing.getDirective()) == null) {
LOG.warn("removeEntry " + entryId + ": failed to find existing entry " +
existing + " in entriesByDirective");
return new Fallible<Long>(
new UnexpectedRemovePathBasedCacheEntryException(entryId));
}
// Remove the corresponding entry in entriesByPath.
String path = existing.getDirective().getPath();
String path = existing.getDescriptor().getPath();
List<PathBasedCacheEntry> entries = entriesByPath.get(path);
if (entries == null || !entries.remove(existing)) {
return new Fallible<Long>(
new UnexpectedRemovePathBasedCacheEntryException(entryId));
new UnexpectedRemovePathBasedCacheDescriptorException(id));
}
if (entries.size() == 0) {
entriesByPath.remove(path);
}
entriesById.remove(entryId);
entriesById.remove(id);
// Set the path as uncached in the namesystem
try {
INode node = dir.getINode(existing.getDirective().getPath());
if (node.isFile()) {
namesystem.setCacheReplicationInt(existing.getDirective().getPath(),
INode node = dir.getINode(existing.getDescriptor().getPath());
if (node != null && node.isFile()) {
namesystem.setCacheReplicationInt(existing.getDescriptor().getPath(),
(short) 0);
}
} catch (IOException e) {
LOG.warn("removeEntry " + entryId + ": failure while setting cache"
LOG.warn("removeDescriptor " + id + ": failure while setting cache"
+ " replication factor", e);
return new Fallible<Long>(e);
}
LOG.info("removeEntry successful for PathCacheEntry id " + entryId);
return new Fallible<Long>(entryId);
LOG.info("removeDescriptor successful for PathCacheEntry id " + id);
return new Fallible<Long>(id);
}
public synchronized List<Fallible<Long>> removeEntries(List<Long> entryIds,
public synchronized List<Fallible<Long>> removeDescriptors(List<Long> ids,
FSPermissionChecker pc) {
ArrayList<Fallible<Long>> results =
new ArrayList<Fallible<Long>>(entryIds.size());
for (Long entryId : entryIds) {
results.add(removeEntry(entryId, pc));
new ArrayList<Fallible<Long>>(ids.size());
for (Long id : ids) {
results.add(removeDescriptor(id, pc));
}
return results;
}
public synchronized BatchedListEntries<PathBasedCacheEntry>
listPathBasedCacheEntries(long prevId, String filterPool,
public synchronized BatchedListEntries<PathBasedCacheDescriptor>
listPathBasedCacheDescriptors(long prevId, String filterPool,
String filterPath, FSPermissionChecker pc) throws IOException {
final int NUM_PRE_ALLOCATED_ENTRIES = 16;
if (filterPath != null) {
@@ -289,16 +293,16 @@ public synchronized List<Fallible<Long>> removeEntries(List<Long> entryIds,
throw new IOException("invalid path name '" + filterPath + "'");
}
}
ArrayList<PathBasedCacheEntry> replies =
new ArrayList<PathBasedCacheEntry>(NUM_PRE_ALLOCATED_ENTRIES);
ArrayList<PathBasedCacheDescriptor> replies =
new ArrayList<PathBasedCacheDescriptor>(NUM_PRE_ALLOCATED_ENTRIES);
int numReplies = 0;
SortedMap<Long, PathBasedCacheEntry> tailMap = entriesById.tailMap(prevId + 1);
for (Entry<Long, PathBasedCacheEntry> cur : tailMap.entrySet()) {
if (numReplies >= maxListCacheDirectivesResponses) {
return new BatchedListEntries<PathBasedCacheEntry>(replies, true);
if (numReplies >= maxListCacheDescriptorsResponses) {
return new BatchedListEntries<PathBasedCacheDescriptor>(replies, true);
}
PathBasedCacheEntry curEntry = cur.getValue();
PathBasedCacheDirective directive = cur.getValue().getDirective();
PathBasedCacheDirective directive = cur.getValue().getDescriptor();
if (filterPool != null &&
!directive.getPool().equals(filterPool)) {
continue;
@@ -307,17 +311,12 @@ public synchronized List<Fallible<Long>> removeEntries(List<Long> entryIds,
!directive.getPath().equals(filterPath)) {
continue;
}
CachePool pool = cachePools.get(curEntry.getDirective().getPool());
if (pool == null) {
LOG.error("invalid pool for PathBasedCacheEntry " + curEntry);
continue;
}
if (pc.checkPermission(pool, FsAction.READ)) {
replies.add(cur.getValue());
if (pc.checkPermission(curEntry.getPool(), FsAction.READ)) {
replies.add(cur.getValue().getDescriptor());
numReplies++;
}
}
return new BatchedListEntries<PathBasedCacheEntry>(replies, false);
return new BatchedListEntries<PathBasedCacheDescriptor>(replies, false);
}
/**
@@ -409,12 +408,12 @@ public synchronized void removeCachePool(String poolName)
// Remove entries using this pool
// TODO: could optimize this somewhat to avoid the need to iterate
// over all entries in entriesByDirective
Iterator<Entry<PathBasedCacheDirective, PathBasedCacheEntry>> iter =
entriesByDirective.entrySet().iterator();
// over all entries in entriesById
Iterator<Entry<Long, PathBasedCacheEntry>> iter =
entriesById.entrySet().iterator();
while (iter.hasNext()) {
Entry<PathBasedCacheDirective, PathBasedCacheEntry> entry = iter.next();
if (entry.getKey().getPool().equals(poolName)) {
Entry<Long, PathBasedCacheEntry> entry = iter.next();
if (entry.getValue().getPool() == pool) {
entriesById.remove(entry.getValue().getEntryId());
iter.remove();
}
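With entriesByDirective gone, duplicate detection now walks the per-path list and compares pool names, while entriesById remains a TreeMap so listings can resume via tailMap(prevId + 1). A simplified, lock-free sketch of the two surviving indexes; Entry is a stand-in for PathBasedCacheEntry, which really holds a CachePool reference rather than a pool name:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class CacheIndexSketch {
      /** Simplified stand-in for PathBasedCacheEntry. */
      static class Entry {
        final long id;
        final String path;
        final String pool;
        Entry(long id, String path, String pool) {
          this.id = id; this.path = path; this.pool = pool;
        }
      }

      // entriesById stays sorted so a listing can resume with
      // tailMap(prevId + 1); entriesByPath replaces the old
      // entriesByDirective index.
      private final TreeMap<Long, Entry> entriesById = new TreeMap<Long, Entry>();
      private final Map<String, List<Entry>> entriesByPath =
          new HashMap<String, List<Entry>>();

      Entry findEntry(String path, String pool) {
        List<Entry> existing = entriesByPath.get(path);
        if (existing == null) {
          return null;
        }
        for (Entry entry : existing) {
          if (entry.pool.equals(pool)) {
            return entry;  // this (path, pool) pair is already cached
          }
        }
        return null;
      }

      void add(Entry entry) {
        entriesById.put(entry.id, entry);
        List<Entry> list = entriesByPath.get(entry.path);
        if (list == null) {
          list = new ArrayList<Entry>();
          entriesByPath.put(entry.path, list);
        }
        list.add(entry);
      }
    }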

View File

@@ -144,7 +144,7 @@
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -6801,17 +6801,17 @@ void removeSnapshottableDirs(List<INodeDirectorySnapshottable> toRemove) {
}
@SuppressWarnings("unchecked")
List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
List<Fallible<PathBasedCacheDescriptor>> addPathBasedCacheDirectives(
List<PathBasedCacheDirective> directives) throws IOException {
CacheEntryWithPayload retryCacheEntry =
RetryCache.waitForCompletion(retryCache, null);
if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
return (List<Fallible<PathBasedCacheEntry>>) retryCacheEntry.getPayload();
return (List<Fallible<PathBasedCacheDescriptor>>) retryCacheEntry.getPayload();
}
final FSPermissionChecker pc = isPermissionEnabled ?
getPermissionChecker() : null;
boolean success = false;
List<Fallible<PathBasedCacheEntry>> results = null;
List<Fallible<PathBasedCacheDescriptor>> results = null;
checkOperation(OperationCategory.WRITE);
writeLock();
try {
@@ -6837,7 +6837,7 @@ List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
}
@SuppressWarnings("unchecked")
List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids) throws IOException {
List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids) throws IOException {
CacheEntryWithPayload retryCacheEntry =
RetryCache.waitForCompletion(retryCache, null);
if (retryCacheEntry != null && retryCacheEntry.isSuccess()) {
@@ -6855,13 +6855,13 @@ List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids) throws IOExcept
throw new SafeModeException(
"Cannot remove PathBasedCache directives", safeMode);
}
results = cacheManager.removeEntries(ids, pc);
results = cacheManager.removeDescriptors(ids, pc);
//getEditLog().logRemovePathBasedCacheEntries(results); FIXME: HDFS-5119
success = true;
} finally {
writeUnlock();
if (isAuditEnabled() && isExternalInvocation()) {
logAuditEvent(success, "removePathBasedCacheEntries", null, null, null);
logAuditEvent(success, "removePathBasedCacheDescriptors", null, null, null);
}
RetryCache.setState(retryCacheEntry, success, results);
}
@@ -6869,22 +6869,22 @@ List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids) throws IOExcept
return results;
}
BatchedListEntries<PathBasedCacheEntry> listPathBasedCacheEntries(long startId,
BatchedListEntries<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(long startId,
String pool, String path) throws IOException {
final FSPermissionChecker pc = isPermissionEnabled ?
getPermissionChecker() : null;
BatchedListEntries<PathBasedCacheEntry> results;
BatchedListEntries<PathBasedCacheDescriptor> results;
checkOperation(OperationCategory.READ);
readLock();
boolean success = false;
try {
checkOperation(OperationCategory.READ);
results = cacheManager.listPathBasedCacheEntries(startId, pool, path, pc);
results = cacheManager.listPathBasedCacheDescriptors(startId, pool, path, pc);
success = true;
} finally {
readUnlock();
if (isAuditEnabled() && isExternalInvocation()) {
logAuditEvent(success, "listPathBasedCacheEntries", null, null, null);
logAuditEvent(success, "listPathBasedCacheDescriptors", null, null, null);
}
}
return results;

View File

@@ -61,7 +61,7 @@
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -1213,19 +1213,19 @@ public SnapshotDiffReport getSnapshotDiffReport(String snapshotRoot,
}
@Override
public List<Fallible<PathBasedCacheEntry>> addPathBasedCacheDirectives(
public List<Fallible<PathBasedCacheDescriptor>> addPathBasedCacheDirectives(
List<PathBasedCacheDirective> paths) throws IOException {
return namesystem.addPathBasedCacheDirectives(paths);
}
@Override
public List<Fallible<Long>> removePathBasedCacheEntries(List<Long> ids)
public List<Fallible<Long>> removePathBasedCacheDescriptors(List<Long> ids)
throws IOException {
return namesystem.removePathBasedCacheEntries(ids);
return namesystem.removePathBasedCacheDescriptors(ids);
}
private class ServerSidePathBasedCacheEntriesIterator
extends BatchedRemoteIterator<Long, PathBasedCacheEntry> {
extends BatchedRemoteIterator<Long, PathBasedCacheDescriptor> {
private final String pool;
@@ -1239,19 +1239,19 @@ public ServerSidePathBasedCacheEntriesIterator(Long firstKey, String pool,
}
@Override
public BatchedEntries<PathBasedCacheEntry> makeRequest(
public BatchedEntries<PathBasedCacheDescriptor> makeRequest(
Long nextKey) throws IOException {
return namesystem.listPathBasedCacheEntries(nextKey, pool, path);
return namesystem.listPathBasedCacheDescriptors(nextKey, pool, path);
}
@Override
public Long elementToPrevKey(PathBasedCacheEntry entry) {
public Long elementToPrevKey(PathBasedCacheDescriptor entry) {
return entry.getEntryId();
}
}
@Override
public RemoteIterator<PathBasedCacheEntry> listPathBasedCacheEntries(long prevId,
public RemoteIterator<PathBasedCacheDescriptor> listPathBasedCacheDescriptors(long prevId,
String pool, String path) throws IOException {
return new ServerSidePathBasedCacheEntriesIterator(prevId, pool, path);
}

View File

@@ -28,7 +28,7 @@
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.tools.TableListing.Justification;
import org.apache.hadoop.util.Fallible;
import org.apache.hadoop.util.StringUtils;
@@ -100,10 +100,10 @@ public int run(List<String> args) throws IOException {
new LinkedList<PathBasedCacheDirective>();
PathBasedCacheDirective directive = new PathBasedCacheDirective(path, poolName);
directives.add(directive);
List<Fallible<PathBasedCacheEntry>> results =
List<Fallible<PathBasedCacheDescriptor>> results =
dfs.addPathBasedCacheDirective(directives);
try {
PathBasedCacheEntry entry = results.get(0).get();
PathBasedCacheDescriptor entry = results.get(0).get();
System.out.println("Added PathBasedCache entry " + entry.getEntryId());
return 0;
} catch (IOException e) {
@@ -155,7 +155,7 @@ public int run(List<String> args) throws IOException {
DistributedFileSystem dfs = getDFS();
List<Long> ids = new LinkedList<Long>();
ids.add(id);
List<Fallible<Long>> results = dfs.removePathBasedCacheEntries(ids);
List<Fallible<Long>> results = dfs.removePathBasedCacheDescriptors(ids);
try {
Long resultId = results.get(0).get();
System.out.println("Removed PathBasedCache entry " + resultId);
@@ -208,15 +208,13 @@ public int run(List<String> args) throws IOException {
addField("PATH", Justification.LEFT).
build();
DistributedFileSystem dfs = getDFS();
RemoteIterator<PathBasedCacheEntry> iter =
dfs.listPathBasedCacheEntries(poolFilter, pathFilter);
RemoteIterator<PathBasedCacheDescriptor> iter =
dfs.listPathBasedCacheDescriptors(poolFilter, pathFilter);
int numEntries = 0;
while (iter.hasNext()) {
PathBasedCacheEntry entry = iter.next();
PathBasedCacheDescriptor entry = iter.next();
String row[] = new String[] {
"" + entry.getEntryId(),
entry.getDirective().getPool(),
entry.getDirective().getPath(),
"" + entry.getEntryId(), entry.getPool(), entry.getPath(),
};
tableListing.addRow(row);
numEntries++;

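Outside the shell tool, the same listing is available straight off DistributedFileSystem; as in the tests further down, null pool and path filters match everything. A minimal sketch (the wrapper class and method names are mine):

    import java.io.IOException;
    import org.apache.hadoop.fs.RemoteIterator;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;

    public class ListDescriptors {
      static void printAll(DistributedFileSystem dfs) throws IOException {
        // null filters: list every descriptor in every pool.
        RemoteIterator<PathBasedCacheDescriptor> it =
            dfs.listPathBasedCacheDescriptors(null, null);
        while (it.hasNext()) {
          PathBasedCacheDescriptor d = it.next();
          System.out.println(d.getEntryId() + "\t" + d.getPool() + "\t" + d.getPath());
        }
      }
    }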
View File

@@ -382,37 +382,38 @@ enum AddPathBasedCacheDirectiveErrorProto {
INVALID_PATH_NAME_ERROR = -3;
INVALID_POOL_NAME_ERROR = -4;
ADD_PERMISSION_DENIED_ERROR = -5;
PATH_ALREADY_EXISTS_IN_POOL_ERROR = -6;
}
message RemovePathBasedCacheEntriesRequestProto {
message RemovePathBasedCacheDescriptorsRequestProto {
repeated int64 elements = 1 [packed=true];
}
message RemovePathBasedCacheEntriesResponseProto {
message RemovePathBasedCacheDescriptorsResponseProto {
repeated int64 results = 1 [packed=true];
}
enum RemovePathBasedCacheEntryErrorProto {
enum RemovePathBasedCacheDescriptorErrorProto {
UNEXPECTED_REMOVE_ERROR = -1;
INVALID_CACHED_PATH_ID_ERROR = -2;
NO_SUCH_CACHED_PATH_ID_ERROR = -3;
REMOVE_PERMISSION_DENIED_ERROR = -4;
}
message ListPathBasedCacheEntriesRequestProto {
message ListPathBasedCacheDescriptorsRequestProto {
required int64 prevId = 1;
optional string pool = 2;
optional string path = 3;
}
message ListPathBasedCacheEntriesElementProto {
message ListPathBasedCacheDescriptorsElementProto {
required int64 id = 1;
required string pool = 2;
required string path = 3;
}
message ListPathBasedCacheEntriesResponseProto {
repeated ListPathBasedCacheEntriesElementProto elements = 1;
message ListPathBasedCacheDescriptorsResponseProto {
repeated ListPathBasedCacheDescriptorsElementProto elements = 1;
required bool hasMore = 2;
}
@@ -645,10 +646,10 @@ service ClientNamenodeProtocol {
rpc getFileInfo(GetFileInfoRequestProto) returns(GetFileInfoResponseProto);
rpc addPathBasedCacheDirectives(AddPathBasedCacheDirectivesRequestProto)
returns (AddPathBasedCacheDirectivesResponseProto);
rpc removePathBasedCacheEntries(RemovePathBasedCacheEntriesRequestProto)
returns (RemovePathBasedCacheEntriesResponseProto);
rpc listPathBasedCacheEntries(ListPathBasedCacheEntriesRequestProto)
returns (ListPathBasedCacheEntriesResponseProto);
rpc removePathBasedCacheDescriptors(RemovePathBasedCacheDescriptorsRequestProto)
returns (RemovePathBasedCacheDescriptorsResponseProto);
rpc listPathBasedCacheDescriptors(ListPathBasedCacheDescriptorsRequestProto)
returns (ListPathBasedCacheDescriptorsResponseProto);
rpc addCachePool(AddCachePoolRequestProto)
returns(AddCachePoolResponseProto);
rpc modifyCachePool(ModifyCachePoolRequestProto)

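Since these are ordinary protobuf messages, the translator layer builds requests with the generated builders. Assuming the stock protoc Java codegen (with ClientNamenodeProtocolProtos as the generated outer class, an assumption on my part), a list request and response walk look roughly like:

    // Build a request: prevId is required, pool/path are optional filters.
    ListPathBasedCacheDescriptorsRequestProto req =
        ListPathBasedCacheDescriptorsRequestProto.newBuilder()
            .setPrevId(0L)       // 0 = start from the beginning
            .setPool("pool2")    // optional pool filter
            .build();

    // resp: a ListPathBasedCacheDescriptorsResponseProto obtained from the RPC stub.
    // hasMore says whether to issue another request, using the last element's id
    // as the next prevId.
    for (ListPathBasedCacheDescriptorsElementProto e : resp.getElementsList()) {
      System.out.println(e.getId() + " " + e.getPool() + " " + e.getPath());
    }
    boolean more = resp.getHasMore();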
View File

@@ -37,7 +37,7 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.io.nativeio.NativeIO;
@@ -151,12 +151,11 @@ public void testCachePaths() throws Exception {
List<PathBasedCacheDirective> toAdd =
new ArrayList<PathBasedCacheDirective>();
toAdd.add(new PathBasedCacheDirective(paths.get(i), pool));
List<Fallible<PathBasedCacheEntry>> fallibles =
List<Fallible<PathBasedCacheDescriptor>> fallibles =
nnRpc.addPathBasedCacheDirectives(toAdd);
assertEquals("Unexpected number of fallibles",
1, fallibles.size());
PathBasedCacheEntry entry = fallibles.get(0).get();
PathBasedCacheDirective directive = entry.getDirective();
PathBasedCacheDescriptor directive = fallibles.get(0).get();
assertEquals("Directive does not match requested path", paths.get(i),
directive.getPath());
assertEquals("Directive does not match requested pool", pool,
@@ -165,13 +164,13 @@ public void testCachePaths() throws Exception {
waitForExpectedNumCachedBlocks(expected);
}
// Uncache and check each path in sequence
RemoteIterator<PathBasedCacheEntry> entries =
nnRpc.listPathBasedCacheEntries(0, null, null);
RemoteIterator<PathBasedCacheDescriptor> entries =
nnRpc.listPathBasedCacheDescriptors(0, null, null);
for (int i=0; i<numFiles; i++) {
PathBasedCacheEntry entry = entries.next();
PathBasedCacheDescriptor entry = entries.next();
List<Long> toRemove = new ArrayList<Long>();
toRemove.add(entry.getEntryId());
List<Fallible<Long>> fallibles = nnRpc.removePathBasedCacheEntries(toRemove);
List<Fallible<Long>> fallibles = nnRpc.removePathBasedCacheDescriptors(toRemove);
assertEquals("Unexpected number of fallibles", 1, fallibles.size());
Long l = fallibles.get(0).get();
assertEquals("Removed entryId does not match requested",

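The add path in this test is the crux of the rename: the client submits PathBasedCacheDirectives (path plus pool, no id) and gets back PathBasedCacheDescriptors, the same data plus the server-assigned entry id. A condensed sketch of that round trip against a NamenodeProtocols handle (path and pool names are hypothetical):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
    import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
    import org.apache.hadoop.util.Fallible;

    List<PathBasedCacheDirective> toAdd = new ArrayList<PathBasedCacheDirective>();
    toAdd.add(new PathBasedCacheDirective("/alpha", "pool1"));
    List<Fallible<PathBasedCacheDescriptor>> results =
        nnRpc.addPathBasedCacheDirectives(toAdd);
    PathBasedCacheDescriptor descriptor = results.get(0).get(); // throws if this one failed
    long id = descriptor.getEntryId(); // server-assigned; hand this to remove later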
View File

@@ -39,10 +39,10 @@
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.InvalidPathNameError;
import org.apache.hadoop.hdfs.protocol.AddPathBasedCacheDirectiveException.PoolWritePermissionDeniedError;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.InvalidIdException;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.InvalidIdException;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheEntry;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheEntryException.NoSuchIdException;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDescriptor;
import org.apache.hadoop.hdfs.protocol.RemovePathBasedCacheDescriptorException.NoSuchIdException;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
@@ -94,7 +94,7 @@ public void testCreateAndModifyPools() throws Exception {
MiniDFSCluster cluster = null;
// set low limits here for testing purposes
conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES, 2);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES, 2);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DESCRIPTORS_NUM_RESPONSES, 2);
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.waitActive();
NamenodeProtocols proto = cluster.getNameNodeRpc();
@@ -132,17 +132,14 @@ public void testCreateAndModifyPools() throws Exception {
}
private static void validateListAll(
RemoteIterator<PathBasedCacheEntry> iter,
RemoteIterator<PathBasedCacheDescriptor> iter,
long id0, long id1, long id2) throws Exception {
Assert.assertEquals(new PathBasedCacheEntry(id0,
new PathBasedCacheDirective("/alpha", "pool1")),
iter.next());
Assert.assertEquals(new PathBasedCacheEntry(id1,
new PathBasedCacheDirective("/beta", "pool2")),
iter.next());
Assert.assertEquals(new PathBasedCacheEntry(id2,
new PathBasedCacheDirective("/gamma", "pool1")),
iter.next());
Assert.assertEquals(new PathBasedCacheDescriptor(id0,
"/alpha", "pool1"), iter.next());
Assert.assertEquals(new PathBasedCacheDescriptor(id1,
"/beta", "pool2"), iter.next());
Assert.assertEquals(new PathBasedCacheDescriptor(id2,
"/gamma", "pool1"), iter.next());
Assert.assertFalse(iter.hasNext());
}
@@ -164,11 +161,11 @@ public void testSetAndGet() throws Exception {
proto.addCachePool(new CachePoolInfo("pool4").
setMode(new FsPermission((short)0)));
List<Fallible<PathBasedCacheEntry>> addResults1 =
List<Fallible<PathBasedCacheDescriptor>> addResults1 =
unprivilegedUser.doAs(new PrivilegedExceptionAction<
List<Fallible<PathBasedCacheEntry>>>() {
List<Fallible<PathBasedCacheDescriptor>>>() {
@Override
public List<Fallible<PathBasedCacheEntry>> run() throws IOException {
public List<Fallible<PathBasedCacheDescriptor>> run() throws IOException {
return proto.addPathBasedCacheDirectives(Arrays.asList(
new PathBasedCacheDirective[] {
new PathBasedCacheDirective("/alpha", "pool1"),
@@ -212,7 +209,7 @@ public List<Fallible<PathBasedCacheEntry>> run() throws IOException {
//instanceof PoolWritePermissionDeniedError);
}
List<Fallible<PathBasedCacheEntry>> addResults2 =
List<Fallible<PathBasedCacheDescriptor>> addResults2 =
proto.addPathBasedCacheDirectives(Arrays.asList(
new PathBasedCacheDirective[] {
new PathBasedCacheDirective("/alpha", "pool1"),
@@ -240,20 +237,20 @@ public List<Fallible<PathBasedCacheEntry>> run() throws IOException {
long ids2[] = new long[1];
ids2[0] = addResults2.get(3).get().getEntryId();
RemoteIterator<PathBasedCacheEntry> iter =
proto.listPathBasedCacheEntries(0, null, null);
RemoteIterator<PathBasedCacheDescriptor> iter =
proto.listPathBasedCacheDescriptors(0, null, null);
validateListAll(iter, ids1[0], ids1[1], ids2[0]);
iter = proto.listPathBasedCacheEntries(0, null, null);
iter = proto.listPathBasedCacheDescriptors(0, null, null);
validateListAll(iter, ids1[0], ids1[1], ids2[0]);
iter = proto.listPathBasedCacheEntries(0, "pool3", null);
iter = proto.listPathBasedCacheDescriptors(0, "pool3", null);
Assert.assertFalse(iter.hasNext());
iter = proto.listPathBasedCacheEntries(0, "pool2", null);
iter = proto.listPathBasedCacheDescriptors(0, "pool2", null);
Assert.assertEquals(addResults1.get(1).get(),
iter.next());
Assert.assertFalse(iter.hasNext());
List<Fallible<Long>> removeResults1 =
proto.removePathBasedCacheEntries(Arrays.asList(
proto.removePathBasedCacheDescriptors(Arrays.asList(
new Long[] { ids1[1], -42L, 999999L }));
Assert.assertEquals(Long.valueOf(ids1[1]),
removeResults1.get(0).get());
@@ -269,7 +266,7 @@ public List<Fallible<PathBasedCacheEntry>> run() throws IOException {
} catch (IOException ioe) {
Assert.assertTrue(ioe.getCause() instanceof NoSuchIdException);
}
iter = proto.listPathBasedCacheEntries(0, "pool2", null);
iter = proto.listPathBasedCacheDescriptors(0, "pool2", null);
Assert.assertFalse(iter.hasNext());
} finally {
if (cluster != null) { cluster.shutdown(); }
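As the tail of this test shows, batch removal is per-element fallible: each Fallible either holds the removed id or rethrows the IOException for that element, with the specific error (here NoSuchIdException) as the cause. A minimal consumption sketch (the ids are hypothetical):

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.util.Fallible;

    List<Fallible<Long>> results = proto.removePathBasedCacheDescriptors(
        Arrays.asList(new Long[] { 1L, 999999L }));
    for (Fallible<Long> r : results) {
      try {
        System.out.println("removed descriptor " + r.get());
      } catch (IOException e) {
        // Per-element failure; the cause carries the specific error type.
        System.err.println("remove failed: " + e.getCause());
      }
    }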