HDFS-6851. Refactor EncryptionZoneWithId and EncryptionZone. (clamb via wang)

Andrew Wang 2014-09-16 16:29:35 -07:00
parent 02adf7185d
commit 33ce887c20
15 changed files with 73 additions and 194 deletions

CHANGES.txt

@@ -466,6 +466,8 @@ Release 2.6.0 - UNRELEASED
     HDFS-7006. Test encryption zones with KMS. (Anthony Young-Garner and tucu)
 
+    HDFS-6851. Refactor EncryptionZoneWithId and EncryptionZone. (clamb via wang)
+
   OPTIMIZATIONS
 
     HDFS-6690. Deduplicate xattr names in memory. (wang)

DFSClient.java

@@ -154,7 +154,6 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.EncryptionZoneIterator;
-import org.apache.hadoop.hdfs.protocol.EncryptionZoneWithId;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsBlocksMetadata;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -2887,8 +2886,8 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
       throws IOException {
     checkOpen();
     try {
-      final EncryptionZoneWithId ezi = namenode.getEZForPath(src);
-      return (ezi.getId() < 0) ? null : ezi;
+      final EncryptionZone ez = namenode.getEZForPath(src);
+      return (ez.getId() < 0) ? null : ez;
     } catch (RemoteException re) {
       throw re.unwrapRemoteException(AccessControlException.class,
           UnresolvedPathException.class);
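
A minimal caller-side sketch of the contract this hunk preserves: the NameNode still answers with the NULL_EZ marker (id < 0) when the path is not in an encryption zone, and DFSClient maps that marker to null. The sketch goes through HdfsAdmin, the public wrapper around this client call; the NameNode URI and path below are made up.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class GetEZForPathExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical NameNode address; point this at a real cluster.
    HdfsAdmin admin = new HdfsAdmin(URI.create("hdfs://localhost:8020"), conf);
    // null means "not in an encryption zone"; internally that is the
    // NULL_EZ marker (id < 0) filtered out by DFSClient#getEZForPath.
    EncryptionZone ez = admin.getEncryptionZoneForPath(new Path("/secure/data"));
    if (ez == null) {
      System.out.println("/secure/data is not in an encryption zone");
    } else {
      System.out.println("zone=" + ez.getPath() + " key=" + ez.getKeyName()
          + " id=" + ez.getId());
    }
  }
}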

ClientProtocol.java

@@ -1282,7 +1282,7 @@ public interface ClientProtocol {
    * Get the encryption zone for a path.
    */
   @Idempotent
-  public EncryptionZoneWithId getEZForPath(String src)
+  public EncryptionZone getEZForPath(String src)
       throws IOException;
 
   /**
@@ -1293,7 +1293,7 @@ public interface ClientProtocol {
    * @return Batch of encryption zones.
    */
   @Idempotent
-  public BatchedEntries<EncryptionZoneWithId> listEncryptionZones(
+  public BatchedEntries<EncryptionZone> listEncryptionZones(
       long prevId) throws IOException;
 
   /**

EncryptionZone.java

@@ -24,7 +24,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 /**
  * A simple class for representing an encryption zone. Presently an encryption
- * zone only has a path (the root of the encryption zone) and a key name.
+ * zone only has a path (the root of the encryption zone), a key name, and a
+ * unique id. The id is used to implement batched listing of encryption zones.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
@@ -32,10 +33,12 @@ public class EncryptionZone {
 
   private final String path;
   private final String keyName;
+  private final long id;
 
-  public EncryptionZone(String path, String keyName) {
+  public EncryptionZone(String path, String keyName, long id) {
     this.path = path;
     this.keyName = keyName;
+    this.id = id;
   }
 
   public String getPath() {
@@ -46,10 +49,14 @@ public class EncryptionZone {
     return keyName;
   }
 
+  public long getId() {
+    return id;
+  }
+
   @Override
   public int hashCode() {
     return new HashCodeBuilder(13, 31).
-        append(path).append(keyName).
+        append(path).append(keyName).append(id).
         toHashCode();
   }
 
@@ -69,11 +76,14 @@ public class EncryptionZone {
     return new EqualsBuilder().
         append(path, rhs.path).
         append(keyName, rhs.keyName).
+        append(id, rhs.id).
         isEquals();
   }
 
   @Override
   public String toString() {
-    return "EncryptionZone [path=" + path + ", keyName=" + keyName + "]";
+    return "EncryptionZone [path=" + path +
+        ", keyName=" + keyName +
+        ", id=" + id + "]";
   }
 }
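
A quick sketch of the merged class's value semantics after this change: the id now participates in the constructor, equals(), hashCode(), and toString(). The path, key name, and ids below are made up.

import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class EncryptionZoneValueSemantics {
  public static void main(String[] args) {
    EncryptionZone a = new EncryptionZone("/secure", "key1", 16386L);
    EncryptionZone b = new EncryptionZone("/secure", "key1", 16386L);
    EncryptionZone c = new EncryptionZone("/secure", "key1", 16387L);

    System.out.println(a.equals(b));                  // true: same path, key name, and id
    System.out.println(a.equals(c));                  // false: ids differ, and id is now part of equals()
    System.out.println(a.hashCode() == b.hashCode()); // true: hashCode() also folds in the id
    System.out.println(a);  // EncryptionZone [path=/secure, keyName=key1, id=16386]
  }
}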

EncryptionZoneIterator.java

@@ -22,7 +22,7 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.BatchedRemoteIterator;
 
 /**
  * EncryptionZoneIterator is a remote iterator that iterates over encryption
@@ -30,22 +30,24 @@ import org.apache.hadoop.fs.RemoteIterator;
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public class EncryptionZoneIterator implements RemoteIterator<EncryptionZone> {
+public class EncryptionZoneIterator
+    extends BatchedRemoteIterator<Long, EncryptionZone> {
 
-  private final EncryptionZoneWithIdIterator iterator;
+  private final ClientProtocol namenode;
 
   public EncryptionZoneIterator(ClientProtocol namenode) {
-    iterator = new EncryptionZoneWithIdIterator(namenode);
+    super(Long.valueOf(0));
+    this.namenode = namenode;
   }
 
   @Override
-  public boolean hasNext() throws IOException {
-    return iterator.hasNext();
+  public BatchedEntries<EncryptionZone> makeRequest(Long prevId)
+      throws IOException {
+    return namenode.listEncryptionZones(prevId);
   }
 
   @Override
-  public EncryptionZone next() throws IOException {
-    EncryptionZoneWithId ezwi = iterator.next();
-    return ezwi.toEncryptionZone();
+  public Long elementToPrevKey(EncryptionZone entry) {
+    return entry.getId();
   }
 }
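
A usage sketch for the batched listing from the admin side: HdfsAdmin#listEncryptionZones returns a RemoteIterator backed by this iterator, and each makeRequest call passes the id of the last zone seen as prevId. The sketch assumes superuser credentials and a made-up NameNode URI.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class ListEncryptionZonesExample {
  public static void main(String[] args) throws Exception {
    HdfsAdmin admin =
        new HdfsAdmin(URI.create("hdfs://localhost:8020"), new Configuration());
    // Batches are fetched lazily; the iterator resumes from the last zone id.
    RemoteIterator<EncryptionZone> it = admin.listEncryptionZones();
    while (it.hasNext()) {
      EncryptionZone zone = it.next();
      System.out.println(zone.getId() + "\t" + zone.getPath() + "\t" + zone.getKeyName());
    }
  }
}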

EncryptionZoneWithId.java (deleted)

@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdfs.protocol;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.hadoop.classification.InterfaceAudience;
-
-/**
- * Internal class similar to an {@link EncryptionZone} which also holds a
- * unique id. Used to implement batched listing of encryption zones.
- */
-@InterfaceAudience.Private
-public class EncryptionZoneWithId extends EncryptionZone {
-
-  final long id;
-
-  public EncryptionZoneWithId(String path, String keyName, long id) {
-    super(path, keyName);
-    this.id = id;
-  }
-
-  public long getId() {
-    return id;
-  }
-
-  EncryptionZone toEncryptionZone() {
-    return new EncryptionZone(getPath(), getKeyName());
-  }
-
-  @Override
-  public int hashCode() {
-    return new HashCodeBuilder(17, 29)
-        .append(super.hashCode())
-        .append(id)
-        .toHashCode();
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-    if (!super.equals(o)) {
-      return false;
-    }
-
-    EncryptionZoneWithId that = (EncryptionZoneWithId) o;
-
-    if (id != that.id) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public String toString() {
-    return "EncryptionZoneWithId [" +
-        "id=" + id +
-        ", " + super.toString() +
-        ']';
-  }
-}

EncryptionZoneWithIdIterator.java (deleted)

@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdfs.protocol;
-
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.BatchedRemoteIterator;
-
-/**
- * Used on the client-side to iterate over the list of encryption zones
- * stored on the namenode.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class EncryptionZoneWithIdIterator
-    extends BatchedRemoteIterator<Long, EncryptionZoneWithId> {
-
-  private final ClientProtocol namenode;
-
-  EncryptionZoneWithIdIterator(ClientProtocol namenode) {
-    super(Long.valueOf(0));
-    this.namenode = namenode;
-  }
-
-  @Override
-  public BatchedEntries<EncryptionZoneWithId> makeRequest(Long prevId)
-      throws IOException {
-    return namenode.listEncryptionZones(prevId);
-  }
-
-  @Override
-  public Long elementToPrevKey(EncryptionZoneWithId entry) {
-    return entry.getId();
-  }
-}

ClientNamenodeProtocolServerSideTranslatorPB.java

@@ -32,7 +32,7 @@ import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.hdfs.protocol.EncryptionZoneWithId;
+import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
@@ -1331,7 +1331,7 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
     try {
       GetEZForPathResponseProto.Builder builder =
           GetEZForPathResponseProto.newBuilder();
-      final EncryptionZoneWithId ret = server.getEZForPath(req.getSrc());
+      final EncryptionZone ret = server.getEZForPath(req.getSrc());
       builder.setZone(PBHelper.convert(ret));
       return builder.build();
     } catch (IOException e) {
@@ -1344,7 +1344,7 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
       RpcController controller, ListEncryptionZonesRequestProto req)
       throws ServiceException {
     try {
-      BatchedEntries<EncryptionZoneWithId> entries = server
+      BatchedEntries<EncryptionZone> entries = server
           .listEncryptionZones(req.getId());
       ListEncryptionZonesResponseProto.Builder builder =
           ListEncryptionZonesResponseProto.newBuilder();

ClientNamenodeProtocolTranslatorPB.java

@@ -55,7 +55,7 @@ import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.hdfs.protocol.EncryptionZoneWithId;
+import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.RollingUpgradeAction;
@@ -186,7 +186,7 @@ import com.google.protobuf.ServiceException;
 import static org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
 import static org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos
-    .EncryptionZoneWithIdProto;
+    .EncryptionZoneProto;
 
 /**
  * This class forwards NN's ClientProtocol calls as RPC calls to the NN server
@@ -1331,7 +1331,7 @@ public class ClientNamenodeProtocolTranslatorPB implements
   }
 
   @Override
-  public EncryptionZoneWithId getEZForPath(String src)
+  public EncryptionZone getEZForPath(String src)
       throws IOException {
     final GetEZForPathRequestProto.Builder builder =
         GetEZForPathRequestProto.newBuilder();
@@ -1347,7 +1347,7 @@ public class ClientNamenodeProtocolTranslatorPB implements
   }
 
   @Override
-  public BatchedEntries<EncryptionZoneWithId> listEncryptionZones(long id)
+  public BatchedEntries<EncryptionZone> listEncryptionZones(long id)
       throws IOException {
     final ListEncryptionZonesRequestProto req =
         ListEncryptionZonesRequestProto.newBuilder()
@@ -1356,12 +1356,12 @@ public class ClientNamenodeProtocolTranslatorPB implements
     try {
       EncryptionZonesProtos.ListEncryptionZonesResponseProto response =
          rpcProxy.listEncryptionZones(null, req);
-      List<EncryptionZoneWithId> elements =
+      List<EncryptionZone> elements =
          Lists.newArrayListWithCapacity(response.getZonesCount());
-      for (EncryptionZoneWithIdProto p : response.getZonesList()) {
+      for (EncryptionZoneProto p : response.getZonesList()) {
        elements.add(PBHelper.convert(p));
      }
-      return new BatchedListEntries<EncryptionZoneWithId>(elements,
+      return new BatchedListEntries<EncryptionZone>(elements,
          response.getHasMore());
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);

PBHelper.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.protocolPB;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos
-    .EncryptionZoneWithIdProto;
+    .EncryptionZoneProto;
 
 import java.io.EOFException;
 import java.io.IOException;
@@ -63,7 +63,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
 import org.apache.hadoop.hdfs.protocol.DatanodeLocalInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.hdfs.protocol.EncryptionZoneWithId;
+import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.fs.FileEncryptionInfo;
 import org.apache.hadoop.hdfs.protocol.FsAclPermission;
@@ -2303,15 +2303,15 @@ public class PBHelper {
     return builder.build();
   }
 
-  public static EncryptionZoneWithIdProto convert(EncryptionZoneWithId zone) {
-    return EncryptionZoneWithIdProto.newBuilder()
+  public static EncryptionZoneProto convert(EncryptionZone zone) {
+    return EncryptionZoneProto.newBuilder()
         .setId(zone.getId())
         .setKeyName(zone.getKeyName())
        .setPath(zone.getPath()).build();
   }
 
-  public static EncryptionZoneWithId convert(EncryptionZoneWithIdProto proto) {
-    return new EncryptionZoneWithId(proto.getPath(), proto.getKeyName(),
+  public static EncryptionZone convert(EncryptionZoneProto proto) {
+    return new EncryptionZone(proto.getPath(), proto.getKeyName(),
        proto.getId());
   }
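
A round-trip sketch for the renamed conversion helpers: the proto carries the same three fields (path, keyName, id), so converting to EncryptionZoneProto and back preserves equality. The values are made up.

import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.EncryptionZoneProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;

public class EncryptionZonePBRoundTrip {
  public static void main(String[] args) {
    EncryptionZone zone = new EncryptionZone("/secure", "key1", 16386L);
    // Java object -> protobuf message
    EncryptionZoneProto proto = PBHelper.convert(zone);
    // protobuf message -> Java object
    EncryptionZone back = PBHelper.convert(proto);
    // true: path, keyName, and id all survive the round trip
    System.out.println(zone.equals(back));
  }
}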

EncryptionZoneManager.java

@@ -31,7 +31,7 @@ import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.XAttrHelper;
-import org.apache.hadoop.hdfs.protocol.EncryptionZoneWithId;
+import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,8 +53,8 @@ public class EncryptionZoneManager {
   public static Logger LOG = LoggerFactory.getLogger(EncryptionZoneManager
       .class);
 
-  private static final EncryptionZoneWithId NULL_EZ =
-      new EncryptionZoneWithId("", "", -1);
+  private static final EncryptionZone NULL_EZ =
+      new EncryptionZone("", "", -1);
 
   /**
    * EncryptionZoneInt is the internal representation of an encryption zone. The
@@ -196,18 +196,18 @@ public class EncryptionZoneManager {
   }
 
   /**
-   * Returns an EncryptionZoneWithId representing the ez for a given path.
-   * Returns an empty marker EncryptionZoneWithId if path is not in an ez.
+   * Returns an EncryptionZone representing the ez for a given path.
+   * Returns an empty marker EncryptionZone if path is not in an ez.
    *
    * @param iip The INodesInPath of the path to check
-   * @return the EncryptionZoneWithId representing the ez for the path.
+   * @return the EncryptionZone representing the ez for the path.
    */
-  EncryptionZoneWithId getEZINodeForPath(INodesInPath iip) {
+  EncryptionZone getEZINodeForPath(INodesInPath iip) {
     final EncryptionZoneInt ezi = getEncryptionZoneForPath(iip);
     if (ezi == null) {
       return NULL_EZ;
     } else {
-      return new EncryptionZoneWithId(getFullPathName(ezi), ezi.getKeyName(),
+      return new EncryptionZone(getFullPathName(ezi), ezi.getKeyName(),
           ezi.getINodeId());
     }
   }
@@ -300,19 +300,19 @@ public class EncryptionZoneManager {
    * <p/>
    * Called while holding the FSDirectory lock.
    */
-  BatchedListEntries<EncryptionZoneWithId> listEncryptionZones(long prevId)
+  BatchedListEntries<EncryptionZone> listEncryptionZones(long prevId)
       throws IOException {
     assert dir.hasReadLock();
     NavigableMap<Long, EncryptionZoneInt> tailMap = encryptionZones.tailMap
         (prevId, false);
     final int numResponses = Math.min(maxListEncryptionZonesResponses,
         tailMap.size());
-    final List<EncryptionZoneWithId> zones =
+    final List<EncryptionZone> zones =
         Lists.newArrayListWithExpectedSize(numResponses);
     int count = 0;
     for (EncryptionZoneInt ezi : tailMap.values()) {
-      zones.add(new EncryptionZoneWithId(getFullPathName(ezi),
+      zones.add(new EncryptionZone(getFullPathName(ezi),
          ezi.getKeyName(), ezi.getINodeId()));
      count++;
      if (count >= numResponses) {
@@ -320,6 +320,6 @@ public class EncryptionZoneManager {
      }
    }
    final boolean hasMore = (numResponses < tailMap.size());
-    return new BatchedListEntries<EncryptionZoneWithId>(zones, hasMore);
+    return new BatchedListEntries<EncryptionZone>(zones, hasMore);
  }
}
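
For readers following the paging logic above, a standalone sketch of the same pattern: zones live in a map ordered by inode id, each batch resumes strictly after the caller's prevId, and hasMore tells the client whether to ask again. The map contents, class name, and batch size here are made up; only the tailMap/hasMore mechanics mirror listEncryptionZones.

import java.util.ArrayList;
import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;

public class BatchedListingSketch {
  // Stand-in for maxListEncryptionZonesResponses.
  private static final int MAX_RESPONSES = 2;

  public static void main(String[] args) {
    NavigableMap<Long, String> zones = new TreeMap<Long, String>();
    zones.put(16386L, "/a");
    zones.put(16400L, "/b");
    zones.put(16512L, "/c");

    long prevId = 0;
    boolean hasMore = true;
    while (hasMore) {
      // Exclusive tail map: resume strictly after the last id the client saw.
      NavigableMap<Long, String> tail = zones.tailMap(prevId, false);
      int numResponses = Math.min(MAX_RESPONSES, tail.size());
      List<Long> batch = new ArrayList<Long>(tail.keySet()).subList(0, numResponses);
      hasMore = numResponses < tail.size();
      System.out.println("batch=" + batch + " hasMore=" + hasMore);
      if (batch.isEmpty()) {
        break;
      }
      // The client feeds the last id of the batch back as the next prevId.
      prevId = batch.get(batch.size() - 1);
    }
  }
}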

FSDirectory.java

@@ -59,7 +59,7 @@ import org.apache.hadoop.hdfs.protocol.AclException;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.hdfs.protocol.EncryptionZoneWithId;
+import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.FSLimitException.MaxDirectoryItemsExceededException;
 import org.apache.hadoop.hdfs.protocol.FSLimitException.PathComponentTooLongException;
 import org.apache.hadoop.hdfs.protocol.FsAclPermission;
@@ -2671,7 +2671,7 @@ public class FSDirectory implements Closeable {
     }
   }
 
-  EncryptionZoneWithId getEZForPath(INodesInPath iip) {
+  EncryptionZone getEZForPath(INodesInPath iip) {
     readLock();
     try {
       return ezManager.getEZINodeForPath(iip);
@@ -2680,7 +2680,7 @@ public class FSDirectory implements Closeable {
     }
   }
 
-  BatchedListEntries<EncryptionZoneWithId> listEncryptionZones(long prevId)
+  BatchedListEntries<EncryptionZone> listEncryptionZones(long prevId)
       throws IOException {
     readLock();
     try {

FSNamesystem.java

@@ -178,7 +178,7 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.hdfs.protocol.EncryptionZoneWithId;
+import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
@@ -8629,7 +8629,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
    * @throws AccessControlException if the caller is not the superuser.
    * @throws UnresolvedLinkException if the path can't be resolved.
    */
-  EncryptionZoneWithId getEZForPath(final String srcArg)
+  EncryptionZone getEZForPath(final String srcArg)
       throws AccessControlException, UnresolvedLinkException, IOException {
     String src = srcArg;
     HdfsFileStatus resultingStat = null;
@@ -8646,7 +8646,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       checkOperation(OperationCategory.READ);
       src = resolvePath(src, pathComponents);
       final INodesInPath iip = dir.getINodesInPath(src, true);
-      final EncryptionZoneWithId ret = dir.getEZForPath(iip);
+      final EncryptionZone ret = dir.getEZForPath(iip);
       resultingStat = getAuditFileInfo(src, false);
       success = true;
       return ret;
@@ -8656,7 +8656,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
     }
   }
 
-  BatchedListEntries<EncryptionZoneWithId> listEncryptionZones(long prevId)
+  BatchedListEntries<EncryptionZone> listEncryptionZones(long prevId)
       throws IOException {
     boolean success = false;
     checkSuperuserPrivilege();
@@ -8665,7 +8665,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
     try {
       checkSuperuserPrivilege();
       checkOperation(OperationCategory.READ);
-      final BatchedListEntries<EncryptionZoneWithId> ret =
+      final BatchedListEntries<EncryptionZone> ret =
           dir.listEncryptionZones(prevId);
       success = true;
       return ret;

NameNodeRpcServer.java

@@ -81,7 +81,7 @@ import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.hdfs.protocol.EncryptionZoneWithId;
+import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.FSLimitException;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -1437,13 +1437,13 @@ class NameNodeRpcServer implements NamenodeProtocols {
   }
 
   @Override
-  public EncryptionZoneWithId getEZForPath(String src)
+  public EncryptionZone getEZForPath(String src)
       throws IOException {
     return namesystem.getEZForPath(src);
   }
 
   @Override
-  public BatchedEntries<EncryptionZoneWithId> listEncryptionZones(
+  public BatchedEntries<EncryptionZone> listEncryptionZones(
       long prevId) throws IOException {
     return namesystem.listEncryptionZones(prevId);
   }

encryption.proto

@@ -45,14 +45,14 @@ message ListEncryptionZonesRequestProto {
   required int64 id = 1;
 }
 
-message EncryptionZoneWithIdProto {
+message EncryptionZoneProto {
   required string path = 1;
   required string keyName = 2;
   required int64 id = 3;
 }
 
 message ListEncryptionZonesResponseProto {
-  repeated EncryptionZoneWithIdProto zones = 1;
+  repeated EncryptionZoneProto zones = 1;
   required bool hasMore = 2;
 }
@@ -61,5 +61,5 @@ message GetEZForPathRequestProto {
 }
 
 message GetEZForPathResponseProto {
-  required EncryptionZoneWithIdProto zone = 1;
+  required EncryptionZoneProto zone = 1;
 }
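
A small sketch of building the renamed wire messages directly through the generated builders. Only the message name changed; the field numbers are untouched, so the protobuf wire encoding is the same as before the rename. The values are made up.

import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.EncryptionZoneProto;
import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.ListEncryptionZonesResponseProto;

public class BuildEncryptionZoneProto {
  public static void main(String[] args) {
    EncryptionZoneProto zone = EncryptionZoneProto.newBuilder()
        .setPath("/secure")
        .setKeyName("key1")
        .setId(16386L)
        .build();
    ListEncryptionZonesResponseProto resp = ListEncryptionZonesResponseProto.newBuilder()
        .addZones(zone)
        .setHasMore(false)
        .build();
    System.out.println(resp);
  }
}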