diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f834583630c..806c36f3a7c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -390,6 +390,9 @@ Release 2.0.5-beta - UNRELEASED
 
     HDFS-4618. Default transaction interval for checkpoints is too low. (todd)
 
+    HDFS-4525. Provide an API for knowing that whether file is closed or not.
+    (SreeHari via umamahesh)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index 92287789959..d1db17c7010 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -1544,7 +1544,22 @@ public class DFSClient implements java.io.Closeable {
                                      UnresolvedPathException.class);
     }
   }
-  
+
+  /**
+   * Close status of a file
+   * @return true if file is already closed
+   */
+  public boolean isFileClosed(String src) throws IOException{
+    checkOpen();
+    try {
+      return namenode.isFileClosed(src);
+    } catch(RemoteException re) {
+      throw re.unwrapRemoteException(AccessControlException.class,
+                                     FileNotFoundException.class,
+                                     UnresolvedPathException.class);
+    }
+  }
+  
   /**
    * Get the file info for a specific file or directory. If src
    * refers to a symlink then the FileStatus of the link is returned.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index e772859b8ee..2fda8b8e8ef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -917,4 +917,17 @@ public class DistributedFileSystem extends FileSystem {
   public boolean isInSafeMode() throws IOException {
     return setSafeMode(SafeModeAction.SAFEMODE_GET, true);
   }
+
+  /**
+   * Get the close status of a file
+   * @param src The path to the file
+   *
+   * @return return true if file is closed
+   * @throws FileNotFoundException if the file does not exist.
+   * @throws IOException If an I/O error occurred
+   */
+  public boolean isFileClosed(Path src) throws IOException {
+    return dfs.isFileClosed(getPathName(src));
+  }
+
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index 9621c979473..cbfc2f2759a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -757,7 +757,21 @@ public interface ClientProtocol {
   @Idempotent
   public HdfsFileStatus getFileInfo(String src) throws AccessControlException,
       FileNotFoundException, UnresolvedLinkException, IOException;
-  
+
+  /**
+   * Get the close status of a file
+   * @param src The string representation of the path to the file
+   *
+   * @return return true if file is closed
+   * @throws AccessControlException permission denied
+   * @throws FileNotFoundException file src is not found
+   * @throws UnresolvedLinkException if the path contains a symlink.
+   * @throws IOException If an I/O error occurred
+   */
+  @Idempotent
+  public boolean isFileClosed(String src) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException;
+  
   /**
    * Get the file info for a specific file or directory. If the path
    * refers to a symlink then the FileStatus of the symlink is returned.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
index af422155508..d3e931587cd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
@@ -76,6 +76,8 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetPre
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetPreferredBlockSizeResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetServerDefaultsRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetServerDefaultsResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
@@ -864,4 +866,17 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
       throw new ServiceException(e);
     }
   }
+
+  @Override
+  public IsFileClosedResponseProto isFileClosed(
+      RpcController controller, IsFileClosedRequestProto request)
+      throws ServiceException {
+    try {
+      boolean result = server.isFileClosed(request.getSrc());
+      return IsFileClosedResponseProto.newBuilder().setResult(result).build();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
index 8e510e6d3ec..cd9c8111b49 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java
@@ -77,6 +77,7 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetLis
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetListingResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetPreferredBlockSizeRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetServerDefaultsRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.IsFileClosedRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCorruptFileBlocksRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MetaSaveRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.MkdirsRequestProto;
@@ -850,6 +851,19 @@ public class ClientNamenodeProtocolTranslatorPB implements
       throw ProtobufHelper.getRemoteException(e);
     }
   }
+
+
+  @Override
+  public boolean isFileClosed(String src) throws AccessControlException,
+      FileNotFoundException, UnresolvedLinkException, IOException {
+    IsFileClosedRequestProto req = IsFileClosedRequestProto.newBuilder()
+        .setSrc(src).build();
+    try {
+      return rpcProxy.isFileClosed(null, req).getResult();
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
 
   @Override
   public Object getUnderlyingProxyObject() {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index aa077fe5a29..47604277918 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -2973,6 +2973,33 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
     logAuditEvent(true, "getfileinfo", src);
     return stat;
   }
+
+  /**
+   * Returns true if the file is closed
+   */
+  boolean isFileClosed(String src)
+      throws AccessControlException, UnresolvedLinkException,
+      StandbyException, IOException {
+    FSPermissionChecker pc = getPermissionChecker();
+    checkOperation(OperationCategory.READ);
+    readLock();
+    try {
+      checkOperation(OperationCategory.READ);
+      if (isPermissionEnabled) {
+        checkTraverse(pc, src);
+      }
+      return !INodeFile.valueOf(dir.getINode(src), src).isUnderConstruction();
+    } catch (AccessControlException e) {
+      if (isAuditEnabled() && isExternalInvocation()) {
+        logAuditEvent(false, UserGroupInformation.getCurrentUser(),
+                      getRemoteIp(),
+                      "isFileClosed", src, null, null);
+      }
+      throw e;
+    } finally {
+      readUnlock();
+    }
+  }
 
   /**
    * Create all the necessary directories
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
index 275b9bdbe61..a2e346e055b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
@@ -690,7 +690,12 @@ class NameNodeRpcServer implements NamenodeProtocols {
     metrics.incrFileInfoOps();
     return namesystem.getFileInfo(src, true);
   }
-  
+
+  @Override // ClientProtocol
+  public boolean isFileClosed(String src) throws IOException{
+    return namesystem.isFileClosed(src);
+  }
+  
   @Override // ClientProtocol
   public HdfsFileStatus getFileLinkInfo(String src) throws IOException {
     metrics.incrFileInfoOps();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
index 419ae45b738..d99e4e7209b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
@@ -332,6 +332,14 @@ message GetFileInfoResponseProto {
   optional HdfsFileStatusProto fs = 1;
 }
 
+message IsFileClosedRequestProto {
+  required string src = 1;
+}
+
+message IsFileClosedResponseProto {
+  required bool result = 1;
+}
+
 message GetFileLinkInfoRequestProto {
   required string src = 1;
 }
@@ -498,4 +506,6 @@ service ClientNamenodeProtocol {
       returns(SetBalancerBandwidthResponseProto);
   rpc getDataEncryptionKey(GetDataEncryptionKeyRequestProto)
       returns(GetDataEncryptionKeyResponseProto);
+  rpc isFileClosed(IsFileClosedRequestProto)
+      returns(IsFileClosedResponseProto);
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index ad262c48c86..2353c9a3f3c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -762,4 +762,27 @@ public class TestDistributedFileSystem {
       }
     }
   }
+
+  @Test(timeout=60000)
+  public void testFileCloseStatus() throws IOException {
+    Configuration conf = new HdfsConfiguration();
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
+    DistributedFileSystem fs = cluster.getFileSystem();
+    try {
+      // create a new file.
+      Path file = new Path("/simpleFlush.dat");
+      FSDataOutputStream output = fs.create(file);
+      // write to file
+      output.writeBytes("Some test data");
+      output.flush();
+      assertFalse("File status should be open", fs.isFileClosed(file));
+      output.close();
+      assertTrue("File status should be closed", fs.isFileClosed(file));
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
 }
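
For reference, a client-side usage sketch of the new API (not part of this patch; the WaitForClose class name, the example path, the lease-recovery step and the one-second poll interval are illustrative assumptions only):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hdfs.DistributedFileSystem;

  // Polls DistributedFileSystem#isFileClosed(Path) until the NameNode reports
  // the file as closed, e.g. after asking it to recover an abandoned lease.
  public class WaitForClose {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.get(conf);
      if (!(fs instanceof DistributedFileSystem)) {
        throw new IllegalStateException("isFileClosed is HDFS-specific");
      }
      DistributedFileSystem dfs = (DistributedFileSystem) fs;
      Path file = new Path("/user/example/app.log"); // hypothetical path
      dfs.recoverLease(file);                        // ask the NameNode to start lease recovery
      while (!dfs.isFileClosed(file)) {              // idempotent, read-only RPC; safe to retry
        Thread.sleep(1000);                          // arbitrary poll interval
      }
      System.out.println(file + " is closed");
    }
  }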