HDFS-12190. Enable 'hdfs dfs -stat' to display access time. Contributed by Yongjun Zhang.
parent e3c7300225
commit c6330f22a5
@@ -40,8 +40,10 @@ import org.apache.hadoop.fs.FileStatus;
 * %o: Block size<br>
 * %r: replication<br>
 * %u: User name of owner<br>
-* %y: UTC date as "yyyy-MM-dd HH:mm:ss"<br>
-* %Y: Milliseconds since January 1, 1970 UTC<br>
+* %x: atime UTC date as "yyyy-MM-dd HH:mm:ss"<br>
+* %X: atime Milliseconds since January 1, 1970 UTC<br>
+* %y: mtime UTC date as "yyyy-MM-dd HH:mm:ss"<br>
+* %Y: mtime Milliseconds since January 1, 1970 UTC<br>
 * If the format is not specified, %y is used by default.
 */
 @InterfaceAudience.Private

@@ -62,9 +64,10 @@ class Stat extends FsCommand {
 "octal (%a) and symbolic (%A), filesize in" + NEWLINE +
 "bytes (%b), type (%F), group name of owner (%g)," + NEWLINE +
 "name (%n), block size (%o), replication (%r), user name" + NEWLINE +
-"of owner (%u), modification date (%y, %Y)." + NEWLINE +
-"%y shows UTC date as \"yyyy-MM-dd HH:mm:ss\" and" + NEWLINE +
-"%Y shows milliseconds since January 1, 1970 UTC." + NEWLINE +
+"of owner (%u), access date (%x, %X)." + NEWLINE +
+"modification date (%y, %Y)." + NEWLINE +
+"%x and %y show UTC date as \"yyyy-MM-dd HH:mm:ss\" and" + NEWLINE +
+"%X and %Y show milliseconds since January 1, 1970 UTC." + NEWLINE +
 "If the format is not specified, %y is used by default." + NEWLINE;

 protected final SimpleDateFormat timeFmt;

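The updated help text says %x and %y render a UTC wall-clock value as "yyyy-MM-dd HH:mm:ss", while %X and %Y print raw milliseconds since the epoch. A minimal illustrative sketch of a formatter configured that way (the class and variable names below are not from the patch):

```java
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

// Illustrative sketch: the "yyyy-MM-dd HH:mm:ss" UTC rendering described for
// %x/%y, next to the raw millisecond value printed by %X/%Y.
public class StatTimeFormatSketch {
  public static void main(String[] args) {
    long millis = 0L;                                  // what %X/%Y would print
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
    System.out.println(fmt.format(new Date(millis)));  // "1970-01-01 00:00:00"
    System.out.println(millis);                        // "0"
  }
}
```
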
@@ -127,6 +130,12 @@ class Stat extends FsCommand {
 case 'u':
 buf.append(stat.getOwner());
 break;
+case 'x':
+buf.append(timeFmt.format(new Date(stat.getAccessTime())));
+break;
+case 'X':
+buf.append(stat.getAccessTime());
+break;
 case 'y':
 buf.append(timeFmt.format(new Date(stat.getModificationTime())));
 break;

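The new cases read FileStatus.getAccessTime() the same way the existing %y/%Y cases read getModificationTime(). A self-contained sketch of that dispatch, assuming a hypothetical helper outside the actual Stat command class:

```java
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

import org.apache.hadoop.fs.FileStatus;

// Hypothetical helper (not the Stat class itself) showing which FileStatus
// getter backs each time-related format character.
public class TimeSpecifierSketch {
  private static SimpleDateFormat utcFormat() {
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
    return fmt;
  }

  static String expand(char spec, FileStatus stat) {
    switch (spec) {
      case 'x': return utcFormat().format(new Date(stat.getAccessTime()));
      case 'X': return String.valueOf(stat.getAccessTime());
      case 'y': return utcFormat().format(new Date(stat.getModificationTime()));
      case 'Y': return String.valueOf(stat.getModificationTime());
      default:  return String.valueOf(spec);
    }
  }
}
```
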
@@ -676,11 +676,11 @@ stat

 Usage: `hadoop fs -stat [format] <path> ...`

-Print statistics about the file/directory at \<path\> in the specified format. Format accepts permissions in octal (%a) and symbolic (%A), filesize in bytes (%b), type (%F), group name of owner (%g), name (%n), block size (%o), replication (%r), user name of owner(%u), and modification date (%y, %Y). %y shows UTC date as "yyyy-MM-dd HH:mm:ss" and %Y shows milliseconds since January 1, 1970 UTC. If the format is not specified, %y is used by default.
+Print statistics about the file/directory at \<path\> in the specified format. Format accepts permissions in octal (%a) and symbolic (%A), filesize in bytes (%b), type (%F), group name of owner (%g), name (%n), block size (%o), replication (%r), user name of owner(%u), access date(%x, %X), and modification date (%y, %Y). %x and %y show UTC date as "yyyy-MM-dd HH:mm:ss", and %X and %Y show milliseconds since January 1, 1970 UTC. If the format is not specified, %y is used by default.

 Example:

-* `hadoop fs -stat "%F %a %u:%g %b %y %n" /file`
+* `hadoop fs -stat "type:%F perm:%a %u:%g size:%b mtime:%y atime:%x name:%n" /file`

 Exit Code: Returns 0 on success and -1 on error.

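The updated example drives the new %x specifier from the command line; the same invocation can also be issued programmatically through FsShell and ToolRunner. A hedged sketch of that (the path "/file" is a placeholder, as in the documentation):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

// Sketch only: runs the documented stat example through the shell API.
// Exit code 0 means success, -1 an error, matching the documented contract.
public class StatInvocationSketch {
  public static void main(String[] args) throws Exception {
    int rc = ToolRunner.run(new Configuration(), new FsShell(),
        new String[] {"-stat",
            "type:%F perm:%a %u:%g size:%b mtime:%y atime:%x name:%n",
            "/file"});
    System.exit(rc);
  }
}
```
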
@@ -919,15 +919,19 @@
 </comparator>
 <comparator>
 <type>RegexpComparator</type>
-<expected-output>^( |\t)*of owner \(%u\), modification date \(%y, %Y\).( )*</expected-output>
+<expected-output>^( |\t)*of owner \(%u\), access date \(%x, %X\).( )*</expected-output>
 </comparator>
 <comparator>
 <type>RegexpComparator</type>
-<expected-output>^( |\t)*%y shows UTC date as "yyyy-MM-dd HH:mm:ss" and( )*</expected-output>
+<expected-output>^( |\t)*modification date \(%y, %Y\).( )*</expected-output>
 </comparator>
 <comparator>
 <type>RegexpComparator</type>
-<expected-output>^( |\t)*%Y shows milliseconds since January 1, 1970 UTC.( )*</expected-output>
+<expected-output>^( |\t)*%x and %y show UTC date as "yyyy-MM-dd HH:mm:ss" and( )*</expected-output>
 </comparator>
+<comparator>
+<type>RegexpComparator</type>
+<expected-output>^( |\t)*%X and %Y show milliseconds since January 1, 1970 UTC.( )*</expected-output>
+</comparator>
 <comparator>
 <type>RegexpComparator</type>

@@ -36,12 +36,12 @@ import java.util.zip.GZIPOutputStream;

import com.google.common.base.Supplier;
import com.google.common.collect.Lists;

import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.Level;
import org.junit.Test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.AclEntry;

@@ -65,6 +65,7 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.rules.Timeout;
 import org.junit.AfterClass;

@@ -115,6 +116,7 @@ public class TestDFSShell {
 GenericTestUtils.getTestDir("TestDFSShell").getAbsolutePath());
 conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, true);
 conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
+conf.setLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, 1000);

 miniCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
 miniCluster.waitActive();

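The extra setLong line matters because the NameNode only records access-time updates at the granularity of dfs.namenode.accesstime.precision; lowering it to 1000 ms lets the test observe fresh atimes. A small illustrative sketch of the same setting outside MiniDFSCluster:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;

// Illustrative: DFS_NAMENODE_ACCESSTIME_PRECISION_KEY resolves to
// "dfs.namenode.accesstime.precision"; a value of 0 disables atime updates.
public class AccessTimePrecisionSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, 1000L);
    System.out.println(conf.get(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY));
  }
}
```
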
@@ -2002,8 +2004,12 @@ public class TestDFSShell {
 DFSTestUtil.createFile(dfs, testFile2, 2 * BLOCK_SIZE, (short) 3, 0);
 final FileStatus status1 = dfs.getFileStatus(testDir1);
 final String mtime1 = fmt.format(new Date(status1.getModificationTime()));
+final String atime1 = fmt.format(new Date(status1.getAccessTime()));
+long now = Time.now();
+dfs.setTimes(testFile2, now + 3000, now + 6000);
 final FileStatus status2 = dfs.getFileStatus(testFile2);
 final String mtime2 = fmt.format(new Date(status2.getModificationTime()));
+final String atime2 = fmt.format(new Date(status2.getAccessTime()));

 final ByteArrayOutputStream out = new ByteArrayOutputStream();
 System.setOut(new PrintStream(out));

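The test pins distinct mtime and atime values on testFile2 through FileSystem.setTimes(path, mtime, atime) so that %y and %x produce distinguishable strings. A hedged sketch of that call against an arbitrary FileSystem (the class and method names below are illustrative):

```java
import java.io.IOException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch: setTimes takes milliseconds since the epoch; passing -1 for either
// argument leaves that timestamp untouched.
public class SetTimesSketch {
  static void stampTimes(FileSystem fs, Path path) throws IOException {
    long now = System.currentTimeMillis();
    fs.setTimes(path, now + 3000, now + 6000);        // mtime, then atime
    FileStatus status = fs.getFileStatus(path);
    System.out.println("mtime=" + status.getModificationTime()
        + " atime=" + status.getAccessTime());
  }
}
```
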
@@ -2036,17 +2042,19 @@ public class TestDFSShell {
 out.toString().contains(String.valueOf(octal)));

 out.reset();
-doFsStat(dfs.getConf(), "%F %a %A %u:%g %b %y %n", testDir1, testFile2);
+doFsStat(dfs.getConf(), "%F %a %A %u:%g %b %x %y %n", testDir1, testFile2);

 n = status2.getPermission().toShort();
 octal = (n>>>9&1)*1000 + (n>>>6&7)*100 + (n>>>3&7)*10 + (n&7);
 assertTrue(out.toString(), out.toString().contains(mtime1));
+assertTrue(out.toString(), out.toString().contains(atime1));
 assertTrue(out.toString(), out.toString().contains("regular file"));
 assertTrue(out.toString(),
 out.toString().contains(status2.getPermission().toString()));
 assertTrue(out.toString(),
 out.toString().contains(String.valueOf(octal)));
 assertTrue(out.toString(), out.toString().contains(mtime2));
+assertTrue(out.toString(), out.toString().contains(atime2));
 }

 private static void doFsStat(Configuration conf, String format, Path... files)