diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index a22b760c3cf..35cd55ea068 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -361,6 +361,9 @@ Release 2.7.0 - UNRELEASED
HADOOP-10748. HttpServer2 should not load JspServlet. (wheat9)
+ HADOOP-6857. FsShell should report raw disk usage including replication
+ factor. (Byron Wong via shv)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
index f48ba160ba1..5c1dbf032ae 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
@@ -26,6 +26,7 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;
@@ -117,7 +118,7 @@ class FsUsage extends FsCommand {
"Note that, even without the -s option, this only shows size summaries " +
"one level deep into a directory.\n\n" +
"The output is in the form \n" +
- "\tsize\tname(full path)\n";
+ "\tsize\tdisk space consumed\tname(full path)\n";
protected boolean summary = false;
@@ -132,7 +133,7 @@ class FsUsage extends FsCommand {
@Override
protected void processPathArgument(PathData item) throws IOException {
- usagesTable = new TableBuilder(2);
+ usagesTable = new TableBuilder(3);
// go one level deep on dirs from cmdline unless in summary mode
if (!summary && item.stat.isDirectory()) {
recursePath(item);
@@ -144,16 +145,12 @@ class FsUsage extends FsCommand {
@Override
protected void processPath(PathData item) throws IOException {
- long length;
- if (item.stat.isDirectory()) {
- length = item.fs.getContentSummary(item.path).getLength();
- } else {
- length = item.stat.getLen();
- }
- usagesTable.addRow(formatSize(length), item);
+ ContentSummary contentSummary = item.fs.getContentSummary(item.path);
+ long length = contentSummary.getLength();
+ long spaceConsumed = contentSummary.getSpaceConsumed();
+ usagesTable.addRow(formatSize(length), formatSize(spaceConsumed), item);
}
}
-
/** show disk usage summary */
public static class Dus extends Du {
public static final String NAME = "dus";
diff --git a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
index c6e5fc5f0c2..dcf8fb42000 100644
--- a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
+++ b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
@@ -204,7 +204,7 @@
RegexpComparator
- ^\s*size\s+name\(full path\)\s*
+ ^\s*size\s+disk space consumed\s+name\(full path\)\s*
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/DirectoryWithQuotaFeature.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/DirectoryWithQuotaFeature.java
index 6f326f8a24a..05742b2f686 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/DirectoryWithQuotaFeature.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/DirectoryWithQuotaFeature.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
+import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
/**
* Quota feature for {@link INodeDirectory}.
@@ -68,7 +69,7 @@ public final class DirectoryWithQuotaFeature implements INode.Feature {
final ContentSummaryComputationContext summary) {
final long original = summary.getCounts().get(Content.DISKSPACE);
long oldYieldCount = summary.getYieldCount();
- dir.computeDirectoryContentSummary(summary);
+ dir.computeDirectoryContentSummary(summary, Snapshot.CURRENT_STATE_ID);
// Check only when the content has not changed in the middle.
if (oldYieldCount == summary.getYieldCount()) {
checkDiskspace(dir, summary.getCounts().get(Content.DISKSPACE) - original);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index a75323017fa..797a62cc107 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -615,13 +615,13 @@ public class INodeDirectory extends INodeWithAdditionalFields
if (q != null) {
return q.computeContentSummary(this, summary);
} else {
- return computeDirectoryContentSummary(summary);
+ return computeDirectoryContentSummary(summary, Snapshot.CURRENT_STATE_ID);
}
}
- ContentSummaryComputationContext computeDirectoryContentSummary(
- ContentSummaryComputationContext summary) {
- ReadOnlyList<INode> childrenList = getChildrenList(Snapshot.CURRENT_STATE_ID);
+ protected ContentSummaryComputationContext computeDirectoryContentSummary(
+ ContentSummaryComputationContext summary, int snapshotId) {
+ ReadOnlyList<INode> childrenList = getChildrenList(snapshotId);
// Explicit traversing is done to enable repositioning after relinquishing
// and reacquiring locks.
for (int i = 0; i < childrenList.size(); i++) {
@@ -643,7 +643,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
break;
}
// Obtain the children list again since it may have been modified.
- childrenList = getChildrenList(Snapshot.CURRENT_STATE_ID);
+ childrenList = getChildrenList(snapshotId);
// Reposition in case the children list is changed. Decrement by 1
// since it will be incremented when loops.
i = nextChild(childrenList, childName) - 1;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/Snapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/Snapshot.java
index 56d3418df34..59e618abb92 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/Snapshot.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/Snapshot.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.server.namenode.AclFeature;
+import org.apache.hadoop.hdfs.server.namenode.ContentSummaryComputationContext;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormat;
import org.apache.hadoop.hdfs.server.namenode.FSImageSerialization;
import org.apache.hadoop.hdfs.server.namenode.INode;
@@ -172,7 +173,14 @@ public class Snapshot implements Comparable<byte[]> {
public INode getChild(byte[] name, int snapshotId) {
return getParent().getChild(name, snapshotId);
}
-
+
+ @Override
+ public ContentSummaryComputationContext computeContentSummary(
+ ContentSummaryComputationContext summary) {
+ int snapshotId = getParent().getSnapshot(getLocalNameBytes()).getId();
+ return computeDirectoryContentSummary(summary, snapshotId);
+ }
+
@Override
public String getFullPathName() {
return getSnapshotPath(getParent().getFullPathName(), getLocalName());
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index 2daf69dd6a0..59c0b2c3a08 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -63,6 +63,7 @@ import static org.apache.hadoop.hdfs.server.namenode.AclTestHelpers.aclEntry;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.*;
+import static org.hamcrest.core.StringContains.containsString;
import com.google.common.collect.Lists;
@@ -197,8 +198,10 @@ public class TestDFSShell {
@Test (timeout = 30000)
public void testDu() throws IOException {
+ int replication = 2;
Configuration conf = new HdfsConfiguration();
- MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+ MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+ .numDataNodes(replication).build();
DistributedFileSystem fs = cluster.getFileSystem();
PrintStream psBackup = System.out;
ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -217,6 +220,10 @@ public class TestDFSShell {
Path myFile2 = new Path("/test/dir/file2");
writeFile(fs, myFile2);
assertTrue(fs.exists(myFile2));
+ Long myFileLength = fs.getFileStatus(myFile).getLen();
+ Long myFileDiskUsed = myFileLength * replication;
+ Long myFile2Length = fs.getFileStatus(myFile2).getLen();
+ Long myFile2DiskUsed = myFile2Length * replication;
String[] args = new String[2];
args[0] = "-du";
@@ -232,9 +239,37 @@ public class TestDFSShell {
String returnString = out.toString();
out.reset();
// Check if size matchs as expected
- assertTrue(returnString.contains("22"));
- assertTrue(returnString.contains("23"));
+ assertThat(returnString, containsString(myFileLength.toString()));
+ assertThat(returnString, containsString(myFileDiskUsed.toString()));
+ assertThat(returnString, containsString(myFile2Length.toString()));
+ assertThat(returnString, containsString(myFile2DiskUsed.toString()));
+ // Check that -du -s reports the state of the snapshot
+ String snapshotName = "ss1";
+ Path snapshotPath = new Path(myPath, ".snapshot/" + snapshotName);
+ fs.allowSnapshot(myPath);
+ assertThat(fs.createSnapshot(myPath, snapshotName), is(snapshotPath));
+ assertThat(fs.delete(myFile, false), is(true));
+ assertThat(fs.exists(myFile), is(false));
+
+ args = new String[3];
+ args[0] = "-du";
+ args[1] = "-s";
+ args[2] = snapshotPath.toString();
+ val = -1;
+ try {
+ val = shell.run(args);
+ } catch (Exception e) {
+ System.err.println("Exception raised from DFSShell.run " +
+ e.getLocalizedMessage());
+ }
+ assertThat(val, is(0));
+ returnString = out.toString();
+ out.reset();
+ Long combinedLength = myFileLength + myFile2Length;
+ Long combinedDiskUsed = myFileDiskUsed + myFile2DiskUsed;
+ assertThat(returnString, containsString(combinedLength.toString()));
+ assertThat(returnString, containsString(combinedDiskUsed.toString()));
} finally {
System.setOut(psBackup);
cluster.shutdown();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
index 087c3ab48aa..8939f87ac36 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
@@ -1086,7 +1086,7 @@
RegexpComparator
- ^15\s+/data15bytes
+ ^15\s+15\s+/data15bytes
@@ -1104,7 +1104,7 @@
RegexpComparator
- ^15\s+data15bytesZZ
+ ^15\s+15\s+data15bytesZZ
@@ -1125,19 +1125,19 @@
RegexpComparator
- ^120\s+data120bytes
+ ^120\s+120\s+data120bytes
RegexpComparator
- ^15\s+data15bytes
+ ^15\s+15\s+data15bytes
RegexpComparator
- ^30\s+data30bytes
+ ^30\s+30\s+data30bytes
RegexpComparator
- ^60\s+data60bytes
+ ^60\s+60\s+data60bytes
@@ -1155,7 +1155,7 @@
RegexpComparator
- ^15\s+/dir0/data15bytes
+ ^15\s+15\s+/dir0/data15bytes
@@ -1173,7 +1173,7 @@
RegexpComparator
- ^15\s+dir0/data15bytes
+ ^15\s+15\s+dir0/data15bytes
@@ -1194,19 +1194,19 @@
RegexpComparator
- ^15( |\t)*/dir0/data15bytes
+ ^15( |\t)*15( |\t)*/dir0/data15bytes
RegexpComparator
- ^30( |\t)*/dir0/data30bytes
+ ^30( |\t)*30( |\t)*/dir0/data30bytes
RegexpComparator
- ^60( |\t)*/dir0/data60bytes
+ ^60( |\t)*60( |\t)*/dir0/data60bytes
RegexpComparator
- ^120( |\t)*/dir0/data120bytes
+ ^120( |\t)*120( |\t)*/dir0/data120bytes
@@ -1223,7 +1223,7 @@
RegexpComparator
- ^15\s+hdfs:///data15bytes
+ ^15\s+15\s+hdfs:///data15bytes
@@ -1243,19 +1243,19 @@
RegexpComparator
- ^120\s+hdfs:///data120bytes
+ ^120\s+120\s+hdfs:///data120bytes
RegexpComparator
- ^15\s+hdfs:///data15bytes
+ ^15\s+15\s+hdfs:///data15bytes
RegexpComparator
- ^30\s+hdfs:///data30bytes
+ ^30\s+30\s+hdfs:///data30bytes
RegexpComparator
- ^60\s+hdfs:///data60bytes
+ ^60\s+60\s+hdfs:///data60bytes
@@ -1273,7 +1273,7 @@
RegexpComparator
- ^15\s+hdfs:///dir0/data15bytes
+ ^15\s+15\s+hdfs:///dir0/data15bytes
@@ -1292,11 +1292,11 @@
RegexpComparator
- ^15\s+hdfs:///dir0/data15bytes
+ ^15\s+15\s+hdfs:///dir0/data15bytes
RegexpComparator
- ^1\.0 K\s+hdfs:///dir0/data1k
+ ^1\.0 K\s+1\.0 K\s+hdfs:///dir0/data1k
@@ -1317,19 +1317,19 @@
RegexpComparator
- ^15( |\t)*hdfs:///dir0/data15bytes
+ ^15( |\t)*15( |\t)*hdfs:///dir0/data15bytes
RegexpComparator
- ^30( |\t)*hdfs:///dir0/data30bytes
+ ^30( |\t)*30( |\t)*hdfs:///dir0/data30bytes
RegexpComparator
- ^60( |\t)*hdfs:///dir0/data60bytes
+ ^60( |\t)*60( |\t)*hdfs:///dir0/data60bytes
RegexpComparator
- ^120( |\t)*hdfs:///dir0/data120bytes
+ ^120( |\t)*120( |\t)*hdfs:///dir0/data120bytes
@@ -1346,7 +1346,7 @@
RegexpComparator
- ^15( |\t)*NAMENODE/data15bytes
+ ^15( |\t)*15( |\t)*NAMENODE/data15bytes
@@ -1366,19 +1366,19 @@
RegexpComparator
- ^15( |\t)*NAMENODE/data15bytes
+ ^15( |\t)*15( |\t)*NAMENODE/data15bytes
RegexpComparator
- ^30( |\t)*NAMENODE/data30bytes
+ ^30( |\t)*30( |\t)*NAMENODE/data30bytes
RegexpComparator
- ^60( |\t)*NAMENODE/data60bytes
+ ^60( |\t)*60( |\t)*NAMENODE/data60bytes
RegexpComparator
- ^120( |\t)*NAMENODE/data120bytes
+ ^120( |\t)*120( |\t)*NAMENODE/data120bytes
@@ -1396,7 +1396,7 @@
RegexpComparator
- ^15( |\t)*NAMENODE/dir0/data15bytes
+ ^15( |\t)*15( |\t)*NAMENODE/dir0/data15bytes
@@ -1417,19 +1417,19 @@
RegexpComparator
- ^15( |\t)*NAMENODE/dir0/data15bytes
+ ^15( |\t)*15( |\t)*NAMENODE/dir0/data15bytes
RegexpComparator
- ^30( |\t)*NAMENODE/dir0/data30bytes
+ ^30( |\t)*30( |\t)*NAMENODE/dir0/data30bytes
RegexpComparator
- ^60( |\t)*NAMENODE/dir0/data60bytes
+ ^60( |\t)*60( |\t)*NAMENODE/dir0/data60bytes
RegexpComparator
- ^120( |\t)*NAMENODE/dir0/data120bytes
+ ^120( |\t)*120( |\t)*NAMENODE/dir0/data120bytes
@@ -1462,7 +1462,7 @@
RegexpComparator
- ^450\s+/dir0
+ ^450\s+450\s+/dir0
@@ -1494,7 +1494,7 @@
RegexpComparator
- ^450\s+dir0
+ ^450\s+450\s+dir0
@@ -1532,7 +1532,7 @@
RegexpComparator
- ^450\s+/dir0
+ ^450\s+450\s+/dir0
@@ -1565,7 +1565,7 @@
RegexpComparator
- ^450\s+hdfs:///dir0
+ ^450\s+450\s+hdfs:///dir0
@@ -1603,7 +1603,7 @@
RegexpComparator
- ^450\s+hdfs:///dir0
+ ^450\s+450\s+hdfs:///dir0
@@ -1635,7 +1635,7 @@
RegexpComparator
- ^450\s+NAMENODE/dir0
+ ^450\s+450\s+NAMENODE/dir0
@@ -1673,7 +1673,7 @@
RegexpComparator
- ^450\s+NAMENODE/dir0
+ ^450\s+450\s+NAMENODE/dir0
@@ -4073,7 +4073,7 @@
RegexpComparator
- ^15\s+/data15bytes
+ ^15\s+15\s+/data15bytes
@@ -4092,7 +4092,7 @@
RegexpComparator
- ^15\s+data15bytes
+ ^15\s+15\s+data15bytes
@@ -4110,7 +4110,7 @@
RegexpComparator
- ^15\s+/dir0/dir1/data/data15bytes
+ ^15\s+15\s+/dir0/dir1/data/data15bytes
@@ -4128,7 +4128,7 @@
RegexpComparator
- ^15\s+dir0/dir1/data/data15bytes
+ ^15\s+15\s+dir0/dir1/data/data15bytes
@@ -4146,11 +4146,11 @@
RegexpComparator
- ^15\s+/dir0/data15bytes
+ ^15\s+15\s+/dir0/data15bytes
RegexpComparator
- ^30\s+/dir0/data30bytes
+ ^30\s+30\s+/dir0/data30bytes
@@ -4168,11 +4168,11 @@
RegexpComparator
- ^15\s+dir0/data15bytes
+ ^15\s+15\s+dir0/data15bytes
RegexpComparator
- ^30\s+dir0/data30bytes
+ ^30\s+30\s+dir0/data30bytes
@@ -4318,7 +4318,7 @@
RegexpComparator
- ^15\s+hdfs:///data15bytes
+ ^15\s+15\s+hdfs:///data15bytes
@@ -4336,7 +4336,7 @@
RegexpComparator
- ^15\s+hdfs:///dir1/data/data15bytes
+ ^15\s+15\s+hdfs:///dir1/data/data15bytes
@@ -4354,11 +4354,11 @@
RegexpComparator
- ^15\s+hdfs:///dir0/data15bytes
+ ^15\s+15\s+hdfs:///dir0/data15bytes
RegexpComparator
- ^30\s+hdfs:///dir0/data30bytes
+ ^30\s+30\s+hdfs:///dir0/data30bytes
@@ -4442,7 +4442,7 @@
RegexpComparator
- ^15\s+NAMENODE/data15bytes
+ ^15\s+15\s+NAMENODE/data15bytes
@@ -4460,7 +4460,7 @@
RegexpComparator
- ^15\s+NAMENODE/dir1/data/data15bytes
+ ^15\s+15\s+NAMENODE/dir1/data/data15bytes
@@ -4478,11 +4478,11 @@
RegexpComparator
- ^15\s+NAMENODE/dir0/data15bytes
+ ^15\s+15\s+NAMENODE/dir0/data15bytes
RegexpComparator
- ^30\s+NAMENODE/dir0/data30bytes
+ ^30\s+30\s+NAMENODE/dir0/data30bytes
@@ -4567,7 +4567,7 @@
RegexpComparator
- ^15\s+/data15bytes
+ ^15\s+15\s+/data15bytes
@@ -4586,7 +4586,7 @@
RegexpComparator
- ^15\s+data15bytes
+ ^15\s+15\s+data15bytes
@@ -4604,7 +4604,7 @@
RegexpComparator
- ^15\s+/dir0/dir1/data/data15bytes
+ ^15\s+15\s+/dir0/dir1/data/data15bytes
@@ -4622,7 +4622,7 @@
RegexpComparator
- ^15\s+dir0/dir1/data/data15bytes
+ ^15\s+15\s+dir0/dir1/data/data15bytes
@@ -4640,11 +4640,11 @@
RegexpComparator
- ^15\s+/dir0/data15bytes
+ ^15\s+15\s+/dir0/data15bytes
RegexpComparator
- ^30\s+/dir0/data30bytes
+ ^30\s+30\s+/dir0/data30bytes
@@ -4662,11 +4662,11 @@
RegexpComparator
- ^15\s+dir0/data15bytes
+ ^15\s+15\s+dir0/data15bytes
RegexpComparator
- ^30\s+dir0/data30bytes
+ ^30\s+30\s+dir0/data30bytes
@@ -4813,7 +4813,7 @@
RegexpComparator
- ^15\s+hdfs:///data15bytes
+ ^15\s+15\s+hdfs:///data15bytes
@@ -4833,7 +4833,7 @@
RegexpComparator
- ^15\s+hdfs:///dir0/dir1/data/data15bytes
+ ^15\s+15\s+hdfs:///dir0/dir1/data/data15bytes
@@ -4851,11 +4851,11 @@
RegexpComparator
- ^15\s+hdfs:///dir0/data15bytes
+ ^15\s+15\s+hdfs:///dir0/data15bytes
RegexpComparator
- ^30\s+hdfs:///dir0/data30bytes
+ ^30\s+30\s+hdfs:///dir0/data30bytes
@@ -4940,7 +4940,7 @@
RegexpComparator
- ^15\s+NAMENODE/data15bytes
+ ^15\s+15\s+NAMENODE/data15bytes
@@ -4960,7 +4960,7 @@
RegexpComparator
- ^15\s+NAMENODE/dir0/dir1/data/data15bytes
+ ^15\s+15\s+NAMENODE/dir0/dir1/data/data15bytes
@@ -4978,11 +4978,11 @@
RegexpComparator
- ^15\s+NAMENODE/dir0/data15bytes
+ ^15\s+15\s+NAMENODE/dir0/data15bytes
RegexpComparator
- ^30\s+NAMENODE/dir0/data30bytes
+ ^30\s+30\s+NAMENODE/dir0/data30bytes
@@ -5605,7 +5605,7 @@
RegexpComparator
- ^0\s+/dir0
+ ^0\s+0\s+/dir0
@@ -5623,7 +5623,7 @@
RegexpComparator
- ^0\s+/dir0/b
+ ^0\s+0\s+/dir0/b
@@ -5641,7 +5641,7 @@
RegexpComparator
- ^0\s+dir0
+ ^0\s+0\s+dir0
@@ -5661,19 +5661,19 @@
RegexpComparator
- ^0\s+/dir0
+ ^0\s+0\s+/dir0
RegexpComparator
- ^0\s+/dir1
+ ^0\s+0\s+/dir1
RegexpComparator
- ^0\s+/dir2
+ ^0\s+0\s+/dir2
RegexpComparator
- ^0\s+/dir3
+ ^0\s+0\s+/dir3
@@ -5693,19 +5693,19 @@
RegexpComparator
- ^0\s+dir0
+ ^0\s+0\s+dir0
RegexpComparator
- ^0\s+dir1
+ ^0\s+0\s+dir1
RegexpComparator
- ^0\s+dir2
+ ^0\s+0\s+dir2
RegexpComparator
- ^0\s+dir3
+ ^0\s+0\s+dir3
@@ -5756,7 +5756,7 @@
RegexpComparator
- ^0\s+hdfs:///dir0
+ ^0\s+0\s+hdfs:///dir0
@@ -5773,19 +5773,19 @@
RegexpComparator
- ^0\s+hdfs:///dir0
+ ^0\s+0\s+hdfs:///dir0
RegexpComparator
- ^0\s+hdfs:///dir1
+ ^0\s+0\s+hdfs:///dir1
RegexpComparator
- ^0\s+hdfs:///dir2
+ ^0\s+0\s+hdfs:///dir2
RegexpComparator
- ^0\s+hdfs:///dir3
+ ^0\s+0\s+hdfs:///dir3
@@ -5836,7 +5836,7 @@
RegexpComparator
- ^0\s+NAMENODE/dir0
+ ^0\s+0\s+NAMENODE/dir0
@@ -5853,19 +5853,19 @@
RegexpComparator
- ^0\s+NAMENODE/dir0
+ ^0\s+0\s+NAMENODE/dir0
RegexpComparator
- ^0\s+NAMENODE/dir1
+ ^0\s+0\s+NAMENODE/dir1
RegexpComparator
- ^0\s+NAMENODE/dir2
+ ^0\s+0\s+NAMENODE/dir2
RegexpComparator
- ^0\s+NAMENODE/dir3
+ ^0\s+0\s+NAMENODE/dir3
@@ -6219,7 +6219,7 @@
RegexpComparator
- ^0\s+/user/file0
+ ^0\s+0\s+/user/file0
@@ -6252,7 +6252,7 @@
RegexpComparator
- ^0\s+file0
+ ^0\s+0\s+file0
@@ -6270,9 +6270,9 @@
RegexpComparator
- ^0( |\t)*file0
- ^0( |\t)*file1
- ^0( |\t)*file2
+ ^0( |\t)*0( |\t)*file0
+ ^0( |\t)*0( |\t)*file1
+ ^0( |\t)*0( |\t)*file2
@@ -6308,7 +6308,7 @@
RegexpComparator
- ^0\s+hdfs:///user/file0
+ ^0\s+0\s+hdfs:///user/file0
@@ -6325,9 +6325,9 @@
RegexpComparator
- ^0( |\t)*hdfs:///file0
- ^0( |\t)*hdfs:///file1
- ^0( |\t)*hdfs:///file2
+ ^0( |\t)*0( |\t)*hdfs:///file0
+ ^0( |\t)*0( |\t)*hdfs:///file1
+ ^0( |\t)*0( |\t)*hdfs:///file2
@@ -6361,7 +6361,7 @@
RegexpComparator
- ^0\s+NAMENODE/user/file0
+ ^0\s+0\s+NAMENODE/user/file0
@@ -6378,9 +6378,9 @@
RegexpComparator
- ^0\s+hdfs://\w+[-.a-z0-9]*:[0-9]+/file0
- ^0\s+hdfs://\w+[-.a-z0-9]*:[0-9]+/file1
- ^0\s+hdfs://\w+[-.a-z0-9]*:[0-9]+/file2
+ ^0\s+0\s+hdfs://\w+[-.a-z0-9]*:[0-9]+/file0
+ ^0\s+0\s+hdfs://\w+[-.a-z0-9]*:[0-9]+/file1
+ ^0\s+0\s+hdfs://\w+[-.a-z0-9]*:[0-9]+/file2