HDFS-8545. Refactor FS#getUsed() to use ContentSummary and add an API to fetch the total file length from a specific path (Contributed by J.Andreina)
(cherry picked from commit 7d2d16f4ee)
parent 16db203a83
commit dea66a8392
@@ -2067,12 +2067,13 @@ public abstract class FileSystem extends Configured implements Closeable {
   /** Return the total size of all files in the filesystem. */
   public long getUsed() throws IOException {
-    long used = 0;
-    RemoteIterator<LocatedFileStatus> files = listFiles(new Path("/"), true);
-    while (files.hasNext()) {
-      used += files.next().getLen();
-    }
-    return used;
+    Path path = new Path("/");
+    return getUsed(path);
+  }
+
+  /** Return the total size of all files from a specified path. */
+  public long getUsed(Path path) throws IOException {
+    return getContentSummary(path).getLength();
   }

   /**
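Illustrative usage (editor's sketch, not part of the patch): after this hunk, FileSystem#getUsed() simply delegates to the new getUsed(Path) overload, which reads ContentSummary#getLength() for the given path instead of recursively listing every file on the client. A minimal example, assuming a default Configuration that points at a reachable filesystem; the /data path is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class GetUsedExample {
      public static void main(String[] args) throws Exception {
        // Connect to the filesystem named by fs.defaultFS in the loaded configuration.
        FileSystem fs = FileSystem.get(new Configuration());

        // Total length of all files in the filesystem; now implemented as getUsed(new Path("/")).
        long totalUsed = fs.getUsed();

        // Total length of all files under a specific (hypothetical) path,
        // answered by the ContentSummary of that path.
        long dataUsed = fs.getUsed(new Path("/data"));

        System.out.println("total=" + totalUsed + ", /data=" + dataUsed);
      }
    }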
@@ -390,6 +390,12 @@ public class FilterFileSystem extends FileSystem {
     return fs.getUsed();
   }

+  /** Return the total size of all files from a specified path.*/
+  @Override
+  public long getUsed(Path path) throws IOException {
+    return fs.getUsed(path);
+  }
+
   @Override
   public long getDefaultBlockSize() {
     return fs.getDefaultBlockSize();
@@ -1237,6 +1237,12 @@ public class HarFileSystem extends FileSystem {
     return fs.getUsed();
   }

+  /** Return the total size of all files from a specified path.*/
+  @Override
+  public long getUsed(Path path) throws IOException {
+    return fs.getUsed(path);
+  }
+
   @SuppressWarnings("deprecation")
   @Override
   public long getDefaultBlockSize() {
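Editor's note (reasoning added here, not stated in the patch): FilterFileSystem and HarFileSystem already forward getUsed() to the wrapped FileSystem, and the new getUsed(Path) overload is overridden to forward in the same way, so both accessors keep reporting against the underlying filesystem. A hypothetical subclass, shown only to illustrate that delegation, inherits the behavior and can decorate it:

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FilterFileSystem;
    import org.apache.hadoop.fs.Path;

    // Hypothetical wrapper: the class name and logging are illustrative only.
    class LoggingFileSystem extends FilterFileSystem {
      LoggingFileSystem(FileSystem wrapped) {
        super(wrapped);
      }

      @Override
      public long getUsed(Path path) throws IOException {
        long used = super.getUsed(path); // FilterFileSystem forwards this to the wrapped fs
        System.out.println(path + " uses " + used + " bytes");
        return used;
      }
    }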
@@ -766,6 +766,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9295. Add a thorough test of the full KMS code path.
     (Daniel Templeton via zhz)

+    HDFS-8545. Refactor FS#getUsed() to use ContentSummary and add an API to fetch
+    the total file length from a specific path (J.Andreina via vinayakumarb)
+
   OPTIMIZATIONS

     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than
@@ -1151,4 +1151,32 @@ public class TestDistributedFileSystem {
       cluster.shutdown();
     }
   }
+
+  @Test(timeout = 30000)
+  public void testTotalDfsUsed() throws Exception {
+    Configuration conf = new HdfsConfiguration();
+    MiniDFSCluster cluster = null;
+    try {
+      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+      FileSystem fs = cluster.getFileSystem();
+      // create file under root
+      FSDataOutputStream File1 = fs.create(new Path("/File1"));
+      File1.write("hi".getBytes());
+      File1.close();
+      // create file under sub-folder
+      FSDataOutputStream File2 = fs.create(new Path("/Folder1/File2"));
+      File2.write("hi".getBytes());
+      File2.close();
+      // getUsed(Path) should return total len of all the files from a path
+      assertEquals(2, fs.getUsed(new Path("/Folder1")));
+      // getUsed() should return total length of all files in filesystem
+      assertEquals(4, fs.getUsed());
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+        cluster = null;
+      }
+    }
+  }
+
 }
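Beyond the unit test above, a quick cross-check (hypothetical helper, not part of the commit) follows directly from the new definition: getUsed(Path) returns getContentSummary(path).getLength(), so the two calls should agree for any path:

    import java.io.IOException;
    import org.apache.hadoop.fs.ContentSummary;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class UsedVsSummaryCheck {
      // Hypothetical sanity check: both numbers come from the same ContentSummary data.
      static void checkUsedMatchesSummary(FileSystem fs, Path dir) throws IOException {
        ContentSummary summary = fs.getContentSummary(dir);
        long used = fs.getUsed(dir);
        if (used != summary.getLength()) {
          throw new IllegalStateException(
              "getUsed(" + dir + ")=" + used + " but summary length=" + summary.getLength());
        }
      }
    }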