HDFS-7824. GetContentSummary API and its namenode implementation for Storage Type Quota/Usage. (Contributed by Xiaoyu Yao)

Committed by Arpit Agarwal, 2015-03-26 10:24:11 -07:00
parent 222845632b
commit 3d0708bdb0
25 changed files with 563 additions and 92 deletions

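A quick client-side sketch of what this change exposes (a hedged illustration, not code from the commit; the Configuration setup and the /user/example path are assumptions):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;

public class TypeUsageSketch {
  public static void main(String[] args) throws Exception {
    // Assumed setup: any FileSystem whose getContentSummary returns the new fields.
    FileSystem fs = FileSystem.get(new Configuration());
    ContentSummary cs = fs.getContentSummary(new Path("/user/example"));

    // Pre-existing aggregate numbers.
    System.out.println("space consumed: " + cs.getSpaceConsumed());

    // New in this commit: per-storage-type usage and quota.
    for (StorageType t : StorageType.getTypesSupportingQuota()) {
      System.out.println(t + " consumed=" + cs.getTypeConsumed(t)
          + " quota=" + cs.getTypeQuota(t));
    }
  }
}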
File: ContentSummary.java

@@ -21,6 +21,7 @@ import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
@@ -36,17 +37,106 @@ public class ContentSummary implements Writable{
private long quota;
private long spaceConsumed;
private long spaceQuota;
private long typeConsumed[];
private long typeQuota[];
- /** Constructor */
public static class Builder{
public Builder() {
this.quota = -1;
this.spaceQuota = -1;
typeConsumed = new long[StorageType.values().length];
typeQuota = new long[StorageType.values().length];
for (int i = 0; i < typeQuota.length; i++) {
typeQuota[i] = -1;
}
}
public Builder length(long length) {
this.length = length;
return this;
}
public Builder fileCount(long fileCount) {
this.fileCount = fileCount;
return this;
}
public Builder directoryCount(long directoryCount) {
this.directoryCount = directoryCount;
return this;
}
public Builder quota(long quota){
this.quota = quota;
return this;
}
public Builder spaceConsumed(long spaceConsumed) {
this.spaceConsumed = spaceConsumed;
return this;
}
public Builder spaceQuota(long spaceQuota) {
this.spaceQuota = spaceQuota;
return this;
}
public Builder typeConsumed(long typeConsumed[]) {
for (int i = 0; i < typeConsumed.length; i++) {
this.typeConsumed[i] = typeConsumed[i];
}
return this;
}
public Builder typeQuota(StorageType type, long quota) {
this.typeQuota[type.ordinal()] = quota;
return this;
}
public Builder typeConsumed(StorageType type, long consumed) {
this.typeConsumed[type.ordinal()] = consumed;
return this;
}
public Builder typeQuota(long typeQuota[]) {
for (int i = 0; i < typeQuota.length; i++) {
this.typeQuota[i] = typeQuota[i];
}
return this;
}
public ContentSummary build() {
return new ContentSummary(length, fileCount, directoryCount, quota,
spaceConsumed, spaceQuota, typeConsumed, typeQuota);
}
private long length;
private long fileCount;
private long directoryCount;
private long quota;
private long spaceConsumed;
private long spaceQuota;
private long typeConsumed[];
private long typeQuota[];
}
/** Constructor deprecated by ContentSummary.Builder*/
@Deprecated
public ContentSummary() {}
- /** Constructor */
/** Constructor, deprecated by ContentSummary.Builder
* This constructor implicitly set spaceConsumed the same as length.
* spaceConsumed and length must be set explicitly with
* ContentSummary.Builder
* */
@Deprecated
public ContentSummary(long length, long fileCount, long directoryCount) {
this(length, fileCount, directoryCount, -1L, length, -1L);
}
- /** Constructor */
/** Constructor, deprecated by ContentSummary.Builder */
@Deprecated
public ContentSummary(
long length, long fileCount, long directoryCount, long quota,
long spaceConsumed, long spaceQuota) {
@@ -58,6 +148,21 @@ public class ContentSummary implements Writable{
this.spaceQuota = spaceQuota;
}
/** Constructor for ContentSummary.Builder*/
private ContentSummary(
long length, long fileCount, long directoryCount, long quota,
long spaceConsumed, long spaceQuota, long typeConsumed[],
long typeQuota[]) {
this.length = length;
this.fileCount = fileCount;
this.directoryCount = directoryCount;
this.quota = quota;
this.spaceConsumed = spaceConsumed;
this.spaceQuota = spaceQuota;
this.typeConsumed = typeConsumed;
this.typeQuota = typeQuota;
}
/** @return the length */
public long getLength() {return length;}
@@ -70,12 +175,48 @@ public class ContentSummary implements Writable{
/** Return the directory quota */
public long getQuota() {return quota;}
- /** Retuns (disk) space consumed */
/** Retuns storage space consumed */
public long getSpaceConsumed() {return spaceConsumed;}
- /** Returns (disk) space quota */
/** Returns storage space quota */
public long getSpaceQuota() {return spaceQuota;}
/** Returns storage type quota */
public long getTypeQuota(StorageType type) {
return (typeQuota != null) ? typeQuota[type.ordinal()] : -1;
}
/** Returns storage type consumed*/
public long getTypeConsumed(StorageType type) {
return (typeConsumed != null) ? typeConsumed[type.ordinal()] : 0;
}
/** Returns true if any storage type quota has been set*/
public boolean isTypeQuotaSet() {
if (typeQuota == null) {
return false;
}
for (StorageType t : StorageType.getTypesSupportingQuota()) {
if (typeQuota[t.ordinal()] > 0) {
return true;
}
}
return false;
}
/** Returns true if any storage type consumption information is available*/
public boolean isTypeConsumedAvailable() {
if (typeConsumed == null) {
return false;
}
for (StorageType t : StorageType.getTypesSupportingQuota()) {
if (typeConsumed[t.ordinal()] > 0) {
return true;
}
}
return false;
}
@Override
@InterfaceAudience.Private
public void write(DataOutput out) throws IOException {

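A minimal sketch of the new Builder usage (values are illustrative, not from the commit); the Builder defaults quota, spaceQuota and every per-type quota to -1, i.e. unset:

import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.StorageType;

public class BuilderSketch {
  public static void main(String[] args) {
    ContentSummary cs = new ContentSummary.Builder()
        .length(1024)             // total file length in bytes
        .fileCount(1)
        .directoryCount(0)
        .spaceConsumed(3 * 1024)  // e.g. replication factor 3
        .typeConsumed(StorageType.SSD, 1024)
        .typeQuota(StorageType.SSD, 10 * 1024)
        .build();

    System.out.println(cs.isTypeQuotaSet());                  // true, SSD quota set above
    System.out.println(cs.getTypeConsumed(StorageType.DISK)); // 0, nothing recorded for DISK
  }
}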
File: FileContext.java

@@ -1644,20 +1644,27 @@ public class FileContext {
UnsupportedFileSystemException, IOException {
FileStatus status = FileContext.this.getFileStatus(f);
if (status.isFile()) {
- return new ContentSummary(status.getLen(), 1, 0);
long length = status.getLen();
return new ContentSummary.Builder().length(length).
fileCount(1).directoryCount(0).spaceConsumed(length).
build();
}
long[] summary = {0, 0, 1};
RemoteIterator<FileStatus> statusIterator =
FileContext.this.listStatus(f);
while(statusIterator.hasNext()) {
FileStatus s = statusIterator.next();
long length = s.getLen();
ContentSummary c = s.isDirectory() ? getContentSummary(s.getPath()) :
- new ContentSummary(s.getLen(), 1, 0);
new ContentSummary.Builder().length(length).fileCount(1).
directoryCount(0).spaceConsumed(length).build();
summary[0] += c.getLength();
summary[1] += c.getFileCount();
summary[2] += c.getDirectoryCount();
}
- return new ContentSummary(summary[0], summary[1], summary[2]);
return new ContentSummary.Builder().length(summary[0]).
fileCount(summary[1]).directoryCount(summary[2]).
spaceConsumed(summary[0]).build();
}
/**

File: FileSystem.java

@@ -1467,18 +1467,24 @@ public abstract class FileSystem extends Configured implements Closeable {
FileStatus status = getFileStatus(f);
if (status.isFile()) {
// f is a file
- return new ContentSummary(status.getLen(), 1, 0);
long length = status.getLen();
return new ContentSummary.Builder().length(length).
fileCount(1).directoryCount(0).spaceConsumed(length).build();
}
// f is a directory
long[] summary = {0, 0, 1};
for(FileStatus s : listStatus(f)) {
long length = s.getLen();
ContentSummary c = s.isDirectory() ? getContentSummary(s.getPath()) :
- new ContentSummary(s.getLen(), 1, 0);
new ContentSummary.Builder().length(length).
fileCount(1).directoryCount(0).spaceConsumed(length).build();
summary[0] += c.getLength();
summary[1] += c.getFileCount();
summary[2] += c.getDirectoryCount();
}
- return new ContentSummary(summary[0], summary[1], summary[2]);
return new ContentSummary.Builder().length(summary[0]).
fileCount(summary[1]).directoryCount(summary[2]).
spaceConsumed(summary[0]).build();
}
final private static PathFilter DEFAULT_FILTER = new PathFilter() {

File: TestContentSummary.java

@@ -32,13 +32,13 @@ public class TestContentSummary {
// check the empty constructor correctly initialises the object
@Test
public void testConstructorEmpty() {
- ContentSummary contentSummary = new ContentSummary();
ContentSummary contentSummary = new ContentSummary.Builder().build();
assertEquals("getLength", 0, contentSummary.getLength());
assertEquals("getFileCount", 0, contentSummary.getFileCount());
assertEquals("getDirectoryCount", 0, contentSummary.getDirectoryCount());
- assertEquals("getQuota", 0, contentSummary.getQuota());
assertEquals("getQuota", -1, contentSummary.getQuota());
assertEquals("getSpaceConsumed", 0, contentSummary.getSpaceConsumed());
- assertEquals("getSpaceQuota", 0, contentSummary.getSpaceQuota());
assertEquals("getSpaceQuota", -1, contentSummary.getSpaceQuota());
}
// check the full constructor with quota information
@@ -51,8 +51,9 @@ public class TestContentSummary {
long spaceConsumed = 55555;
long spaceQuota = 66666;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount, quota, spaceConsumed, spaceQuota);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
assertEquals("getLength", length, contentSummary.getLength());
assertEquals("getFileCount", fileCount, contentSummary.getFileCount());
assertEquals("getDirectoryCount", directoryCount,
@@ -70,8 +71,9 @@ public class TestContentSummary {
long fileCount = 22222;
long directoryCount = 33333;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).
spaceConsumed(length).build();
assertEquals("getLength", length, contentSummary.getLength());
assertEquals("getFileCount", fileCount, contentSummary.getFileCount());
assertEquals("getDirectoryCount", directoryCount,
@@ -91,8 +93,9 @@ public class TestContentSummary {
long spaceConsumed = 55555;
long spaceQuota = 66666;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount, quota, spaceConsumed, spaceQuota);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
DataOutput out = mock(DataOutput.class);
InOrder inOrder = inOrder(out);
@@ -116,7 +119,7 @@ public class TestContentSummary {
long spaceConsumed = 55555;
long spaceQuota = 66666;
- ContentSummary contentSummary = new ContentSummary();
ContentSummary contentSummary = new ContentSummary.Builder().build();
DataInput in = mock(DataInput.class);
when(in.readLong()).thenReturn(length).thenReturn(fileCount)
@@ -159,8 +162,9 @@ public class TestContentSummary {
long spaceConsumed = 55555;
long spaceQuota = 66665;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount, quota, spaceConsumed, spaceQuota);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
String expected = " 44444 -11111 66665 11110"
+ " 33333 22222 11111 ";
assertEquals(expected, contentSummary.toString(true));
@@ -173,8 +177,8 @@ public class TestContentSummary {
long fileCount = 22222;
long directoryCount = 33333;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).build();
String expected = " none inf none"
+ " inf 33333 22222 11111 ";
assertEquals(expected, contentSummary.toString(true));
@@ -190,8 +194,9 @@ public class TestContentSummary {
long spaceConsumed = 55555;
long spaceQuota = 66665;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount, quota, spaceConsumed, spaceQuota);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
String expected = " 33333 22222 11111 ";
assertEquals(expected, contentSummary.toString(false));
}
@@ -206,8 +211,9 @@ public class TestContentSummary {
long spaceConsumed = 55555;
long spaceQuota = 66665;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount, quota, spaceConsumed, spaceQuota);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
String expected = " 44444 -11111 66665"
+ " 11110 33333 22222 11111 ";
assertEquals(expected, contentSummary.toString());
@@ -223,8 +229,9 @@ public class TestContentSummary {
long spaceConsumed = 1073741825;
long spaceQuota = 1;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount, quota, spaceConsumed, spaceQuota);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
String expected = " 212.0 M 1023 1 "
+ " -1 G 32.6 K 211.9 M 8.0 E ";
assertEquals(expected, contentSummary.toString(true, true));
@@ -240,8 +247,9 @@ public class TestContentSummary {
long spaceConsumed = 55555;
long spaceQuota = Long.MAX_VALUE;
- ContentSummary contentSummary = new ContentSummary(length, fileCount,
- directoryCount, quota, spaceConsumed, spaceQuota);
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
String expected = " 32.6 K 211.9 M 8.0 E ";
assertEquals(expected, contentSummary.toString(false, true));
}

File: TestCount.java

@@ -315,6 +315,8 @@ public class TestCount {
// mock content system
static class MockContentSummary extends ContentSummary {
@SuppressWarnings("deprecation")
// suppress warning on the usage of deprecated ContentSummary constructor
public MockContentSummary() {
}

File: HttpFSFileSystem.java

@@ -1013,13 +1013,13 @@ public class HttpFSFileSystem extends FileSystem
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) ((JSONObject)
HttpFSUtils.jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
- return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON),
- (Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON),
- (Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON),
- (Long) json.get(CONTENT_SUMMARY_QUOTA_JSON),
- (Long) json.get(CONTENT_SUMMARY_SPACE_CONSUMED_JSON),
- (Long) json.get(CONTENT_SUMMARY_SPACE_QUOTA_JSON)
- );
return new ContentSummary.Builder().
length((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON)).
fileCount((Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON)).
directoryCount((Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON)).
quota((Long) json.get(CONTENT_SUMMARY_QUOTA_JSON)).
spaceConsumed((Long) json.get(CONTENT_SUMMARY_SPACE_CONSUMED_JSON)).
spaceQuota((Long) json.get(CONTENT_SUMMARY_SPACE_QUOTA_JSON)).build();
}
@Override

File: CHANGES.txt (hadoop-hdfs)

@@ -1300,6 +1300,9 @@ Release 2.7.0 - UNRELEASED
HDFS-7806. Refactor: move StorageType from hadoop-hdfs to
hadoop-common. (Xiaoyu Yao via Arpit Agarwal)
HDFS-7824. GetContentSummary API and its namenode implementation for
Storage Type Quota/Usage. (Xiaoyu Yao via Arpit Agarwal)
Release 2.6.1 - UNRELEASED
INCOMPATIBLE CHANGES

File: PBHelper.java

@@ -1728,21 +1728,49 @@ public class PBHelper {
public static ContentSummary convert(ContentSummaryProto cs) {
if (cs == null) return null;
- return new ContentSummary(
- cs.getLength(), cs.getFileCount(), cs.getDirectoryCount(), cs.getQuota(),
- cs.getSpaceConsumed(), cs.getSpaceQuota());
ContentSummary.Builder builder = new ContentSummary.Builder();
builder.length(cs.getLength()).
fileCount(cs.getFileCount()).
directoryCount(cs.getDirectoryCount()).
quota(cs.getQuota()).
spaceConsumed(cs.getSpaceConsumed()).
spaceQuota(cs.getSpaceQuota());
if (cs.hasTypeQuotaInfos()) {
for (HdfsProtos.StorageTypeQuotaInfoProto info :
cs.getTypeQuotaInfos().getTypeQuotaInfoList()) {
StorageType type = PBHelper.convertStorageType(info.getType());
builder.typeConsumed(type, info.getConsumed());
builder.typeQuota(type, info.getQuota());
}
}
return builder.build();
}
public static ContentSummaryProto convert(ContentSummary cs) {
if (cs == null) return null;
- return ContentSummaryProto.newBuilder().
ContentSummaryProto.Builder builder = ContentSummaryProto.newBuilder();
- setLength(cs.getLength()).
builder.setLength(cs.getLength()).
setFileCount(cs.getFileCount()).
setDirectoryCount(cs.getDirectoryCount()).
setQuota(cs.getQuota()).
setSpaceConsumed(cs.getSpaceConsumed()).
- setSpaceQuota(cs.getSpaceQuota()).
setSpaceQuota(cs.getSpaceQuota());
- build();
if (cs.isTypeQuotaSet() || cs.isTypeConsumedAvailable()) {
HdfsProtos.StorageTypeQuotaInfosProto.Builder isb =
HdfsProtos.StorageTypeQuotaInfosProto.newBuilder();
for (StorageType t: StorageType.getTypesSupportingQuota()) {
HdfsProtos.StorageTypeQuotaInfoProto info =
HdfsProtos.StorageTypeQuotaInfoProto.newBuilder().
setType(convertStorageType(t)).
setConsumed(cs.getTypeConsumed(t)).
setQuota(cs.getTypeQuota(t)).
build();
isb.addTypeQuotaInfo(info);
}
builder.setTypeQuotaInfos(isb);
}
return builder.build();
}
public static NNHAStatusHeartbeat convert(NNHAStatusHeartbeatProto s) {

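The two ContentSummary converters above are symmetric, so a round trip through ContentSummaryProto should preserve the per-type values whenever a type quota is set or any type usage is non-zero. A hedged sketch of that round trip (not part of the commit; package locations assumed from branch-2 of that era):

import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;

public class RoundTripSketch {
  public static void main(String[] args) {
    ContentSummary original = new ContentSummary.Builder()
        .length(100).fileCount(1).directoryCount(1).spaceConsumed(300)
        .typeConsumed(StorageType.SSD, 100)
        .typeQuota(StorageType.SSD, 500)
        .build();

    // ContentSummary -> protobuf -> ContentSummary.
    ContentSummaryProto proto = PBHelper.convert(original);
    ContentSummary restored = PBHelper.convert(proto);

    System.out.println(restored.getTypeConsumed(StorageType.SSD)); // 100
    System.out.println(restored.getTypeQuota(StorageType.SSD));    // 500
  }
}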
File: BlockCollection.java

@@ -36,7 +36,7 @@ public interface BlockCollection {
/**
* Get content summary.
*/
- public ContentSummary computeContentSummary();
public ContentSummary computeContentSummary(BlockStoragePolicySuite bsps);
/**
* @return the number of blocks

File: BlockManager.java

@@ -745,7 +745,7 @@ public class BlockManager {
// always decrement total blocks
-1);
- final long fileLength = bc.computeContentSummary().getLength();
final long fileLength = bc.computeContentSummary(getStoragePolicySuite()).getLength();
final long pos = fileLength - ucBlock.getNumBytes();
return createLocatedBlock(ucBlock, pos, AccessMode.WRITE);
}

File: ContentCounts.java (new file)

@@ -0,0 +1,146 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.util.EnumCounters;
/**
* The counter to be computed for content types such as file, directory and symlink,
* and the storage type usage such as SSD, DISK, ARCHIVE.
*/
public class ContentCounts {
private EnumCounters<Content> contents;
private EnumCounters<StorageType> types;
public static class Builder {
private EnumCounters<Content> contents;
// storage spaces used by corresponding storage types
private EnumCounters<StorageType> types;
public Builder() {
contents = new EnumCounters<Content>(Content.class);
types = new EnumCounters<StorageType>(StorageType.class);
}
public Builder file(long file) {
contents.set(Content.FILE, file);
return this;
}
public Builder directory(long directory) {
contents.set(Content.DIRECTORY, directory);
return this;
}
public Builder symlink(long symlink) {
contents.set(Content.SYMLINK, symlink);
return this;
}
public Builder length(long length) {
contents.set(Content.LENGTH, length);
return this;
}
public Builder storagespace(long storagespace) {
contents.set(Content.DISKSPACE, storagespace);
return this;
}
public Builder snapshot(long snapshot) {
contents.set(Content.SNAPSHOT, snapshot);
return this;
}
public Builder snapshotable_directory(long snapshotable_directory) {
contents.set(Content.SNAPSHOTTABLE_DIRECTORY, snapshotable_directory);
return this;
}
public ContentCounts build() {
return new ContentCounts(contents, types);
}
}
private ContentCounts(EnumCounters<Content> contents,
EnumCounters<StorageType> types) {
this.contents = contents;
this.types = types;
}
// Get the number of files.
public long getFileCount() {
return contents.get(Content.FILE);
}
// Get the number of directories.
public long getDirectoryCount() {
return contents.get(Content.DIRECTORY);
}
// Get the number of symlinks.
public long getSymlinkCount() {
return contents.get(Content.SYMLINK);
}
// Get the total of file length in bytes.
public long getLength() {
return contents.get(Content.LENGTH);
}
// Get the total of storage space usage in bytes including replication.
public long getStoragespace() {
return contents.get(Content.DISKSPACE);
}
// Get the number of snapshots
public long getSnapshotCount() {
return contents.get(Content.SNAPSHOT);
}
// Get the number of snapshottable directories.
public long getSnapshotableDirectoryCount() {
return contents.get(Content.SNAPSHOTTABLE_DIRECTORY);
}
public long[] getTypeSpaces() {
return types.asArray();
}
public long getTypeSpace(StorageType t) {
return types.get(t);
}
public void addContent(Content c, long val) {
contents.add(c, val);
}
public void addContents(ContentCounts that) {
contents.add(that.contents);
types.add(that.types);
}
public void addTypeSpace(StorageType t, long val) {
types.add(t, val);
}
public void addTypeSpaces(EnumCounters<StorageType> that) {
this.types.add(that);
}
}

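ContentCounts is a namenode-internal aggregate; a small sketch of how its counters might accumulate for one file with one SSD and two DISK replicas (values are illustrative only, not from the commit):

import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.server.namenode.Content;
import org.apache.hadoop.hdfs.server.namenode.ContentCounts;

public class ContentCountsSketch {
  public static void main(String[] args) {
    ContentCounts counts = new ContentCounts.Builder().build();

    long fileLen = 1L << 20; // one 1 MB file
    counts.addContent(Content.FILE, 1);
    counts.addContent(Content.LENGTH, fileLen);
    counts.addContent(Content.DISKSPACE, 3 * fileLen); // all three replicas
    counts.addTypeSpace(StorageType.SSD, fileLen);
    counts.addTypeSpace(StorageType.DISK, 2 * fileLen);

    System.out.println(counts.getLength());                   // 1048576
    System.out.println(counts.getTypeSpace(StorageType.SSD)); // 1048576
  }
}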
File: ContentSummaryComputationContext.java

@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
import com.google.common.base.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
@@ -26,7 +27,8 @@ import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
public class ContentSummaryComputationContext {
private FSDirectory dir = null;
private FSNamesystem fsn = null;
- private Content.Counts counts = null;
private BlockStoragePolicySuite bsps = null;
private ContentCounts counts = null;
private long nextCountLimit = 0;
private long limitPerRun = 0;
private long yieldCount = 0;
@@ -46,12 +48,13 @@ public class ContentSummaryComputationContext {
this.fsn = fsn;
this.limitPerRun = limitPerRun;
this.nextCountLimit = limitPerRun;
- this.counts = Content.Counts.newInstance();
this.counts = new ContentCounts.Builder().build();
}
/** Constructor for blocking computation. */
- public ContentSummaryComputationContext() {
public ContentSummaryComputationContext(BlockStoragePolicySuite bsps) {
this(null, null, 0);
this.bsps = bsps;
}
/** Return current yield count */
@@ -73,10 +76,10 @@ public class ContentSummaryComputationContext {
}
// Have we reached the limit?
- long currentCount = counts.get(Content.FILE) +
- counts.get(Content.SYMLINK) +
- counts.get(Content.DIRECTORY) +
- counts.get(Content.SNAPSHOTTABLE_DIRECTORY);
long currentCount = counts.getFileCount() +
counts.getSymlinkCount() +
counts.getDirectoryCount() +
counts.getSnapshotableDirectoryCount();
if (currentCount <= nextCountLimit) {
return false;
}
@@ -114,11 +117,15 @@ public class ContentSummaryComputationContext {
}
/** Get the content counts */
- public Content.Counts getCounts() {
public ContentCounts getCounts() {
return counts;
}
public BlockStoragePolicySuite getBlockStoragePolicySuite() {
- return fsn.getBlockManager().getStoragePolicySuite();
Preconditions.checkState((bsps != null || fsn != null),
"BlockStoragePolicySuite must be either initialized or available via" +
" FSNameSystem");
return (bsps != null) ? bsps:
fsn.getBlockManager().getStoragePolicySuite();
}
}

File: DirectoryWithQuotaFeature.java

@@ -126,12 +126,12 @@ public final class DirectoryWithQuotaFeature implements INode.Feature {
ContentSummaryComputationContext computeContentSummary(final INodeDirectory dir,
final ContentSummaryComputationContext summary) {
- final long original = summary.getCounts().get(Content.DISKSPACE);
final long original = summary.getCounts().getStoragespace();
long oldYieldCount = summary.getYieldCount();
dir.computeDirectoryContentSummary(summary, Snapshot.CURRENT_STATE_ID);
// Check only when the content has not changed in the middle.
if (oldYieldCount == summary.getYieldCount()) {
- checkStoragespace(dir, summary.getCounts().get(Content.DISKSPACE) - original);
checkStoragespace(dir, summary.getCounts().getStoragespace() - original);
}
return summary;
}
@@ -277,4 +277,4 @@ public final class DirectoryWithQuotaFeature implements INode.Feature {
return "Quota[" + namespaceString() + ", " + storagespaceString() +
", " + typeSpaceString() + "]";
}
}

File: INode.java

@@ -432,9 +432,9 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
BlocksMapUpdateInfo collectedBlocks, List<INode> removedINodes);
/** Compute {@link ContentSummary}. Blocking call */
- public final ContentSummary computeContentSummary() {
public final ContentSummary computeContentSummary(BlockStoragePolicySuite bsps) {
return computeAndConvertContentSummary(
- new ContentSummaryComputationContext());
new ContentSummaryComputationContext(bsps));
}
/**
@@ -442,17 +442,22 @@
*/
public final ContentSummary computeAndConvertContentSummary(
ContentSummaryComputationContext summary) {
- Content.Counts counts = computeContentSummary(summary).getCounts();
ContentCounts counts = computeContentSummary(summary).getCounts();
final QuotaCounts q = getQuotaCounts();
- return new ContentSummary(counts.get(Content.LENGTH),
- counts.get(Content.FILE) + counts.get(Content.SYMLINK),
- counts.get(Content.DIRECTORY), q.getNameSpace(),
- counts.get(Content.DISKSPACE), q.getStorageSpace());
- // TODO: storage type quota reporting HDFS-7701.
return new ContentSummary.Builder().
length(counts.getLength()).
fileCount(counts.getFileCount() + counts.getSymlinkCount()).
directoryCount(counts.getDirectoryCount()).
quota(q.getNameSpace()).
spaceConsumed(counts.getStoragespace()).
spaceQuota(q.getStorageSpace()).
typeConsumed(counts.getTypeSpaces()).
typeQuota(q.getTypeSpaces().asArray()).
build();
}
/**
- * Count subtree content summary with a {@link Content.Counts}.
* Count subtree content summary with a {@link ContentCounts}.
*
* @param summary the context object holding counts for the subtree.
* @return The same objects as summary.

File: INodeDirectory.java

@@ -664,7 +664,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
}
// Increment the directory count for this directory.
- summary.getCounts().add(Content.DIRECTORY, 1);
summary.getCounts().addContent(Content.DIRECTORY, 1);
// Relinquish and reacquire locks if necessary.
summary.yield();
return summary;

File: INodeFile.java

@@ -599,22 +599,36 @@ public class INodeFile extends INodeWithAdditionalFields
@Override
public final ContentSummaryComputationContext computeContentSummary(
final ContentSummaryComputationContext summary) {
- final Content.Counts counts = summary.getCounts();
final ContentCounts counts = summary.getCounts();
FileWithSnapshotFeature sf = getFileWithSnapshotFeature();
long fileLen = 0;
if (sf == null) {
- counts.add(Content.LENGTH, computeFileSize());
- counts.add(Content.FILE, 1);
fileLen = computeFileSize();
counts.addContent(Content.FILE, 1);
} else {
final FileDiffList diffs = sf.getDiffs();
final int n = diffs.asList().size();
- counts.add(Content.FILE, n);
counts.addContent(Content.FILE, n);
if (n > 0 && sf.isCurrentFileDeleted()) {
- counts.add(Content.LENGTH, diffs.getLast().getFileSize());
fileLen = diffs.getLast().getFileSize();
} else {
- counts.add(Content.LENGTH, computeFileSize());
fileLen = computeFileSize();
}
}
counts.addContent(Content.LENGTH, fileLen);
- counts.add(Content.DISKSPACE, storagespaceConsumed());
counts.addContent(Content.DISKSPACE, storagespaceConsumed());
if (getStoragePolicyID() != BlockStoragePolicySuite.ID_UNSPECIFIED){
BlockStoragePolicy bsp = summary.getBlockStoragePolicySuite().
getPolicy(getStoragePolicyID());
List<StorageType> storageTypes = bsp.chooseStorageTypes(getFileReplication());
for (StorageType t : storageTypes) {
if (!t.supportTypeQuota()) {
continue;
}
counts.addTypeSpace(t, fileLen);
}
}
return summary;
}

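Worked numbers for the per-type accounting above, assuming the ONE_SSD policy with replication 3 (the figures mirror the TestQuotaByStorageType assertions later in this commit; block size and file length are assumed values):

public class OneSsdAccountingSketch {
  public static void main(String[] args) {
    // Assumed values: two 1 MB blocks, replication factor 3, ONE_SSD policy.
    long fileLen = 2 * (1L << 20);
    int replication = 3;

    // chooseStorageTypes(3) under ONE_SSD yields one SSD and two DISK entries,
    // and the loop above adds the un-replicated fileLen once per entry.
    long ssdConsumed = fileLen;                      // one SSD replica
    long diskConsumed = (replication - 1) * fileLen; // two DISK replicas
    long spaceConsumed = replication * fileLen;      // storagespaceConsumed(), all replicas

    System.out.println("SSD  = " + ssdConsumed);
    System.out.println("DISK = " + diskConsumed);
    System.out.println("ALL  = " + spaceConsumed);
  }
}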
File: INodeReference.java

@@ -23,6 +23,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
@@ -512,7 +513,8 @@ public abstract class INodeReference extends INode {
//only count storagespace for WithName
final QuotaCounts q = new QuotaCounts.Builder().build();
computeQuotaUsage(summary.getBlockStoragePolicySuite(), q, false, lastSnapshotId);
- summary.getCounts().add(Content.DISKSPACE, q.getStorageSpace());
summary.getCounts().addContent(Content.DISKSPACE, q.getStorageSpace());
summary.getCounts().addTypeSpaces(q.getTypeSpaces());
return summary;
}

File: INodeSymlink.java

@@ -102,7 +102,7 @@ public class INodeSymlink extends INodeWithAdditionalFields {
@Override
public ContentSummaryComputationContext computeContentSummary(
final ContentSummaryComputationContext summary) {
- summary.getCounts().add(Content.SYMLINK, 1);
summary.getCounts().addContent(Content.SYMLINK, 1);
return summary;
}

File: DirectorySnapshottableFeature.java

@@ -237,8 +237,8 @@ public class DirectorySnapshottableFeature extends DirectoryWithSnapshotFeature
final INodeDirectory snapshotRoot,
final ContentSummaryComputationContext summary) {
snapshotRoot.computeContentSummary(summary);
- summary.getCounts().add(Content.SNAPSHOT, snapshotsByNames.size());
- summary.getCounts().add(Content.SNAPSHOTTABLE_DIRECTORY, 1);
summary.getCounts().addContent(Content.SNAPSHOT, snapshotsByNames.size());
summary.getCounts().addContent(Content.SNAPSHOTTABLE_DIRECTORY, 1);
return summary;
}

File: DirectoryWithSnapshotFeature.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
import org.apache.hadoop.hdfs.server.namenode.AclStorage;
import org.apache.hadoop.hdfs.server.namenode.Content;
import org.apache.hadoop.hdfs.server.namenode.ContentCounts;
import org.apache.hadoop.hdfs.server.namenode.ContentSummaryComputationContext;
import org.apache.hadoop.hdfs.server.namenode.FSImageSerialization;
import org.apache.hadoop.hdfs.server.namenode.INode;
@@ -650,19 +651,19 @@ public class DirectoryWithSnapshotFeature implements INode.Feature {
}
public void computeContentSummary4Snapshot(final BlockStoragePolicySuite bsps,
- final Content.Counts counts) {
final ContentCounts counts) {
// Create a new blank summary context for blocking processing of subtree.
ContentSummaryComputationContext summary =
- new ContentSummaryComputationContext();
new ContentSummaryComputationContext(bsps);
for(DirectoryDiff d : diffs) {
for(INode deleted : d.getChildrenDiff().getList(ListType.DELETED)) {
deleted.computeContentSummary(summary);
}
}
// Add the counts from deleted trees.
- counts.add(summary.getCounts());
counts.addContents(summary.getCounts());
// Add the deleted directory count.
- counts.add(Content.DIRECTORY, diffs.asList().size());
counts.addContent(Content.DIRECTORY, diffs.asList().size());
}
/**

File: EnumCounters.java

@@ -21,6 +21,7 @@ import java.util.Arrays;
import java.util.HashMap;
import com.google.common.base.Preconditions;
import org.apache.commons.lang.ArrayUtils;
/**
* Counters for an enum type.
@@ -64,6 +65,11 @@ public class EnumCounters<E extends Enum<E>> {
return counters[e.ordinal()];
}
/** @return the values of counter as a shadow copy of array*/
public long[] asArray() {
return ArrayUtils.clone(counters);
}
/** Negate all counters. */
public final void negation() {
for(int i = 0; i < counters.length; i++) {

File: JsonUtil.java

@@ -553,8 +553,9 @@ public class JsonUtil {
final long spaceConsumed = ((Number) m.get("spaceConsumed")).longValue();
final long spaceQuota = ((Number) m.get("spaceQuota")).longValue();
- return new ContentSummary(length, fileCount, directoryCount,
- quota, spaceConsumed, spaceQuota);
return new ContentSummary.Builder().length(length).fileCount(fileCount).
directoryCount(directoryCount).quota(quota).spaceConsumed(spaceConsumed).
spaceQuota(spaceQuota).build();
}
/** Convert a MD5MD5CRC32FileChecksum to a Json string. */

File: hdfs.proto

@@ -134,6 +134,20 @@ message ContentSummaryProto {
required uint64 quota = 4;
required uint64 spaceConsumed = 5;
required uint64 spaceQuota = 6;
optional StorageTypeQuotaInfosProto typeQuotaInfos = 7;
}
/**
* Storage type quota and usage information of a file or directory
*/
message StorageTypeQuotaInfosProto {
repeated StorageTypeQuotaInfoProto typeQuotaInfo = 1;
}
message StorageTypeQuotaInfoProto {
required StorageTypeProto type = 1;
required uint64 quota = 2;
required uint64 consumed = 3;
}
/**

File: TestReplicationPolicy.java

@@ -1221,7 +1221,7 @@ public class TestReplicationPolicy {
when(mbc.isUnderConstruction()).thenReturn(true);
ContentSummary cs = mock(ContentSummary.class);
when(cs.getLength()).thenReturn((long)1);
- when(mbc.computeContentSummary()).thenReturn(cs);
when(mbc.computeContentSummary(bm.getStoragePolicySuite())).thenReturn(cs);
info.setBlockCollection(mbc);
bm.addBlockCollection(info, mbc);

File: TestQuotaByStorageType.java

@@ -24,6 +24,7 @@ package org.apache.hadoop.hdfs.server.namenode;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -156,6 +157,11 @@ public class TestQuotaByStorageType {
ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(file1Len, ssdConsumed);
ContentSummary cs = dfs.getContentSummary(foo);
assertEquals(cs.getSpaceConsumed(), file1Len * REPLICATION);
assertEquals(cs.getTypeConsumed(StorageType.SSD), file1Len);
assertEquals(cs.getTypeConsumed(StorageType.DISK), file1Len * 2);
}
@Test(timeout = 60000)
@@ -192,6 +198,11 @@ public class TestQuotaByStorageType {
fnode.computeQuotaUsage(fsn.getBlockManager().getStoragePolicySuite(), counts, true);
assertEquals(fnode.dumpTreeRecursively().toString(), 0,
counts.getTypeSpaces().get(StorageType.SSD));
ContentSummary cs = dfs.getContentSummary(foo);
assertEquals(cs.getSpaceConsumed(), 0);
assertEquals(cs.getTypeConsumed(StorageType.SSD), 0);
assertEquals(cs.getTypeConsumed(StorageType.DISK), 0);
}
@Test(timeout = 60000)
@@ -233,6 +244,11 @@ public class TestQuotaByStorageType {
} catch (Throwable t) {
LOG.info("Got expected exception ", t);
}
ContentSummary cs = dfs.getContentSummary(foo);
assertEquals(cs.getSpaceConsumed(), file1Len * REPLICATION);
assertEquals(cs.getTypeConsumed(StorageType.SSD), file1Len);
assertEquals(cs.getTypeConsumed(StorageType.DISK), file1Len * 2);
}
/**
@@ -554,6 +570,11 @@ public class TestQuotaByStorageType {
assertEquals(sub1Node.dumpTreeRecursively().toString(), file1Len,
counts1.getTypeSpaces().get(StorageType.SSD));
ContentSummary cs1 = dfs.getContentSummary(sub1);
assertEquals(cs1.getSpaceConsumed(), file1Len * REPLICATION);
assertEquals(cs1.getTypeConsumed(StorageType.SSD), file1Len);
assertEquals(cs1.getTypeConsumed(StorageType.DISK), file1Len * 2);
// Delete the snapshot s1
dfs.deleteSnapshot(sub1, "s1");
@@ -566,6 +587,11 @@ public class TestQuotaByStorageType {
sub1Node.computeQuotaUsage(fsn.getBlockManager().getStoragePolicySuite(), counts2, true);
assertEquals(sub1Node.dumpTreeRecursively().toString(), 0,
counts2.getTypeSpaces().get(StorageType.SSD));
ContentSummary cs2 = dfs.getContentSummary(sub1);
assertEquals(cs2.getSpaceConsumed(), 0);
assertEquals(cs2.getTypeConsumed(StorageType.SSD), 0);
assertEquals(cs2.getTypeConsumed(StorageType.DISK), 0);
}
@Test(timeout = 60000)
@@ -601,6 +627,11 @@ public class TestQuotaByStorageType {
ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(newFile1Len, ssdConsumed);
ContentSummary cs = dfs.getContentSummary(foo);
assertEquals(cs.getSpaceConsumed(), newFile1Len * REPLICATION);
assertEquals(cs.getTypeConsumed(StorageType.SSD), newFile1Len);
assertEquals(cs.getTypeConsumed(StorageType.DISK), newFile1Len * 2);
}
@Test
@@ -701,6 +732,55 @@ public class TestQuotaByStorageType {
.getDirectoryWithQuotaFeature()
.getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
assertEquals(file1Len, ssdConsumedAfterNNRestart);
}
@Test(timeout = 60000)
public void testContentSummaryWithoutQuotaByStorageType() throws Exception {
final Path foo = new Path(dir, "foo");
Path createdFile1 = new Path(foo, "created_file1.data");
dfs.mkdirs(foo);
// set storage policy on directory "foo" to ONESSD
dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
INode fnode = fsdir.getINode4Write(foo.toString());
assertTrue(fnode.isDirectory());
assertTrue(!fnode.isQuotaSet());
// Create file of size 2 * BLOCKSIZE under directory "foo"
long file1Len = BLOCKSIZE * 2;
int bufLen = BLOCKSIZE / 16;
DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
// Verify getContentSummary without any quota set
ContentSummary cs = dfs.getContentSummary(foo);
assertEquals(cs.getSpaceConsumed(), file1Len * REPLICATION);
assertEquals(cs.getTypeConsumed(StorageType.SSD), file1Len);
assertEquals(cs.getTypeConsumed(StorageType.DISK), file1Len * 2);
}
@Test(timeout = 60000)
public void testContentSummaryWithoutStoragePolicy() throws Exception {
final Path foo = new Path(dir, "foo");
Path createdFile1 = new Path(foo, "created_file1.data");
dfs.mkdirs(foo);
INode fnode = fsdir.getINode4Write(foo.toString());
assertTrue(fnode.isDirectory());
assertTrue(!fnode.isQuotaSet());
// Create file of size 2 * BLOCKSIZE under directory "foo"
long file1Len = BLOCKSIZE * 2;
int bufLen = BLOCKSIZE / 16;
DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
// Verify getContentSummary without any quota set
// Expect no type quota and usage information available
ContentSummary cs = dfs.getContentSummary(foo);
assertEquals(cs.getSpaceConsumed(), file1Len * REPLICATION);
for (StorageType t : StorageType.values()) {
assertEquals(cs.getTypeConsumed(t), 0);
assertEquals(cs.getTypeQuota(t), -1);
}
}
} }