HDFS-5630. Hook up cache directive and pool usage statistics. (wang)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1548309 13f79535-47bb-0310-9956-ffa450edef68
parent ca9a32c951
commit 55e5b0653c
@@ -231,6 +231,8 @@ Trunk (Unreleased)
     HDFS-5536. Implement HTTP policy for Namenode and DataNode. (Haohui Mai via
     jing9)
 
+    HDFS-5630. Hook up cache directive and pool usage statistics. (wang)
+
   OPTIMIZATIONS
 
     HDFS-5349. DNA_CACHE and DNA_UNCACHE should be by blockId only. (cmccabe)
 
@@ -46,7 +46,9 @@ public final class CacheDirective implements IntrusiveCollection.Element {
 
   private long bytesNeeded;
   private long bytesCached;
-  private long filesAffected;
+  private long filesNeeded;
+  private long filesCached;
 
   private Element prev;
   private Element next;
 
@@ -58,9 +60,6 @@ public final class CacheDirective implements IntrusiveCollection.Element {
     Preconditions.checkArgument(replication > 0);
     this.replication = replication;
     this.expiryTime = expiryTime;
-    this.bytesNeeded = 0;
-    this.bytesCached = 0;
-    this.filesAffected = 0;
   }
 
   public long getId() {
@@ -112,7 +111,8 @@ public final class CacheDirective implements IntrusiveCollection.Element {
     return new CacheDirectiveStats.Builder().
         setBytesNeeded(bytesNeeded).
         setBytesCached(bytesCached).
-        setFilesAffected(filesAffected).
+        setFilesNeeded(filesNeeded).
+        setFilesCached(filesCached).
         setHasExpired(new Date().getTime() > expiryTime).
         build();
   }
@@ -131,7 +131,8 @@ public final class CacheDirective implements IntrusiveCollection.Element {
         append(", expiryTime: ").append(getExpiryTimeString()).
         append(", bytesNeeded:").append(bytesNeeded).
         append(", bytesCached:").append(bytesCached).
-        append(", filesAffected:").append(filesAffected).
+        append(", filesNeeded:").append(filesNeeded).
+        append(", filesCached:").append(filesCached).
         append(" }");
     return builder.toString();
   }
@@ -152,42 +153,60 @@ public final class CacheDirective implements IntrusiveCollection.Element {
     return new HashCodeBuilder().append(id).toHashCode();
   }
 
+  //
+  // Stats related getters and setters
+  //
+
+  /**
+   * Resets the byte and file statistics being tracked by this CacheDirective.
+   */
+  public void resetStatistics() {
+    bytesNeeded = 0;
+    bytesCached = 0;
+    filesNeeded = 0;
+    filesCached = 0;
+  }
+
   public long getBytesNeeded() {
     return bytesNeeded;
   }
 
-  public void clearBytesNeeded() {
-    this.bytesNeeded = 0;
-  }
-
-  public void addBytesNeeded(long toAdd) {
-    this.bytesNeeded += toAdd;
+  public void addBytesNeeded(long bytes) {
+    this.bytesNeeded += bytes;
+    pool.addBytesNeeded(bytes);
   }
 
   public long getBytesCached() {
     return bytesCached;
   }
 
-  public void clearBytesCached() {
-    this.bytesCached = 0;
-  }
-
-  public void addBytesCached(long toAdd) {
-    this.bytesCached += toAdd;
+  public void addBytesCached(long bytes) {
+    this.bytesCached += bytes;
+    pool.addBytesCached(bytes);
+  }
+
+  public long getFilesNeeded() {
+    return filesNeeded;
   }
 
-  public long getFilesAffected() {
-    return filesAffected;
+  public void addFilesNeeded(long files) {
+    this.filesNeeded += files;
+    pool.addFilesNeeded(files);
   }
 
-  public void clearFilesAffected() {
-    this.filesAffected = 0;
+  public long getFilesCached() {
+    return filesCached;
   }
 
-  public void incrementFilesAffected() {
-    this.filesAffected++;
+  public void addFilesCached(long files) {
+    this.filesCached += files;
+    pool.addFilesCached(files);
   }
 
+  //
+  // IntrusiveCollection.Element implementation
+  //
+
   @SuppressWarnings("unchecked")
   @Override // IntrusiveCollection.Element
   public void insertInternal(IntrusiveCollection<? extends Element> list,
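Note: with the CacheDirective changes above, every add* call on a directive also bumps the counters of its owning CachePool, so pool totals end up being the sum of the statistics gathered for that pool's directives during a scan. The self-contained sketch below isolates that aggregation pattern; the class and field names are illustrative only, not the actual HDFS types.

// Minimal standalone sketch of the directive-to-pool aggregation pattern.
// Illustrative names only; this is not the HDFS implementation.
public class StatsPropagationSketch {
  static class Pool {
    long bytesNeeded, bytesCached, filesNeeded, filesCached;

    void addBytesNeeded(long bytes) { bytesNeeded += bytes; }
    void addBytesCached(long bytes) { bytesCached += bytes; }
    void addFilesNeeded(long files) { filesNeeded += files; }
    void addFilesCached(long files) { filesCached += files; }

    void reset() { bytesNeeded = bytesCached = filesNeeded = filesCached = 0; }
  }

  static class Directive {
    final Pool pool;
    long bytesNeeded, bytesCached, filesNeeded, filesCached;

    Directive(Pool pool) { this.pool = pool; }

    // Every increment on the directive is mirrored into its pool, so the pool
    // totals are simply the sum over the pool's directives for this scan.
    void addBytesNeeded(long bytes) { bytesNeeded += bytes; pool.addBytesNeeded(bytes); }
    void addBytesCached(long bytes) { bytesCached += bytes; pool.addBytesCached(bytes); }
    void addFilesNeeded(long files) { filesNeeded += files; pool.addFilesNeeded(files); }
    void addFilesCached(long files) { filesCached += files; pool.addFilesCached(files); }
  }

  public static void main(String[] args) {
    Pool pool = new Pool();
    Directive d1 = new Directive(pool);
    Directive d2 = new Directive(pool);
    d1.addFilesNeeded(1);
    d1.addBytesNeeded(3 * 512);
    d2.addFilesNeeded(1);
    d2.addBytesNeeded(2 * 512);
    // Prints: pool bytesNeeded=2560 filesNeeded=2
    System.out.println("pool bytesNeeded=" + pool.bytesNeeded
        + " filesNeeded=" + pool.filesNeeded);
  }
}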
@@ -29,7 +29,8 @@ public class CacheDirectiveStats {
   public static class Builder {
     private long bytesNeeded;
     private long bytesCached;
-    private long filesAffected;
+    private long filesNeeded;
+    private long filesCached;
     private boolean hasExpired;
 
     /**
@@ -38,8 +39,8 @@ public class CacheDirectiveStats {
      * @return New CacheDirectiveStats.
      */
     public CacheDirectiveStats build() {
-      return new CacheDirectiveStats(bytesNeeded, bytesCached, filesAffected,
-          hasExpired);
+      return new CacheDirectiveStats(bytesNeeded, bytesCached, filesNeeded,
+          filesCached, hasExpired);
     }
 
     /**
@@ -71,13 +72,23 @@ public class CacheDirectiveStats {
     }
 
     /**
-     * Sets the files affected by this directive.
-     *
-     * @param filesAffected The files affected.
+     * Sets the files needed by this directive.
+     * @param filesNeeded The number of files needed
      * @return This builder, for call chaining.
      */
-    public Builder setFilesAffected(long filesAffected) {
-      this.filesAffected = filesAffected;
+    public Builder setFilesNeeded(long filesNeeded) {
+      this.filesNeeded = filesNeeded;
+      return this;
+    }
+
+    /**
+     * Sets the files cached by this directive.
+     *
+     * @param filesCached The number of files cached.
+     * @return This builder, for call chaining.
+     */
+    public Builder setFilesCached(long filesCached) {
+      this.filesCached = filesCached;
       return this;
     }
 
@@ -95,14 +106,16 @@ public class CacheDirectiveStats {
 
   private final long bytesNeeded;
   private final long bytesCached;
-  private final long filesAffected;
+  private final long filesNeeded;
+  private final long filesCached;
   private final boolean hasExpired;
 
   private CacheDirectiveStats(long bytesNeeded, long bytesCached,
-      long filesAffected, boolean hasExpired) {
+      long filesNeeded, long filesCached, boolean hasExpired) {
     this.bytesNeeded = bytesNeeded;
     this.bytesCached = bytesCached;
-    this.filesAffected = filesAffected;
+    this.filesNeeded = filesNeeded;
+    this.filesCached = filesCached;
     this.hasExpired = hasExpired;
   }
 
@@ -121,10 +134,17 @@ public class CacheDirectiveStats {
   }
 
   /**
-   * @return The files affected.
+   * @return The number of files needed.
    */
-  public long getFilesAffected() {
-    return filesAffected;
+  public long getFilesNeeded() {
+    return filesNeeded;
+  }
+
+  /**
+   * @return The number of files cached.
+   */
+  public long getFilesCached() {
+    return filesCached;
   }
 
   /**
@@ -140,7 +160,8 @@ public class CacheDirectiveStats {
     builder.append("{");
     builder.append("bytesNeeded: ").append(bytesNeeded);
     builder.append(", ").append("bytesCached: ").append(bytesCached);
-    builder.append(", ").append("filesAffected: ").append(filesAffected);
+    builder.append(", ").append("filesNeeded: ").append(filesNeeded);
+    builder.append(", ").append("filesCached: ").append(filesCached);
     builder.append(", ").append("hasExpired: ").append(hasExpired);
     builder.append("}");
     return builder.toString();
@@ -30,7 +30,8 @@ public class CachePoolStats {
   public static class Builder {
     private long bytesNeeded;
     private long bytesCached;
-    private long filesAffected;
+    private long filesNeeded;
+    private long filesCached;
 
     public Builder() {
     }
@@ -45,24 +46,33 @@ public class CachePoolStats {
       return this;
     }
 
-    public Builder setFilesAffected(long filesAffected) {
-      this.filesAffected = filesAffected;
+    public Builder setFilesNeeded(long filesNeeded) {
+      this.filesNeeded = filesNeeded;
+      return this;
+    }
+
+    public Builder setFilesCached(long filesCached) {
+      this.filesCached = filesCached;
       return this;
     }
 
     public CachePoolStats build() {
-      return new CachePoolStats(bytesNeeded, bytesCached, filesAffected);
+      return new CachePoolStats(bytesNeeded, bytesCached, filesNeeded,
+          filesCached);
     }
   };
 
   private final long bytesNeeded;
   private final long bytesCached;
-  private final long filesAffected;
+  private final long filesNeeded;
+  private final long filesCached;
 
-  private CachePoolStats(long bytesNeeded, long bytesCached, long filesAffected) {
+  private CachePoolStats(long bytesNeeded, long bytesCached, long filesNeeded,
+      long filesCached) {
     this.bytesNeeded = bytesNeeded;
     this.bytesCached = bytesCached;
-    this.filesAffected = filesAffected;
+    this.filesNeeded = filesNeeded;
+    this.filesCached = filesCached;
   }
 
   public long getBytesNeeded() {
@@ -70,18 +80,23 @@ public class CachePoolStats {
   }
 
   public long getBytesCached() {
-    return bytesNeeded;
+    return bytesCached;
   }
 
-  public long getFilesAffected() {
-    return filesAffected;
+  public long getFilesNeeded() {
+    return filesNeeded;
+  }
+
+  public long getFilesCached() {
+    return filesCached;
   }
 
   public String toString() {
     return new StringBuilder().append("{").
         append("bytesNeeded:").append(bytesNeeded).
         append(", bytesCached:").append(bytesCached).
-        append(", filesAffected:").append(filesAffected).
+        append(", filesNeeded:").append(filesNeeded).
+        append(", filesCached:").append(filesCached).
         append("}").toString();
   }
 }
@@ -1642,7 +1642,8 @@ public class PBHelper {
         CacheDirectiveStatsProto.newBuilder();
     builder.setBytesNeeded(stats.getBytesNeeded());
     builder.setBytesCached(stats.getBytesCached());
-    builder.setFilesAffected(stats.getFilesAffected());
+    builder.setFilesNeeded(stats.getFilesNeeded());
+    builder.setFilesCached(stats.getFilesCached());
     builder.setHasExpired(stats.hasExpired());
     return builder.build();
   }
@@ -1651,7 +1652,8 @@ public class PBHelper {
     CacheDirectiveStats.Builder builder = new CacheDirectiveStats.Builder();
     builder.setBytesNeeded(proto.getBytesNeeded());
     builder.setBytesCached(proto.getBytesCached());
-    builder.setFilesAffected(proto.getFilesAffected());
+    builder.setFilesNeeded(proto.getFilesNeeded());
+    builder.setFilesCached(proto.getFilesCached());
     builder.setHasExpired(proto.getHasExpired());
     return builder.build();
   }
@@ -1711,7 +1713,8 @@ public class PBHelper {
     CachePoolStatsProto.Builder builder = CachePoolStatsProto.newBuilder();
     builder.setBytesNeeded(stats.getBytesNeeded());
     builder.setBytesCached(stats.getBytesCached());
-    builder.setFilesAffected(stats.getFilesAffected());
+    builder.setFilesNeeded(stats.getFilesNeeded());
+    builder.setFilesCached(stats.getFilesCached());
     return builder.build();
   }
 
@@ -1719,7 +1722,8 @@ public class PBHelper {
     CachePoolStats.Builder builder = new CachePoolStats.Builder();
     builder.setBytesNeeded(proto.getBytesNeeded());
     builder.setBytesCached(proto.getBytesCached());
-    builder.setFilesAffected(proto.getFilesAffected());
+    builder.setFilesNeeded(proto.getFilesNeeded());
+    builder.setFilesCached(proto.getFilesCached());
     return builder.build();
   }
 
@@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.protocol.CacheDirective;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.CachedBlocksList.Type;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
 import org.apache.hadoop.hdfs.server.namenode.CacheManager;
+import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.server.namenode.CachedBlock;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
@@ -198,6 +199,7 @@ public class CacheReplicationMonitor extends Thread implements Closeable {
     scannedBlocks = 0;
     namesystem.writeLock();
     try {
+      resetStatistics();
       rescanCacheDirectives();
       rescanCachedBlockMap();
       blockManager.getDatanodeManager().resetLastCachingDirectiveSentTime();
@@ -206,6 +208,15 @@ public class CacheReplicationMonitor extends Thread implements Closeable {
     }
   }
 
+  private void resetStatistics() {
+    for (CachePool pool: cacheManager.getCachePools()) {
+      pool.resetStatistics();
+    }
+    for (CacheDirective directive: cacheManager.getCacheDirectives()) {
+      directive.resetStatistics();
+    }
+  }
+
   /**
    * Scan all CacheDirectives. Use the information to figure out
    * what cache replication factor each block should have.
@@ -213,11 +224,9 @@ public class CacheReplicationMonitor extends Thread implements Closeable {
   private void rescanCacheDirectives() {
     FSDirectory fsDir = namesystem.getFSDirectory();
     final long now = new Date().getTime();
-    for (CacheDirective directive : cacheManager.getEntriesById().values()) {
-      // Reset the directive
-      directive.clearBytesNeeded();
-      directive.clearBytesCached();
-      directive.clearFilesAffected();
+    for (CacheDirective directive : cacheManager.getCacheDirectives()) {
+      // Reset the directive's statistics
+      directive.resetStatistics();
       // Skip processing this entry if it has expired
       LOG.info("Directive expiry is at " + directive.getExpiryTime());
       if (directive.getExpiryTime() > 0 && directive.getExpiryTime() <= now) {
@@ -263,25 +272,33 @@ public class CacheReplicationMonitor extends Thread implements Closeable {
   /**
    * Apply a CacheDirective to a file.
    *
-   * @param pce The CacheDirective to apply.
+   * @param directive The CacheDirective to apply.
    * @param file The file.
    */
-  private void rescanFile(CacheDirective pce, INodeFile file) {
-    pce.incrementFilesAffected();
+  private void rescanFile(CacheDirective directive, INodeFile file) {
     BlockInfo[] blockInfos = file.getBlocks();
-    long cachedTotal = 0;
+
+    // Increment the "needed" statistics
+    directive.addFilesNeeded(1);
     long neededTotal = 0;
+    for (BlockInfo blockInfo : blockInfos) {
+      long neededByBlock =
+          directive.getReplication() * blockInfo.getNumBytes();
+      neededTotal += neededByBlock;
+    }
+    directive.addBytesNeeded(neededTotal);
+
+    // TODO: Enforce per-pool quotas
+
+    long cachedTotal = 0;
     for (BlockInfo blockInfo : blockInfos) {
       if (!blockInfo.getBlockUCState().equals(BlockUCState.COMPLETE)) {
         // We don't try to cache blocks that are under construction.
         continue;
       }
-      long neededByBlock =
-          pce.getReplication() * blockInfo.getNumBytes();
-      neededTotal += neededByBlock;
       Block block = new Block(blockInfo.getBlockId());
       CachedBlock ncblock = new CachedBlock(block.getBlockId(),
-          pce.getReplication(), mark);
+          directive.getReplication(), mark);
       CachedBlock ocblock = cachedBlocks.get(ncblock);
       if (ocblock == null) {
         cachedBlocks.put(ncblock);
@@ -294,26 +311,30 @@ public class CacheReplicationMonitor extends Thread implements Closeable {
         // both get them added to their bytesCached.
         List<DatanodeDescriptor> cachedOn =
             ocblock.getDatanodes(Type.CACHED);
-        long cachedByBlock = Math.min(cachedOn.size(), pce.getReplication()) *
-            blockInfo.getNumBytes();
+        long cachedByBlock = Math.min(cachedOn.size(),
+            directive.getReplication()) * blockInfo.getNumBytes();
         cachedTotal += cachedByBlock;
 
         if (mark != ocblock.getMark()) {
           // Mark hasn't been set in this scan, so update replication and mark.
-          ocblock.setReplicationAndMark(pce.getReplication(), mark);
+          ocblock.setReplicationAndMark(directive.getReplication(), mark);
         } else {
           // Mark already set in this scan. Set replication to highest value in
           // any CacheDirective that covers this file.
           ocblock.setReplicationAndMark((short)Math.max(
-              pce.getReplication(), ocblock.getReplication()), mark);
+              directive.getReplication(), ocblock.getReplication()), mark);
         }
       }
     }
-    pce.addBytesNeeded(neededTotal);
-    pce.addBytesCached(cachedTotal);
+    // Increment the "cached" statistics
+    directive.addBytesCached(cachedTotal);
+    if (cachedTotal == neededTotal) {
+      directive.addFilesCached(1);
+    }
     if (LOG.isTraceEnabled()) {
-      LOG.debug("Directive " + pce.getId() + " is caching " +
-          file.getFullPathName() + ": " + cachedTotal + "/" + neededTotal);
+      LOG.trace("Directive " + directive.getId() + " is caching " +
+          file.getFullPathName() + ": " + cachedTotal + "/" + neededTotal +
+          " bytes");
     }
   }
 
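For reference, the per-file arithmetic rescanFile() now performs is: bytesNeeded sums replication times block size over all blocks of the file, bytesCached counts at most `replication` cached replicas per block (skipping blocks that are still under construction), and filesCached is incremented only when cachedTotal equals neededTotal. The hedged sketch below reproduces only that arithmetic with made-up block sizes and replica counts; it is not the HDFS implementation.

// Hedged sketch of the per-file statistics arithmetic, with invented inputs.
// Under-construction blocks (skipped for bytesCached in the real code) are not modeled.
public class RescanArithmeticSketch {
  public static void main(String[] args) {
    short replication = 2;                 // directive's cache replication
    long[] blockBytes = {512, 512, 256};   // hypothetical block sizes
    int[] cachedReplicas = {2, 1, 2};      // hypothetical replicas already cached

    long neededTotal = 0;
    long cachedTotal = 0;
    for (int i = 0; i < blockBytes.length; i++) {
      // Needed: every block wants `replication` cached copies.
      neededTotal += replication * blockBytes[i];
      // Cached: count at most `replication` copies per block.
      cachedTotal += Math.min(cachedReplicas[i], replication) * blockBytes[i];
    }
    boolean fileFullyCached = (cachedTotal == neededTotal);

    // bytesNeeded = 2*512 + 2*512 + 2*256 = 2560
    // bytesCached = 2*512 + 1*512 + 2*256 = 2048, so filesCached is not incremented
    System.out.println("bytesNeeded=" + neededTotal
        + " bytesCached=" + cachedTotal
        + " filesCached+=" + (fileFullyCached ? 1 : 0));
  }
}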
@@ -31,6 +31,7 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
@@ -238,9 +239,20 @@ public final class CacheManager {
     return active;
   }
 
-  public TreeMap<Long, CacheDirective> getEntriesById() {
+  /**
+   * @return Unmodifiable view of the collection of CachePools.
+   */
+  public Collection<CachePool> getCachePools() {
     assert namesystem.hasReadLock();
-    return directivesById;
+    return Collections.unmodifiableCollection(cachePools.values());
+  }
+
+  /**
+   * @return Unmodifiable view of the collection of CacheDirectives.
+   */
+  public Collection<CacheDirective> getCacheDirectives() {
+    assert namesystem.hasReadLock();
+    return Collections.unmodifiableCollection(directivesById.values());
   }
 
   @VisibleForTesting
@@ -73,6 +73,11 @@ public final class CachePool {
 
   private int weight;
 
+  private long bytesNeeded;
+  private long bytesCached;
+  private long filesNeeded;
+  private long filesCached;
+
   public final static class DirectiveList
       extends IntrusiveCollection<CacheDirective> {
     private CachePool cachePool;
@@ -202,6 +207,48 @@ public final class CachePool {
     setWeight(weight);
   }
 
+  /**
+   * Resets statistics related to this CachePool
+   */
+  public void resetStatistics() {
+    bytesNeeded = 0;
+    bytesCached = 0;
+    filesNeeded = 0;
+    filesCached = 0;
+  }
+
+  public void addBytesNeeded(long bytes) {
+    bytesNeeded += bytes;
+  }
+
+  public void addBytesCached(long bytes) {
+    bytesCached += bytes;
+  }
+
+  public void addFilesNeeded(long files) {
+    filesNeeded += files;
+  }
+
+  public void addFilesCached(long files) {
+    filesCached += files;
+  }
+
+  public long getBytesNeeded() {
+    return bytesNeeded;
+  }
+
+  public long getBytesCached() {
+    return bytesCached;
+  }
+
+  public long getFilesNeeded() {
+    return filesNeeded;
+  }
+
+  public long getFilesCached() {
+    return filesCached;
+  }
+
   /**
    * Get statistics about this CachePool.
    *
@@ -209,9 +256,10 @@ public final class CachePool {
    */
   private CachePoolStats getStats() {
     return new CachePoolStats.Builder().
-        setBytesNeeded(0).
-        setBytesCached(0).
-        setFilesAffected(0).
+        setBytesNeeded(bytesNeeded).
+        setBytesCached(bytesCached).
+        setFilesNeeded(filesNeeded).
+        setFilesCached(filesCached).
         build();
   }
 
@@ -36,6 +36,7 @@ import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveStats;
 import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
+import org.apache.hadoop.hdfs.protocol.CachePoolStats;
 import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.tools.TableListing.Justification;
 import org.apache.hadoop.ipc.RemoteException;
@@ -477,9 +478,10 @@ public class CacheAdmin extends Configured implements Tool {
           addField("EXPIRY", Justification.LEFT).
           addField("PATH", Justification.LEFT);
       if (printStats) {
-        tableBuilder.addField("NEEDED", Justification.RIGHT).
-            addField("CACHED", Justification.RIGHT).
-            addField("FILES", Justification.RIGHT);
+        tableBuilder.addField("BYTES_NEEDED", Justification.RIGHT).
+            addField("BYTES_CACHED", Justification.RIGHT).
+            addField("FILES_NEEDED", Justification.RIGHT).
+            addField("FILES_CACHED", Justification.RIGHT);
       }
       TableListing tableListing = tableBuilder.build();
 
@@ -507,7 +509,8 @@ public class CacheAdmin extends Configured implements Tool {
         if (printStats) {
           row.add("" + stats.getBytesNeeded());
           row.add("" + stats.getBytesCached());
-          row.add("" + stats.getFilesAffected());
+          row.add("" + stats.getFilesNeeded());
+          row.add("" + stats.getFilesCached());
         }
         tableListing.addRow(row.toArray(new String[0]));
         numEntries++;
@@ -769,13 +772,14 @@ public class CacheAdmin extends Configured implements Tool {
 
     @Override
     public String getShortUsage() {
-      return "[" + getName() + " [name]]\n";
+      return "[" + getName() + " [-stats] [<name>]]\n";
     }
 
     @Override
     public String getLongUsage() {
       TableListing listing = getOptionDescriptionListing();
-      listing.addRow("[name]", "If specified, list only the named cache pool.");
+      listing.addRow("-stats", "Display additional cache pool statistics.");
+      listing.addRow("<name>", "If specified, list only the named cache pool.");
 
       return getShortUsage() + "\n" +
           WordUtils.wrap("Display information about one or more cache pools, " +
@@ -787,6 +791,7 @@ public class CacheAdmin extends Configured implements Tool {
     @Override
     public int run(Configuration conf, List<String> args) throws IOException {
       String name = StringUtils.popFirstNonOption(args);
+      final boolean printStats = StringUtils.popOption("-stats", args);
       if (!args.isEmpty()) {
         System.err.print("Can't understand arguments: " +
             Joiner.on(" ").join(args) + "\n");
@@ -794,28 +799,42 @@ public class CacheAdmin extends Configured implements Tool {
         return 1;
       }
       DistributedFileSystem dfs = getDFS(conf);
-      TableListing listing = new TableListing.Builder().
+      TableListing.Builder builder = new TableListing.Builder().
           addField("NAME", Justification.LEFT).
           addField("OWNER", Justification.LEFT).
           addField("GROUP", Justification.LEFT).
           addField("MODE", Justification.LEFT).
-          addField("WEIGHT", Justification.RIGHT).
-          build();
+          addField("WEIGHT", Justification.RIGHT);
+      if (printStats) {
+        builder.
+            addField("BYTES_NEEDED", Justification.RIGHT).
+            addField("BYTES_CACHED", Justification.RIGHT).
+            addField("FILES_NEEDED", Justification.RIGHT).
+            addField("FILES_CACHED", Justification.RIGHT);
+      }
+      TableListing listing = builder.build();
       int numResults = 0;
       try {
         RemoteIterator<CachePoolEntry> iter = dfs.listCachePools();
         while (iter.hasNext()) {
           CachePoolEntry entry = iter.next();
           CachePoolInfo info = entry.getInfo();
-          String[] row = new String[5];
+          LinkedList<String> row = new LinkedList<String>();
           if (name == null || info.getPoolName().equals(name)) {
-            row[0] = info.getPoolName();
-            row[1] = info.getOwnerName();
-            row[2] = info.getGroupName();
-            row[3] = info.getMode() != null ? info.getMode().toString() : null;
-            row[4] =
-                info.getWeight() != null ? info.getWeight().toString() : null;
-            listing.addRow(row);
+            row.add(info.getPoolName());
+            row.add(info.getOwnerName());
+            row.add(info.getGroupName());
+            row.add(info.getMode() != null ? info.getMode().toString() : null);
+            row.add(
+                info.getWeight() != null ? info.getWeight().toString() : null);
+            if (printStats) {
+              CachePoolStats stats = entry.getStats();
+              row.add(Long.toString(stats.getBytesNeeded()));
+              row.add(Long.toString(stats.getBytesCached()));
+              row.add(Long.toString(stats.getFilesNeeded()));
+              row.add(Long.toString(stats.getFilesCached()));
+            }
+            listing.addRow(row.toArray(new String[] {}));
             ++numResults;
             if (name != null) {
               break;
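Besides the -listPools -stats and -listDirectives -stats table output wired up above, the new pool counters can also be read programmatically through listCachePools(), the same way the tests later in this commit consume them. A hedged client-side sketch follows; the fs URI is a placeholder and error handling is omitted.

// Hedged sketch of reading the new CachePoolStats counters from a client,
// modeled on the test code in this commit. The namenode URI is a placeholder.
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
import org.apache.hadoop.hdfs.protocol.CachePoolStats;

public class ListPoolStatsSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DistributedFileSystem dfs = (DistributedFileSystem)
        FileSystem.get(URI.create("hdfs://namenode:8020"), conf);
    RemoteIterator<CachePoolEntry> it = dfs.listCachePools();
    while (it.hasNext()) {
      CachePoolEntry entry = it.next();
      CachePoolStats stats = entry.getStats();
      // Print the same four counters the -listPools -stats columns show.
      System.out.println(entry.getInfo().getPoolName()
          + " bytesNeeded=" + stats.getBytesNeeded()
          + " bytesCached=" + stats.getBytesCached()
          + " filesNeeded=" + stats.getFilesNeeded()
          + " filesCached=" + stats.getFilesCached());
    }
  }
}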
@@ -379,8 +379,9 @@ message CacheDirectiveInfoExpirationProto {
 message CacheDirectiveStatsProto {
   required int64 bytesNeeded = 1;
   required int64 bytesCached = 2;
-  required int64 filesAffected = 3;
-  required bool hasExpired = 4;
+  required int64 filesNeeded = 3;
+  required int64 filesCached = 4;
+  required bool hasExpired = 5;
 }
 
 message AddCacheDirectiveRequestProto {
@@ -431,7 +432,8 @@ message CachePoolInfoProto {
 message CachePoolStatsProto {
   required int64 bytesNeeded = 1;
   required int64 bytesCached = 2;
-  required int64 filesAffected = 3;
+  required int64 filesNeeded = 3;
+  required int64 filesCached = 4;
 }
 
 message AddCachePoolRequestProto {
@@ -62,6 +62,7 @@ import org.apache.hadoop.hdfs.protocol.CacheDirectiveStats;
 import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration;
+import org.apache.hadoop.hdfs.protocol.CachePoolStats;
 import org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.CachedBlocksList.Type;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
@@ -623,45 +624,111 @@ public class TestCacheDirectives {
     }, 500, 60000);
   }
 
-  private static void waitForCachedStats(final DistributedFileSystem dfs,
-      final long targetFilesAffected, final long targetBytesNeeded,
-      final long targetBytesCached,
+  private static void waitForCacheDirectiveStats(final DistributedFileSystem dfs,
+      final long targetBytesNeeded, final long targetBytesCached,
+      final long targetFilesNeeded, final long targetFilesCached,
       final CacheDirectiveInfo filter, final String infoString)
       throws Exception {
-    LOG.info("Polling listDirectives{" +
-        ((filter == null) ? "ALL" : filter.toString()) +
-        " for " + targetFilesAffected + " targetFilesAffected, " +
-        targetBytesNeeded + " targetBytesNeeded, " +
-        targetBytesCached + " targetBytesCached");
-    GenericTestUtils.waitFor(new Supplier<Boolean>() {
-      @Override
-      public Boolean get() {
-        RemoteIterator<CacheDirectiveEntry> iter = null;
-        CacheDirectiveEntry entry = null;
+    LOG.info("Polling listCacheDirectives " +
+        ((filter == null) ? "ALL" : filter.toString()) + " for " +
+        targetBytesNeeded + " targetBytesNeeded, " +
+        targetBytesCached + " targetBytesCached, " +
+        targetFilesNeeded + " targetFilesNeeded, " +
+        targetFilesCached + " targetFilesCached");
+    GenericTestUtils.waitFor(new Supplier<Boolean>() {
+      @Override
+      public Boolean get() {
+        RemoteIterator<CacheDirectiveEntry> iter = null;
+        CacheDirectiveEntry entry = null;
+        try {
+          iter = dfs.listCacheDirectives(filter);
+          entry = iter.next();
+        } catch (IOException e) {
+          fail("got IOException while calling " +
+              "listCacheDirectives: " + e.getMessage());
+        }
+        Assert.assertNotNull(entry);
+        CacheDirectiveStats stats = entry.getStats();
+        if ((targetBytesNeeded == stats.getBytesNeeded()) &&
+            (targetBytesCached == stats.getBytesCached()) &&
+            (targetFilesNeeded == stats.getFilesNeeded()) &&
+            (targetFilesCached == stats.getFilesCached())) {
+          return true;
+        } else {
+          LOG.info(infoString + ": " +
+              "filesNeeded: " +
+              stats.getFilesNeeded() + "/" + targetFilesNeeded +
+              ", filesCached: " +
+              stats.getFilesCached() + "/" + targetFilesCached +
+              ", bytesNeeded: " +
+              stats.getBytesNeeded() + "/" + targetBytesNeeded +
+              ", bytesCached: " +
+              stats.getBytesCached() + "/" + targetBytesCached);
+          return false;
+        }
+      }
+    }, 500, 60000);
+  }
+
+  private static void waitForCachePoolStats(final DistributedFileSystem dfs,
+      final long targetBytesNeeded, final long targetBytesCached,
+      final long targetFilesNeeded, final long targetFilesCached,
+      final CachePoolInfo pool, final String infoString)
+      throws Exception {
+    LOG.info("Polling listCachePools " + pool.toString() + " for " +
+        targetBytesNeeded + " targetBytesNeeded, " +
+        targetBytesCached + " targetBytesCached, " +
+        targetFilesNeeded + " targetFilesNeeded, " +
+        targetFilesCached + " targetFilesCached");
+    GenericTestUtils.waitFor(new Supplier<Boolean>() {
+      @Override
+      public Boolean get() {
+        RemoteIterator<CachePoolEntry> iter = null;
+        try {
+          iter = dfs.listCachePools();
+        } catch (IOException e) {
+          fail("got IOException while calling " +
+              "listCachePools: " + e.getMessage());
+        }
+        while (true) {
+          CachePoolEntry entry = null;
           try {
-            iter = dfs.listCacheDirectives(filter);
+            if (!iter.hasNext()) {
+              break;
+            }
             entry = iter.next();
           } catch (IOException e) {
-            fail("got IOException while calling " +
-                "listCacheDirectives: " + e.getMessage());
+            fail("got IOException while iterating through " +
+                "listCachePools: " + e.getMessage());
           }
-          Assert.assertNotNull(entry);
-          CacheDirectiveStats stats = entry.getStats();
-          if ((targetFilesAffected == stats.getFilesAffected()) &&
-              (targetBytesNeeded == stats.getBytesNeeded()) &&
-              (targetBytesCached == stats.getBytesCached())) {
+          if (entry == null) {
+            break;
+          }
+          if (!entry.getInfo().getPoolName().equals(pool.getPoolName())) {
+            continue;
+          }
+          CachePoolStats stats = entry.getStats();
+          if ((targetBytesNeeded == stats.getBytesNeeded()) &&
+              (targetBytesCached == stats.getBytesCached()) &&
+              (targetFilesNeeded == stats.getFilesNeeded()) &&
+              (targetFilesCached == stats.getFilesCached())) {
             return true;
           } else {
-            LOG.info(infoString + ": filesAffected: " +
-                stats.getFilesAffected() + "/" + targetFilesAffected +
-                ", bytesNeeded: " +
+            LOG.info(infoString + ": " +
+                "filesNeeded: " +
+                stats.getFilesNeeded() + "/" + targetFilesNeeded +
+                ", filesCached: " +
+                stats.getFilesCached() + "/" + targetFilesCached +
+                ", bytesNeeded: " +
                 stats.getBytesNeeded() + "/" + targetBytesNeeded +
                 ", bytesCached: " +
                 stats.getBytesCached() + "/" + targetBytesCached);
             return false;
           }
         }
+        return false;
+      }
     }, 500, 60000);
   }
 
   private static void checkNumCachedReplicas(final DistributedFileSystem dfs,
@@ -837,7 +904,8 @@ public class TestCacheDirectives {
       NameNode namenode = cluster.getNameNode();
       // Create the pool
       final String pool = "friendlyPool";
-      dfs.addCachePool(new CachePoolInfo(pool));
+      final CachePoolInfo poolInfo = new CachePoolInfo(pool);
+      dfs.addCachePool(poolInfo);
       // Create some test files
       final List<Path> paths = new LinkedList<Path>();
       paths.add(new Path("/foo/bar"));
@@ -853,6 +921,7 @@ public class TestCacheDirectives {
       }
       waitForCachedBlocks(namenode, 0, 0,
           "testWaitForCachedReplicasInDirectory:0");
+
       // cache entire directory
       long id = dfs.addCacheDirective(
             new CacheDirectiveInfo.Builder().
@@ -861,14 +930,20 @@ public class TestCacheDirectives {
               setPool(pool).
               build());
       waitForCachedBlocks(namenode, 4, 8,
-          "testWaitForCachedReplicasInDirectory:1");
+          "testWaitForCachedReplicasInDirectory:1:blocks");
       // Verify that listDirectives gives the stats we want.
-      waitForCachedStats(dfs, 2,
-          8 * BLOCK_SIZE, 8 * BLOCK_SIZE,
+      waitForCacheDirectiveStats(dfs,
+          4 * numBlocksPerFile * BLOCK_SIZE, 4 * numBlocksPerFile * BLOCK_SIZE,
+          2, 2,
           new CacheDirectiveInfo.Builder().
               setPath(new Path("/foo")).
               build(),
-          "testWaitForCachedReplicasInDirectory:2");
+          "testWaitForCachedReplicasInDirectory:1:directive");
+      waitForCachePoolStats(dfs,
+          4 * numBlocksPerFile * BLOCK_SIZE, 4 * numBlocksPerFile * BLOCK_SIZE,
+          2, 2,
+          poolInfo, "testWaitForCachedReplicasInDirectory:1:pool");
+
       long id2 = dfs.addCacheDirective(
             new CacheDirectiveInfo.Builder().
               setPath(new Path("/foo/bar")).
@@ -877,28 +952,42 @@ public class TestCacheDirectives {
               build());
       // wait for an additional 2 cached replicas to come up
       waitForCachedBlocks(namenode, 4, 10,
-          "testWaitForCachedReplicasInDirectory:3");
+          "testWaitForCachedReplicasInDirectory:2:blocks");
       // the directory directive's stats are unchanged
-      waitForCachedStats(dfs, 2,
-          8 * BLOCK_SIZE, 8 * BLOCK_SIZE,
+      waitForCacheDirectiveStats(dfs,
+          4 * numBlocksPerFile * BLOCK_SIZE, 4 * numBlocksPerFile * BLOCK_SIZE,
+          2, 2,
           new CacheDirectiveInfo.Builder().
              setPath(new Path("/foo")).
              build(),
-          "testWaitForCachedReplicasInDirectory:4");
+          "testWaitForCachedReplicasInDirectory:2:directive-1");
       // verify /foo/bar's stats
-      waitForCachedStats(dfs, 1,
+      waitForCacheDirectiveStats(dfs,
           4 * numBlocksPerFile * BLOCK_SIZE,
           // only 3 because the file only has 3 replicas, not 4 as requested.
           3 * numBlocksPerFile * BLOCK_SIZE,
+          1,
+          // only 0 because the file can't be fully cached
+          0,
           new CacheDirectiveInfo.Builder().
               setPath(new Path("/foo/bar")).
               build(),
-          "testWaitForCachedReplicasInDirectory:5");
+          "testWaitForCachedReplicasInDirectory:2:directive-2");
+      waitForCachePoolStats(dfs,
+          (4+4) * numBlocksPerFile * BLOCK_SIZE,
+          (4+3) * numBlocksPerFile * BLOCK_SIZE,
+          3, 2,
+          poolInfo, "testWaitForCachedReplicasInDirectory:2:pool");
+
       // remove and watch numCached go to 0
       dfs.removeCacheDirective(id);
       dfs.removeCacheDirective(id2);
       waitForCachedBlocks(namenode, 0, 0,
-          "testWaitForCachedReplicasInDirectory:6");
+          "testWaitForCachedReplicasInDirectory:3:blocks");
+      waitForCachePoolStats(dfs,
+          0, 0,
+          0, 0,
+          poolInfo, "testWaitForCachedReplicasInDirectory:3:pool");
     } finally {
       cluster.shutdown();
     }
@@ -399,5 +399,63 @@
       </comparators>
     </test>
+
+    <test> <!--Tested -->
+      <description>Testing listing cache pool statistics</description>
+      <test-commands>
+        <cache-admin-command>-addPool foo -owner bob -group bob -mode 0664</cache-admin-command>
+        <cache-admin-command>-addPool bar -owner alice -group alicegroup -mode 0755</cache-admin-command>
+        <cache-admin-command>-listPools -stats</cache-admin-command>
+      </test-commands>
+      <cleanup-commands>
+        <cache-admin-command>-removePool foo</cache-admin-command>
+        <cache-admin-command>-removePool bar</cache-admin-command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>Found 2 results.</expected-output>
+        </comparator>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>bar alice alicegroup rwxr-xr-x 100 0 0 0 0</expected-output>
+        </comparator>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>foo bob bob rw-rw-r-- 100 0 0 0 0</expected-output>
+        </comparator>
+      </comparators>
+    </test>
+
+    <test> <!--Tested -->
+      <description>Testing listing cache directive statistics</description>
+      <test-commands>
+        <cache-admin-command>-addPool pool1</cache-admin-command>
+        <cache-admin-command>-addDirective -path /foo -pool pool1 -ttl 2d</cache-admin-command>
+        <cache-admin-command>-addDirective -path /bar -pool pool1 -ttl 24h</cache-admin-command>
+        <cache-admin-command>-addDirective -path /baz -replication 2 -pool pool1 -ttl 60m</cache-admin-command>
+        <cache-admin-command>-listDirectives -pool pool1 -stats</cache-admin-command>
+      </test-commands>
+      <cleanup-commands>
+        <cache-admin-command>-removePool pool1</cache-admin-command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>Found 3 entries</expected-output>
+        </comparator>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>/foo 0 0 0 0</expected-output>
+        </comparator>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>/bar 0 0 0 0</expected-output>
+        </comparator>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>/baz 0 0 0 0</expected-output>
+        </comparator>
+      </comparators>
+    </test>
   </tests>
 </configuration>