HDFS-5430. Support TTL on CacheDirectives. Contributed by Andrew Wang.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1546301 13f79535-47bb-0310-9956-ffa450edef68
parent bb11d47758
commit 9da451cac5
@@ -226,6 +226,8 @@ Trunk (Unreleased)
 
     HDFS-5537. Remove FileWithSnapshot interface. (jing9 via szetszwo)
 
+    HDFS-5430. Support TTL on CacheDirectives. (wang)
+
   OPTIMIZATIONS
 
     HDFS-5349. DNA_CACHE and DNA_UNCACHE should be by blockId only. (cmccabe)

@@ -38,12 +38,15 @@ import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.security.SecureRandom;
+import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.Date;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
@@ -1426,4 +1429,64 @@ public class DFSUtil {
         sslConf.get("ssl.server.truststore.password"),
         sslConf.get("ssl.server.truststore.type", "jks"));
   }
 
+  /**
+   * Converts a Date into an ISO-8601 formatted datetime string.
+   */
+  public static String dateToIso8601String(Date date) {
+    SimpleDateFormat df =
+        new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ", Locale.ENGLISH);
+    return df.format(date);
+  }
+
+  /**
+   * Converts a time duration in milliseconds into DDD:HH:MM:SS format.
+   */
+  public static String durationToString(long durationMs) {
+    Preconditions.checkArgument(durationMs >= 0, "Invalid negative duration");
+    // Chop off the milliseconds
+    long durationSec = durationMs / 1000;
+    final int secondsPerMinute = 60;
+    final int secondsPerHour = 60*60;
+    final int secondsPerDay = 60*60*24;
+    final long days = durationSec / secondsPerDay;
+    durationSec -= days * secondsPerDay;
+    final long hours = durationSec / secondsPerHour;
+    durationSec -= hours * secondsPerHour;
+    final long minutes = durationSec / secondsPerMinute;
+    durationSec -= minutes * secondsPerMinute;
+    final long seconds = durationSec;
+    return String.format("%03d:%02d:%02d:%02d", days, hours, minutes, seconds);
+  }
+
+  /**
+   * Converts a relative time string into a duration in milliseconds.
+   */
+  public static long parseRelativeTime(String relTime) throws IOException {
+    if (relTime.length() < 2) {
+      throw new IOException("Unable to parse relative time value of " + relTime
+          + ": too short");
+    }
+    String ttlString = relTime.substring(0, relTime.length()-1);
+    int ttl;
+    try {
+      ttl = Integer.parseInt(ttlString);
+    } catch (NumberFormatException e) {
+      throw new IOException("Unable to parse relative time value of " + relTime
+          + ": " + ttlString + " is not a number");
+    }
+    if (relTime.endsWith("s")) {
+      // pass
+    } else if (relTime.endsWith("m")) {
+      ttl *= 60;
+    } else if (relTime.endsWith("h")) {
+      ttl *= 60*60;
+    } else if (relTime.endsWith("d")) {
+      ttl *= 60*60*24;
+    } else {
+      throw new IOException("Unable to parse relative time value of " + relTime
+          + ": unknown time unit " + relTime.charAt(relTime.length() - 1));
+    }
+    return ttl*1000;
+  }
 }

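Illustration (not part of the patch): a minimal usage sketch of the three DFSUtil helpers added above. The class name TtlHelpersExample and the sample values are made up; the helper signatures themselves are taken from the hunk.

    import java.io.IOException;
    import java.util.Date;

    import org.apache.hadoop.hdfs.DFSUtil;

    public class TtlHelpersExample {
      public static void main(String[] args) throws IOException {
        // "30m" parses to 30 * 60 * 1000 ms; supported suffixes are s, m, h, d.
        long ttlMs = DFSUtil.parseRelativeTime("30m");

        // DDD:HH:MM:SS rendering; 30 minutes prints as "000:00:30:00".
        System.out.println(DFSUtil.durationToString(ttlMs));

        // ISO-8601 rendering of an absolute expiry instant on the local clock.
        Date expiry = new Date(new Date().getTime() + ttlMs);
        System.out.println(DFSUtil.dateToIso8601String(expiry));
      }
    }
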
@@ -17,9 +17,14 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.util.Date;
+
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.util.IntrusiveCollection;
 import org.apache.hadoop.util.IntrusiveCollection.Element;
@@ -27,7 +32,7 @@ import org.apache.hadoop.util.IntrusiveCollection.Element;
 import com.google.common.base.Preconditions;
 
 /**
- * Represents an entry in the PathBasedCache on the NameNode.
+ * Namenode class that tracks state related to a cached path.
  *
  * This is an implementation class, not part of the public API.
  */
@@ -37,6 +42,8 @@ public final class CacheDirective implements IntrusiveCollection.Element {
   private final String path;
   private final short replication;
   private CachePool pool;
+  private final long expiryTime;
+
   private long bytesNeeded;
   private long bytesCached;
   private long filesAffected;
@@ -44,13 +51,13 @@ public final class CacheDirective implements IntrusiveCollection.Element {
   private Element next;
 
   public CacheDirective(long id, String path,
-      short replication) {
+      short replication, long expiryTime) {
     Preconditions.checkArgument(id > 0);
     this.id = id;
+    this.path = checkNotNull(path);
     Preconditions.checkArgument(replication > 0);
-    this.path = path;
     this.replication = replication;
-    Preconditions.checkNotNull(path);
+    this.expiryTime = expiryTime;
     this.bytesNeeded = 0;
     this.bytesCached = 0;
     this.filesAffected = 0;
@@ -64,20 +71,40 @@ public final class CacheDirective implements IntrusiveCollection.Element {
     return path;
   }
 
-  public CachePool getPool() {
-    return pool;
-  }
-
   public short getReplication() {
     return replication;
   }
 
+  public CachePool getPool() {
+    return pool;
+  }
+
+  /**
+   * @return When this directive expires, in milliseconds since Unix epoch
+   */
+  public long getExpiryTime() {
+    return expiryTime;
+  }
+
+  /**
+   * @return When this directive expires, as an ISO-8601 formatted string.
+   */
+  public String getExpiryTimeString() {
+    return DFSUtil.dateToIso8601String(new Date(expiryTime));
+  }
+
+  /**
+   * Returns a {@link CacheDirectiveInfo} based on this CacheDirective.
+   * <p>
+   * This always sets an absolute expiry time, never a relative TTL.
+   */
   public CacheDirectiveInfo toInfo() {
     return new CacheDirectiveInfo.Builder().
       setId(id).
       setPath(new Path(path)).
       setReplication(replication).
       setPool(pool.getPoolName()).
+      setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(expiryTime)).
       build();
   }
 
@@ -86,6 +113,7 @@ public final class CacheDirective implements IntrusiveCollection.Element {
       setBytesNeeded(bytesNeeded).
       setBytesCached(bytesCached).
       setFilesAffected(filesAffected).
+      setHasExpired(new Date().getTime() > expiryTime).
       build();
   }
 
@@ -100,6 +128,7 @@ public final class CacheDirective implements IntrusiveCollection.Element {
       append(", path:").append(path).
       append(", replication:").append(replication).
       append(", pool:").append(pool).
+      append(", expiryTime: ").append(getExpiryTimeString()).
       append(", bytesNeeded:").append(bytesNeeded).
       append(", bytesCached:").append(bytesCached).
       append(", filesAffected:").append(filesAffected).

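Illustration (not part of the patch): a small sketch of the expiry state added to CacheDirective above. CacheDirective is a NameNode implementation class, so this is purely illustrative; the id, path and times are made-up values, and the pool is left unset so only the expiry accessors are exercised.

    import org.apache.hadoop.hdfs.protocol.CacheDirective;

    public class CacheDirectiveExpiryExample {
      public static void main(String[] args) {
        long expiryMs = System.currentTimeMillis() + 60 * 60 * 1000; // one hour out
        // New 4-argument constructor from this patch: (id, path, replication, expiryTime).
        CacheDirective directive = new CacheDirective(1, "/cached/data", (short) 3, expiryMs);
        System.out.println(directive.getExpiryTime());        // absolute ms since the epoch
        System.out.println(directive.getExpiryTimeString());  // same instant, ISO-8601 formatted
      }
    }
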
@@ -17,11 +17,14 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
+import java.util.Date;
+
 import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSUtil;
 
 /**
  * Describes a path-based cache directive.
@@ -37,6 +40,7 @@ public class CacheDirectiveInfo {
     private Path path;
     private Short replication;
     private String pool;
+    private Expiration expiration;
 
     /**
      * Builds a new CacheDirectiveInfo populated with the set properties.
@@ -44,7 +48,7 @@ public class CacheDirectiveInfo {
      * @return New CacheDirectiveInfo.
      */
     public CacheDirectiveInfo build() {
-      return new CacheDirectiveInfo(id, path, replication, pool);
+      return new CacheDirectiveInfo(id, path, replication, pool, expiration);
     }
 
     /**
@@ -62,6 +66,7 @@ public class CacheDirectiveInfo {
       this.path = directive.getPath();
       this.replication = directive.getReplication();
       this.pool = directive.getPool();
+      this.expiration = directive.getExpiration();
     }
 
     /**
@@ -107,18 +112,134 @@ public class CacheDirectiveInfo {
       this.pool = pool;
       return this;
     }
+
+    /**
+     * Sets when the CacheDirective should expire. A
+     * {@link CacheDirectiveInfo.Expiration} can specify either an absolute or
+     * relative expiration time.
+     *
+     * @param expiration when this CacheDirective should expire
+     * @return This builder, for call chaining
+     */
+    public Builder setExpiration(Expiration expiration) {
+      this.expiration = expiration;
+      return this;
+    }
   }
 
+  /**
+   * Denotes a relative or absolute expiration time for a CacheDirective. Use
+   * factory methods {@link CacheDirectiveInfo.Expiration#newAbsolute(Date)} and
+   * {@link CacheDirectiveInfo.Expiration#newRelative(long)} to create an
+   * Expiration.
+   * <p>
+   * In either case, the server-side clock is used to determine when a
+   * CacheDirective expires.
+   */
+  public static class Expiration {
+
+    /** Denotes a CacheDirectiveInfo that never expires **/
+    public static final int EXPIRY_NEVER = -1;
+
+    /**
+     * Create a new relative Expiration.
+     *
+     * @param ms how long until the CacheDirective expires, in milliseconds
+     * @return A relative Expiration
+     */
+    public static Expiration newRelative(long ms) {
+      return new Expiration(ms, true);
+    }
+
+    /**
+     * Create a new absolute Expiration.
+     *
+     * @param date when the CacheDirective expires
+     * @return An absolute Expiration
+     */
+    public static Expiration newAbsolute(Date date) {
+      return new Expiration(date.getTime(), false);
+    }
+
+    /**
+     * Create a new absolute Expiration.
+     *
+     * @param ms when the CacheDirective expires, in milliseconds since the Unix
+     *          epoch.
+     * @return An absolute Expiration
+     */
+    public static Expiration newAbsolute(long ms) {
+      return new Expiration(ms, false);
+    }
+
+    private final long ms;
+    private final boolean isRelative;
+
+    private Expiration(long ms, boolean isRelative) {
+      this.ms = ms;
+      this.isRelative = isRelative;
+    }
+
+    /**
+     * @return true if Expiration was specified as a relative duration, false if
+     *         specified as an absolute time.
+     */
+    public boolean isRelative() {
+      return isRelative;
+    }
+
+    /**
+     * @return The raw underlying millisecond value, either a relative duration
+     *         or an absolute time as milliseconds since the Unix epoch.
+     */
+    public long getMillis() {
+      return ms;
+    }
+
+    /**
+     * @return Expiration time as a {@link Date} object. This converts a
+     *         relative Expiration into an absolute Date based on the local
+     *         clock.
+     */
+    public Date getAbsoluteDate() {
+      return new Date(getAbsoluteMillis());
+    }
+
+    /**
+     * @return Expiration time in milliseconds from the Unix epoch. This
+     *         converts a relative Expiration into an absolute time based on the
+     *         local clock.
+     */
+    public long getAbsoluteMillis() {
+      if (!isRelative) {
+        return ms;
+      } else {
+        return new Date().getTime() + ms;
+      }
+    }
+
+    @Override
+    public String toString() {
+      if (isRelative) {
+        return DFSUtil.durationToString(ms);
+      }
+      return DFSUtil.dateToIso8601String(new Date(ms));
+    }
+  }
+
   private final Long id;
   private final Path path;
   private final Short replication;
   private final String pool;
+  private final Expiration expiration;
 
-  CacheDirectiveInfo(Long id, Path path, Short replication, String pool) {
+  CacheDirectiveInfo(Long id, Path path, Short replication, String pool,
+      Expiration expiration) {
     this.id = id;
     this.path = path;
     this.replication = replication;
     this.pool = pool;
+    this.expiration = expiration;
   }
 
   /**
@@ -148,7 +269,14 @@ public class CacheDirectiveInfo {
   public String getPool() {
     return pool;
   }
 
+  /**
+   * @return When this directive expires.
+   */
+  public Expiration getExpiration() {
+    return expiration;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (o == null) {
@@ -162,6 +290,7 @@ public class CacheDirectiveInfo {
       append(getPath(), other.getPath()).
       append(getReplication(), other.getReplication()).
       append(getPool(), other.getPool()).
+      append(getExpiration(), other.getExpiration()).
       isEquals();
   }
 
@@ -171,6 +300,7 @@ public class CacheDirectiveInfo {
       append(path).
       append(replication).
       append(pool).
+      append(expiration).
       hashCode();
   }
 
@@ -181,19 +311,23 @@ public class CacheDirectiveInfo {
     String prefix = "";
     if (id != null) {
       builder.append(prefix).append("id: ").append(id);
-      prefix = ",";
+      prefix = ", ";
     }
     if (path != null) {
       builder.append(prefix).append("path: ").append(path);
-      prefix = ",";
+      prefix = ", ";
     }
     if (replication != null) {
       builder.append(prefix).append("replication: ").append(replication);
-      prefix = ",";
+      prefix = ", ";
     }
     if (pool != null) {
       builder.append(prefix).append("pool: ").append(pool);
-      prefix = ",";
+      prefix = ", ";
+    }
+    if (expiration != null) {
+      builder.append(prefix).append("expiration: ").append(expiration);
+      prefix = ", ";
     }
     builder.append("}");
     return builder.toString();

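Illustration (not part of the patch): a sketch of the new Expiration API above, building one directive with a relative TTL and one with an absolute deadline. The path, pool name and durations are made up; the Builder and Expiration methods are the ones introduced or already present in CacheDirectiveInfo.

    import java.util.Date;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

    public class ExpirationExample {
      public static void main(String[] args) {
        // Relative TTL: expires one hour after the server-side clock reads "now".
        CacheDirectiveInfo relative = new CacheDirectiveInfo.Builder()
            .setPath(new Path("/warm/reports"))
            .setReplication((short) 2)
            .setPool("reports-pool")
            .setExpiration(CacheDirectiveInfo.Expiration.newRelative(60 * 60 * 1000L))
            .build();

        // Absolute deadline: expires at a fixed instant regardless of submission time.
        CacheDirectiveInfo absolute = new CacheDirectiveInfo.Builder()
            .setPath(new Path("/warm/reports"))
            .setReplication((short) 2)
            .setPool("reports-pool")
            .setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(
                new Date(System.currentTimeMillis() + 24L * 60 * 60 * 1000)))
            .build();

        // toString() renders a relative Expiration as DDD:HH:MM:SS and an absolute one as ISO-8601.
        System.out.println(relative.getExpiration() + " / " + absolute.getExpiration());
      }
    }
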
@@ -30,6 +30,7 @@ public class CacheDirectiveStats {
     private long bytesNeeded;
     private long bytesCached;
     private long filesAffected;
+    private boolean hasExpired;
 
     /**
      * Builds a new CacheDirectiveStats populated with the set properties.
@@ -37,7 +38,8 @@ public class CacheDirectiveStats {
      * @return New CacheDirectiveStats.
      */
     public CacheDirectiveStats build() {
-      return new CacheDirectiveStats(bytesNeeded, bytesCached, filesAffected);
+      return new CacheDirectiveStats(bytesNeeded, bytesCached, filesAffected,
+          hasExpired);
     }
 
     /**
@@ -52,7 +54,7 @@ public class CacheDirectiveStats {
      * @param bytesNeeded The bytes needed.
      * @return This builder, for call chaining.
      */
-    public Builder setBytesNeeded(Long bytesNeeded) {
+    public Builder setBytesNeeded(long bytesNeeded) {
       this.bytesNeeded = bytesNeeded;
       return this;
     }
@@ -63,7 +65,7 @@ public class CacheDirectiveStats {
      * @param bytesCached The bytes cached.
      * @return This builder, for call chaining.
      */
-    public Builder setBytesCached(Long bytesCached) {
+    public Builder setBytesCached(long bytesCached) {
       this.bytesCached = bytesCached;
       return this;
     }
@@ -74,21 +76,34 @@ public class CacheDirectiveStats {
      * @param filesAffected The files affected.
      * @return This builder, for call chaining.
      */
-    public Builder setFilesAffected(Long filesAffected) {
+    public Builder setFilesAffected(long filesAffected) {
       this.filesAffected = filesAffected;
       return this;
     }
+
+    /**
+     * Sets whether this directive has expired.
+     *
+     * @param hasExpired if this directive has expired
+     * @return This builder, for call chaining.
+     */
+    public Builder setHasExpired(boolean hasExpired) {
+      this.hasExpired = hasExpired;
+      return this;
+    }
   }
 
   private final long bytesNeeded;
   private final long bytesCached;
   private final long filesAffected;
+  private final boolean hasExpired;
 
   private CacheDirectiveStats(long bytesNeeded, long bytesCached,
-      long filesAffected) {
+      long filesAffected, boolean hasExpired) {
     this.bytesNeeded = bytesNeeded;
     this.bytesCached = bytesCached;
     this.filesAffected = filesAffected;
+    this.hasExpired = hasExpired;
   }
 
   /**
@@ -112,6 +127,13 @@ public class CacheDirectiveStats {
     return filesAffected;
   }
 
+  /**
+   * @return Whether this directive has expired.
+   */
+  public boolean hasExpired() {
+    return hasExpired;
+  }
+
   @Override
   public String toString() {
     StringBuilder builder = new StringBuilder();
@@ -119,6 +141,7 @@ public class CacheDirectiveStats {
     builder.append("bytesNeeded: ").append(bytesNeeded);
     builder.append(", ").append("bytesCached: ").append(bytesCached);
     builder.append(", ").append("filesAffected: ").append(filesAffected);
+    builder.append(", ").append("hasExpired: ").append(hasExpired);
     builder.append("}");
     return builder.toString();
   }

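Illustration (not part of the patch): a sketch of the widened stats builder above with arbitrary values. It assumes CacheDirectiveStats.Builder has a public no-argument constructor, which is not shown in these hunks; the setters and hasExpired() are the ones in the diff.

    import org.apache.hadoop.hdfs.protocol.CacheDirectiveStats;

    public class StatsExample {
      public static void main(String[] args) {
        CacheDirectiveStats stats = new CacheDirectiveStats.Builder()
            .setBytesNeeded(4096L)
            .setBytesCached(1024L)
            .setFilesAffected(2L)
            .setHasExpired(false)   // new in this patch
            .build();
        // toString() now also reports hasExpired.
        System.out.println(stats.hasExpired() + " " + stats);
      }
    }
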
@@ -61,6 +61,7 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveEntryProto;
+import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveStatsProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolEntryProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto;
@@ -1591,6 +1592,9 @@ public class PBHelper {
     if (info.getPool() != null) {
       builder.setPool(info.getPool());
     }
+    if (info.getExpiration() != null) {
+      builder.setExpiration(convert(info.getExpiration()));
+    }
     return builder.build();
   }
 
@@ -1611,15 +1615,35 @@ public class PBHelper {
     if (proto.hasPool()) {
       builder.setPool(proto.getPool());
     }
+    if (proto.hasExpiration()) {
+      builder.setExpiration(convert(proto.getExpiration()));
+    }
     return builder.build();
   }
 
+  public static CacheDirectiveInfoExpirationProto convert(
+      CacheDirectiveInfo.Expiration expiration) {
+    return CacheDirectiveInfoExpirationProto.newBuilder()
+        .setIsRelative(expiration.isRelative())
+        .setMillis(expiration.getMillis())
+        .build();
+  }
+
+  public static CacheDirectiveInfo.Expiration convert(
+      CacheDirectiveInfoExpirationProto proto) {
+    if (proto.getIsRelative()) {
+      return CacheDirectiveInfo.Expiration.newRelative(proto.getMillis());
+    }
+    return CacheDirectiveInfo.Expiration.newAbsolute(proto.getMillis());
+  }
+
   public static CacheDirectiveStatsProto convert(CacheDirectiveStats stats) {
     CacheDirectiveStatsProto.Builder builder =
         CacheDirectiveStatsProto.newBuilder();
     builder.setBytesNeeded(stats.getBytesNeeded());
     builder.setBytesCached(stats.getBytesCached());
     builder.setFilesAffected(stats.getFilesAffected());
+    builder.setHasExpired(stats.hasExpired());
     return builder.build();
   }
 
@@ -1628,6 +1652,7 @@ public class PBHelper {
     builder.setBytesNeeded(proto.getBytesNeeded());
     builder.setBytesCached(proto.getBytesCached());
     builder.setFilesAffected(proto.getFilesAffected());
+    builder.setHasExpired(proto.getHasExpired());
     return builder.build();
   }
 

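Illustration (not part of the patch): a round-trip sketch of the two new PBHelper.convert overloads above. It assumes PBHelper lives at org.apache.hadoop.hdfs.protocolPB.PBHelper and that the generated CacheDirectiveInfoExpirationProto class is on the classpath, as it is inside the HDFS build; only the millis value and the isRelative flag cross the wire.

    import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
    import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto;
    import org.apache.hadoop.hdfs.protocolPB.PBHelper;

    public class ExpirationPbExample {
      public static void main(String[] args) {
        CacheDirectiveInfo.Expiration relative =
            CacheDirectiveInfo.Expiration.newRelative(5 * 60 * 1000L);

        // Java -> protobuf.
        CacheDirectiveInfoExpirationProto proto = PBHelper.convert(relative);

        // protobuf -> Java: the isRelative flag decides which factory method is used.
        CacheDirectiveInfo.Expiration roundTripped = PBHelper.convert(proto);
        System.out.println(roundTripped.isRelative() + " " + roundTripped.getMillis());
      }
    }
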
@@ -22,6 +22,7 @@ import static org.apache.hadoop.util.ExitUtil.terminate;
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.Collection;
+import java.util.Date;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
@@ -211,12 +212,24 @@ public class CacheReplicationMonitor extends Thread implements Closeable {
    */
   private void rescanCacheDirectives() {
     FSDirectory fsDir = namesystem.getFSDirectory();
-    for (CacheDirective pce : cacheManager.getEntriesById().values()) {
+    final long now = new Date().getTime();
+    for (CacheDirective directive : cacheManager.getEntriesById().values()) {
+      // Reset the directive
+      directive.clearBytesNeeded();
+      directive.clearBytesCached();
+      directive.clearFilesAffected();
+      // Skip processing this entry if it has expired
+      LOG.info("Directive expiry is at " + directive.getExpiryTime());
+      if (directive.getExpiryTime() > 0 && directive.getExpiryTime() <= now) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Skipping directive id " + directive.getId()
+              + " because it has expired (" + directive.getExpiryTime() + ">="
+              + now);
+        }
+        continue;
+      }
       scannedDirectives++;
-      pce.clearBytesNeeded();
-      pce.clearBytesCached();
-      pce.clearFilesAffected();
-      String path = pce.getPath();
+      String path = directive.getPath();
       INode node;
       try {
         node = fsDir.getINode(path);
@@ -233,11 +246,11 @@ public class CacheReplicationMonitor extends Thread implements Closeable {
       ReadOnlyList<INode> children = dir.getChildrenList(null);
       for (INode child : children) {
         if (child.isFile()) {
-          rescanFile(pce, child.asFile());
+          rescanFile(directive, child.asFile());
         }
       }
     } else if (node.isFile()) {
-      rescanFile(pce, node.asFile());
+      rescanFile(directive, node.asFile());
     } else {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Ignoring non-directory, non-file inode " + node +

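Illustration (not part of the patch): the rescan loop above skips directives whose expiry has passed. The standalone helper below merely restates that predicate; the isExpired name is made up, and the examples show how a non-positive expiry (the EXPIRY_NEVER default) is treated as never expiring.

    public final class ExpiryCheck {
      /**
       * Mirrors the skip condition in CacheReplicationMonitor#rescanCacheDirectives:
       * a directive is expired only if it has a positive expiry time that is at or
       * before "now"; a non-positive value never expires.
       */
      static boolean isExpired(long expiryTimeMs, long nowMs) {
        return expiryTimeMs > 0 && expiryTimeMs <= nowMs;
      }

      public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println(isExpired(-1, now));           // false: EXPIRY_NEVER
        System.out.println(isExpired(now - 1, now));      // true: already past
        System.out.println(isExpired(now + 60000, now));  // false: one minute left
      }
    }
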
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CACHING_ENABLED_DEFAULT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CACHING_ENABLED_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CACHING_ENABLED_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CACHING_ENABLED_DEFAULT;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_PATH_BASED_CACHE_REFRESH_INTERVAL_MS;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_PATH_BASED_CACHE_REFRESH_INTERVAL_MS_DEFAULT;
 
@@ -43,18 +43,18 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.InvalidRequestException;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries;
+import org.apache.hadoop.fs.InvalidRequestException;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.CacheDirective;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
+import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
-import org.apache.hadoop.hdfs.protocol.CacheDirective;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
 import org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor;
@@ -249,7 +249,7 @@ public final class CacheManager {
     return cachedBlocks;
   }
 
-  private long getNextEntryId() throws IOException {
+  private long getNextDirectiveId() throws IOException {
     assert namesystem.hasWriteLock();
     if (nextDirectiveId >= Long.MAX_VALUE - 1) {
       throw new IOException("No more available IDs.");
@@ -301,6 +301,34 @@ public final class CacheManager {
     return repl;
   }
 
+  /**
+   * Calculates the absolute expiry time of the directive from the
+   * {@link CacheDirectiveInfo.Expiration}. This converts a relative Expiration
+   * into an absolute time based on the local clock.
+   *
+   * @param directive from which to get the expiry time
+   * @param defaultValue to use if Expiration is not set
+   * @return Absolute expiry time in milliseconds since Unix epoch
+   * @throws InvalidRequestException if the Expiration is invalid
+   */
+  private static long validateExpiryTime(CacheDirectiveInfo directive,
+      long defaultValue) throws InvalidRequestException {
+    long expiryTime;
+    CacheDirectiveInfo.Expiration expiration = directive.getExpiration();
+    if (expiration != null) {
+      if (expiration.getMillis() < 0) {
+        throw new InvalidRequestException("Cannot set a negative expiration: "
+            + expiration.getMillis());
+      }
+      // Converts a relative duration into an absolute time based on the local
+      // clock
+      expiryTime = expiration.getAbsoluteMillis();
+    } else {
+      expiryTime = defaultValue;
+    }
+    return expiryTime;
+  }
+
   /**
    * Get a CacheDirective by ID, validating the ID and that the directive
    * exists.
@@ -346,6 +374,26 @@ public final class CacheManager {
     directives.add(directive);
   }
 
+  /**
+   * To be called only from the edit log loading code
+   */
+  CacheDirectiveInfo addDirectiveFromEditLog(CacheDirectiveInfo directive)
+      throws InvalidRequestException {
+    long id = directive.getId();
+    CacheDirective entry =
+        new CacheDirective(
+            directive.getId(),
+            directive.getPath().toUri().getPath(),
+            directive.getReplication(),
+            directive.getExpiration().getAbsoluteMillis());
+    CachePool pool = cachePools.get(directive.getPool());
+    addInternal(entry, pool);
+    if (nextDirectiveId <= id) {
+      nextDirectiveId = id + 1;
+    }
+    return entry.toInfo();
+  }
+
   public CacheDirectiveInfo addDirective(
       CacheDirectiveInfo info, FSPermissionChecker pc)
       throws IOException {
@@ -356,27 +404,12 @@ public final class CacheManager {
       checkWritePermission(pc, pool);
       String path = validatePath(info);
       short replication = validateReplication(info, (short)1);
-      long id;
-      if (info.getId() != null) {
-        // We are loading a directive from the edit log.
-        // Use the ID from the edit log.
-        id = info.getId();
-        if (id <= 0) {
-          throw new InvalidRequestException("can't add an ID " +
-              "of " + id + ": it is not positive.");
-        }
-        if (id >= Long.MAX_VALUE) {
-          throw new InvalidRequestException("can't add an ID " +
-              "of " + id + ": it is too big.");
-        }
-        if (nextDirectiveId <= id) {
-          nextDirectiveId = id + 1;
-        }
-      } else {
-        // Add a new directive with the next available ID.
-        id = getNextEntryId();
-      }
-      directive = new CacheDirective(id, path, replication);
+      long expiryTime = validateExpiryTime(info,
+          CacheDirectiveInfo.Expiration.EXPIRY_NEVER);
+      // All validation passed
+      // Add a new entry with the next available ID.
+      long id = getNextDirectiveId();
+      directive = new CacheDirective(id, path, replication, expiryTime);
       addInternal(directive, pool);
     } catch (IOException e) {
       LOG.warn("addDirective of " + info + " failed: ", e);
@@ -407,10 +440,13 @@ public final class CacheManager {
       if (info.getPath() != null) {
         path = validatePath(info);
       }
+
       short replication = prevEntry.getReplication();
-      if (info.getReplication() != null) {
-        replication = validateReplication(info, replication);
-      }
+      replication = validateReplication(info, replication);
+
+      long expiryTime = prevEntry.getExpiryTime();
+      expiryTime = validateExpiryTime(info, expiryTime);
+
       CachePool pool = prevEntry.getPool();
       if (info.getPool() != null) {
         pool = getCachePool(validatePoolName(info));
@@ -418,7 +454,7 @@ public final class CacheManager {
       }
       removeInternal(prevEntry);
       CacheDirective newEntry =
-          new CacheDirective(id, path, replication);
+          new CacheDirective(id, path, replication, expiryTime);
       addInternal(newEntry, pool);
     } catch (IOException e) {
       LOG.warn("modifyDirective of " + idString + " failed: ", e);
@@ -788,6 +824,7 @@ public final class CacheManager {
       Text.writeString(out, directive.getPath());
       out.writeShort(directive.getReplication());
       Text.writeString(out, directive.getPool().getPoolName());
+      out.writeLong(directive.getExpiryTime());
       counter.increment();
     }
     prog.endStep(Phase.SAVING_CHECKPOINT, step);
@@ -826,6 +863,7 @@ public final class CacheManager {
       String path = Text.readString(in);
       short replication = in.readShort();
       String poolName = Text.readString(in);
+      long expiryTime = in.readLong();
       // Get pool reference by looking it up in the map
       CachePool pool = cachePools.get(poolName);
       if (pool == null) {
@@ -833,7 +871,7 @@ public final class CacheManager {
             ", which does not exist.");
       }
       CacheDirective directive =
-          new CacheDirective(directiveId, path, replication);
+          new CacheDirective(directiveId, path, replication, expiryTime);
       boolean addedDirective = pool.getDirectiveList().add(directive);
       assert addedDirective;
       if (directivesById.put(directive.getId(), directive) != null) {

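Illustration (not part of the patch): validateExpiryTime above leans on Expiration#getAbsoluteMillis() to pin a relative TTL against the NameNode's clock once, at add or modify time. validateExpiryTime itself is private, so this sketch only exercises the public Expiration API; the 10-minute value is made up.

    import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

    public class AbsoluteExpiryExample {
      public static void main(String[] args) {
        // A 10-minute relative TTL...
        CacheDirectiveInfo.Expiration ttl =
            CacheDirectiveInfo.Expiration.newRelative(10 * 60 * 1000L);

        // ...is resolved against the local (server-side) clock exactly once;
        // that absolute value is what CacheManager stores as the directive's expiryTime.
        long expiryTime = ttl.getAbsoluteMillis();
        System.out.println(expiryTime - System.currentTimeMillis()); // roughly 600000
      }
    }
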
@@ -953,7 +953,11 @@ public class FSEditLog implements LogsPurgeable {
         .setSnapshotRoot(path);
     logEdit(op);
   }
 
+  /**
+   * Log a CacheDirectiveInfo returned from
+   * {@link CacheManager#addDirective(CacheDirectiveInfo, FSPermissionChecker)}
+   */
   void logAddCacheDirectiveInfo(CacheDirectiveInfo directive,
       boolean toLogRpcIds) {
     AddCacheDirectiveInfoOp op =

@@ -636,17 +636,17 @@ public class FSEditLogLoader {
       fsNamesys.setLastAllocatedBlockId(allocateBlockIdOp.blockId);
       break;
     }
-    case OP_ADD_PATH_BASED_CACHE_DIRECTIVE: {
+    case OP_ADD_CACHE_DIRECTIVE: {
       AddCacheDirectiveInfoOp addOp = (AddCacheDirectiveInfoOp) op;
       CacheDirectiveInfo result = fsNamesys.
-          getCacheManager().addDirective(addOp.directive, null);
+          getCacheManager().addDirectiveFromEditLog(addOp.directive);
       if (toAddRetryCache) {
         Long id = result.getId();
         fsNamesys.addCacheEntryWithPayload(op.rpcClientId, op.rpcCallId, id);
       }
       break;
     }
-    case OP_MODIFY_PATH_BASED_CACHE_DIRECTIVE: {
+    case OP_MODIFY_CACHE_DIRECTIVE: {
       ModifyCacheDirectiveInfoOp modifyOp =
           (ModifyCacheDirectiveInfoOp) op;
       fsNamesys.getCacheManager().modifyDirective(
@@ -656,7 +656,7 @@ public class FSEditLogLoader {
       }
       break;
     }
-    case OP_REMOVE_PATH_BASED_CACHE_DIRECTIVE: {
+    case OP_REMOVE_CACHE_DIRECTIVE: {
       RemoveCacheDirectiveInfoOp removeOp =
           (RemoveCacheDirectiveInfoOp) op;
       fsNamesys.getCacheManager().removeDirective(removeOp.id, null);

@@ -18,9 +18,8 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_ADD;
+import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_ADD_CACHE_DIRECTIVE;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_ADD_CACHE_POOL;
-import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_ADD_PATH_BASED_CACHE_DIRECTIVE;
-import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_MODIFY_PATH_BASED_CACHE_DIRECTIVE;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_ALLOCATE_BLOCK_ID;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_ALLOW_SNAPSHOT;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_CANCEL_DELEGATION_TOKEN;
@@ -35,10 +34,11 @@ import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_END_LOG
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_GET_DELEGATION_TOKEN;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_INVALID;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_MKDIR;
+import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_MODIFY_CACHE_DIRECTIVE;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_MODIFY_CACHE_POOL;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_REASSIGN_LEASE;
+import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_REMOVE_CACHE_DIRECTIVE;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_REMOVE_CACHE_POOL;
-import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_REMOVE_PATH_BASED_CACHE_DIRECTIVE;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_RENAME;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_RENAME_OLD;
 import static org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes.OP_RENAME_SNAPSHOT;
@@ -64,6 +64,7 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Date;
 import java.util.EnumMap;
 import java.util.List;
 import java.util.zip.CheckedInputStream;
@@ -81,12 +82,12 @@ import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DeprecatedUTF8;
 import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.LayoutVersion;
 import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;
-import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.util.XMLUtils;
 import org.apache.hadoop.hdfs.util.XMLUtils.InvalidXmlException;
@@ -109,7 +110,6 @@ import org.xml.sax.helpers.AttributesImpl;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 
 /**
  * Helper classes for reading the ops from an InputStream.
@@ -165,11 +165,11 @@ public abstract class FSEditLogOp {
     inst.put(OP_RENAME_SNAPSHOT, new RenameSnapshotOp());
     inst.put(OP_SET_GENSTAMP_V2, new SetGenstampV2Op());
     inst.put(OP_ALLOCATE_BLOCK_ID, new AllocateBlockIdOp());
-    inst.put(OP_ADD_PATH_BASED_CACHE_DIRECTIVE,
+    inst.put(OP_ADD_CACHE_DIRECTIVE,
         new AddCacheDirectiveInfoOp());
-    inst.put(OP_MODIFY_PATH_BASED_CACHE_DIRECTIVE,
+    inst.put(OP_MODIFY_CACHE_DIRECTIVE,
         new ModifyCacheDirectiveInfoOp());
-    inst.put(OP_REMOVE_PATH_BASED_CACHE_DIRECTIVE,
+    inst.put(OP_REMOVE_CACHE_DIRECTIVE,
         new RemoveCacheDirectiveInfoOp());
     inst.put(OP_ADD_CACHE_POOL, new AddCachePoolOp());
     inst.put(OP_MODIFY_CACHE_POOL, new ModifyCachePoolOp());
@@ -2874,12 +2874,12 @@ public abstract class FSEditLogOp {
     CacheDirectiveInfo directive;
 
     public AddCacheDirectiveInfoOp() {
-      super(OP_ADD_PATH_BASED_CACHE_DIRECTIVE);
+      super(OP_ADD_CACHE_DIRECTIVE);
     }
 
     static AddCacheDirectiveInfoOp getInstance(OpInstanceCache cache) {
       return (AddCacheDirectiveInfoOp) cache
-          .get(OP_ADD_PATH_BASED_CACHE_DIRECTIVE);
+          .get(OP_ADD_CACHE_DIRECTIVE);
     }
 
     public AddCacheDirectiveInfoOp setDirective(
@@ -2889,6 +2889,7 @@ public abstract class FSEditLogOp {
       assert(directive.getPath() != null);
       assert(directive.getReplication() != null);
       assert(directive.getPool() != null);
+      assert(directive.getExpiration() != null);
       return this;
     }
 
@@ -2898,11 +2899,13 @@ public abstract class FSEditLogOp {
       String path = FSImageSerialization.readString(in);
       short replication = FSImageSerialization.readShort(in);
       String pool = FSImageSerialization.readString(in);
+      long expiryTime = FSImageSerialization.readLong(in);
       directive = new CacheDirectiveInfo.Builder().
           setId(id).
           setPath(new Path(path)).
           setReplication(replication).
           setPool(pool).
+          setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(expiryTime)).
           build();
       readRpcIds(in, logVersion);
     }
@@ -2913,6 +2916,8 @@ public abstract class FSEditLogOp {
       FSImageSerialization.writeString(directive.getPath().toUri().getPath(), out);
       FSImageSerialization.writeShort(directive.getReplication(), out);
       FSImageSerialization.writeString(directive.getPool(), out);
+      FSImageSerialization.writeLong(
+          directive.getExpiration().getMillis(), out);
       writeRpcIds(rpcClientId, rpcCallId, out);
     }
 
@@ -2925,6 +2930,8 @@ public abstract class FSEditLogOp {
       XMLUtils.addSaxString(contentHandler, "REPLICATION",
           Short.toString(directive.getReplication()));
       XMLUtils.addSaxString(contentHandler, "POOL", directive.getPool());
+      XMLUtils.addSaxString(contentHandler, "EXPIRATION",
+          "" + directive.getExpiration().getMillis());
       appendRpcIdsToXml(contentHandler, rpcClientId, rpcCallId);
     }
 
@@ -2935,6 +2942,8 @@ public abstract class FSEditLogOp {
         setPath(new Path(st.getValue("PATH"))).
         setReplication(Short.parseShort(st.getValue("REPLICATION"))).
         setPool(st.getValue("POOL")).
+        setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(
+            Long.parseLong(st.getValue("EXPIRATION")))).
         build();
       readRpcIdsFromXml(st);
     }
@@ -2946,7 +2955,8 @@ public abstract class FSEditLogOp {
       builder.append("id=" + directive.getId() + ",");
      builder.append("path=" + directive.getPath().toUri().getPath() + ",");
       builder.append("replication=" + directive.getReplication() + ",");
-      builder.append("pool=" + directive.getPool());
+      builder.append("pool=" + directive.getPool() + ",");
+      builder.append("expiration=" + directive.getExpiration().getMillis());
       appendRpcIdsToString(builder, rpcClientId, rpcCallId);
       builder.append("]");
       return builder.toString();
@@ -2961,12 +2971,12 @@ public abstract class FSEditLogOp {
     CacheDirectiveInfo directive;
 
     public ModifyCacheDirectiveInfoOp() {
-      super(OP_MODIFY_PATH_BASED_CACHE_DIRECTIVE);
+      super(OP_MODIFY_CACHE_DIRECTIVE);
     }
 
     static ModifyCacheDirectiveInfoOp getInstance(OpInstanceCache cache) {
       return (ModifyCacheDirectiveInfoOp) cache
-          .get(OP_MODIFY_CACHE_DIRECTIVE);
|
||||||
}
|
}
|
||||||
|
|
||||||
public ModifyCacheDirectiveInfoOp setDirective(
|
public ModifyCacheDirectiveInfoOp setDirective(
|
||||||
|
@ -2991,7 +3001,12 @@ public abstract class FSEditLogOp {
|
||||||
if ((flags & 0x4) != 0) {
|
if ((flags & 0x4) != 0) {
|
||||||
builder.setPool(FSImageSerialization.readString(in));
|
builder.setPool(FSImageSerialization.readString(in));
|
||||||
}
|
}
|
||||||
if ((flags & ~0x7) != 0) {
|
if ((flags & 0x8) != 0) {
|
||||||
|
builder.setExpiration(
|
||||||
|
CacheDirectiveInfo.Expiration.newAbsolute(
|
||||||
|
FSImageSerialization.readLong(in)));
|
||||||
|
}
|
||||||
|
if ((flags & ~0xF) != 0) {
|
||||||
throw new IOException("unknown flags set in " +
|
throw new IOException("unknown flags set in " +
|
||||||
"ModifyCacheDirectiveInfoOp: " + flags);
|
"ModifyCacheDirectiveInfoOp: " + flags);
|
||||||
}
|
}
|
||||||
|
@ -3005,7 +3020,8 @@ public abstract class FSEditLogOp {
|
||||||
byte flags = (byte)(
|
byte flags = (byte)(
|
||||||
((directive.getPath() != null) ? 0x1 : 0) |
|
((directive.getPath() != null) ? 0x1 : 0) |
|
||||||
((directive.getReplication() != null) ? 0x2 : 0) |
|
((directive.getReplication() != null) ? 0x2 : 0) |
|
||||||
((directive.getPool() != null) ? 0x4 : 0)
|
((directive.getPool() != null) ? 0x4 : 0) |
|
||||||
|
((directive.getExpiration() != null) ? 0x8 : 0)
|
||||||
);
|
);
|
||||||
out.writeByte(flags);
|
out.writeByte(flags);
|
||||||
if (directive.getPath() != null) {
|
if (directive.getPath() != null) {
|
||||||
|
@ -3018,6 +3034,10 @@ public abstract class FSEditLogOp {
|
||||||
if (directive.getPool() != null) {
|
if (directive.getPool() != null) {
|
||||||
FSImageSerialization.writeString(directive.getPool(), out);
|
FSImageSerialization.writeString(directive.getPool(), out);
|
||||||
}
|
}
|
||||||
|
if (directive.getExpiration() != null) {
|
||||||
|
FSImageSerialization.writeLong(directive.getExpiration().getMillis(),
|
||||||
|
out);
|
||||||
|
}
|
||||||
writeRpcIds(rpcClientId, rpcCallId, out);
|
writeRpcIds(rpcClientId, rpcCallId, out);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3036,6 +3056,10 @@ public abstract class FSEditLogOp {
|
||||||
if (directive.getPool() != null) {
|
if (directive.getPool() != null) {
|
||||||
XMLUtils.addSaxString(contentHandler, "POOL", directive.getPool());
|
XMLUtils.addSaxString(contentHandler, "POOL", directive.getPool());
|
||||||
}
|
}
|
||||||
|
if (directive.getExpiration() != null) {
|
||||||
|
XMLUtils.addSaxString(contentHandler, "EXPIRATION",
|
||||||
|
"" + directive.getExpiration().getMillis());
|
||||||
|
}
|
||||||
appendRpcIdsToXml(contentHandler, rpcClientId, rpcCallId);
|
appendRpcIdsToXml(contentHandler, rpcClientId, rpcCallId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3056,6 +3080,11 @@ public abstract class FSEditLogOp {
|
||||||
if (pool != null) {
|
if (pool != null) {
|
||||||
builder.setPool(pool);
|
builder.setPool(pool);
|
||||||
}
|
}
|
||||||
|
String expiryTime = st.getValueOrNull("EXPIRATION");
|
||||||
|
if (expiryTime != null) {
|
||||||
|
builder.setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(
|
||||||
|
Long.parseLong(expiryTime)));
|
||||||
|
}
|
||||||
this.directive = builder.build();
|
this.directive = builder.build();
|
||||||
readRpcIdsFromXml(st);
|
readRpcIdsFromXml(st);
|
||||||
}
|
}
|
||||||
|
@ -3075,6 +3104,10 @@ public abstract class FSEditLogOp {
|
||||||
if (directive.getPool() != null) {
|
if (directive.getPool() != null) {
|
||||||
builder.append(",").append("pool=").append(directive.getPool());
|
builder.append(",").append("pool=").append(directive.getPool());
|
||||||
}
|
}
|
||||||
|
if (directive.getExpiration() != null) {
|
||||||
|
builder.append(",").append("expiration=").
|
||||||
|
append(directive.getExpiration().getMillis());
|
||||||
|
}
|
||||||
appendRpcIdsToString(builder, rpcClientId, rpcCallId);
|
appendRpcIdsToString(builder, rpcClientId, rpcCallId);
|
||||||
builder.append("]");
|
builder.append("]");
|
||||||
return builder.toString();
|
return builder.toString();
|
||||||
|
@ -3089,12 +3122,12 @@ public abstract class FSEditLogOp {
|
||||||
long id;
|
long id;
|
||||||
|
|
||||||
public RemoveCacheDirectiveInfoOp() {
|
public RemoveCacheDirectiveInfoOp() {
|
||||||
super(OP_REMOVE_PATH_BASED_CACHE_DIRECTIVE);
|
super(OP_REMOVE_CACHE_DIRECTIVE);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RemoveCacheDirectiveInfoOp getInstance(OpInstanceCache cache) {
|
static RemoveCacheDirectiveInfoOp getInstance(OpInstanceCache cache) {
|
||||||
return (RemoveCacheDirectiveInfoOp) cache
|
return (RemoveCacheDirectiveInfoOp) cache
|
||||||
.get(OP_REMOVE_PATH_BASED_CACHE_DIRECTIVE);
|
.get(OP_REMOVE_CACHE_DIRECTIVE);
|
||||||
}
|
}
|
||||||
|
|
||||||
public RemoveCacheDirectiveInfoOp setId(long id) {
|
public RemoveCacheDirectiveInfoOp setId(long id) {
|
||||||
|
@ -3162,7 +3195,7 @@ public abstract class FSEditLogOp {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void writeFields(DataOutputStream out) throws IOException {
|
public void writeFields(DataOutputStream out) throws IOException {
|
||||||
info .writeTo(out);
|
info.writeTo(out);
|
||||||
writeRpcIds(rpcClientId, rpcCallId, out);
|
writeRpcIds(rpcClientId, rpcCallId, out);
|
||||||
}
|
}
|
||||||
|
|
||||||
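Note on the ModifyCacheDirectiveInfoOp hunks above: the optional-field flags byte gains bit 0x8 for the expiration, and the unknown-flags guard widens from ~0x7 to ~0xF. A minimal self-contained Java sketch of that encoding (the class and method names here are illustrative, not part of the patch):

public class ModifyFlagsSketch {
  // Compose the flags byte the same way writeFields() does above:
  // one bit per optional field that is present.
  static byte encodeFlags(boolean hasPath, boolean hasReplication,
      boolean hasPool, boolean hasExpiration) {
    return (byte) ((hasPath ? 0x1 : 0)
        | (hasReplication ? 0x2 : 0)
        | (hasPool ? 0x4 : 0)
        | (hasExpiration ? 0x8 : 0));
  }

  // readFields() rejects anything outside the four known bits, hence the
  // guard change from (flags & ~0x7) to (flags & ~0xF).
  static boolean hasUnknownFlags(byte flags) {
    return (flags & ~0xF) != 0;
  }

  public static void main(String[] args) {
    byte flags = encodeFlags(false, false, true, true);
    System.out.println(Integer.toBinaryString(flags)); // prints 1100
    System.out.println(hasUnknownFlags(flags));        // prints false
  }
}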

@@ -64,12 +64,12 @@ public enum FSEditLogOpCodes {
 OP_DISALLOW_SNAPSHOT ((byte) 30),
 OP_SET_GENSTAMP_V2 ((byte) 31),
 OP_ALLOCATE_BLOCK_ID ((byte) 32),
-OP_ADD_PATH_BASED_CACHE_DIRECTIVE ((byte) 33),
+OP_ADD_CACHE_DIRECTIVE ((byte) 33),
-OP_REMOVE_PATH_BASED_CACHE_DIRECTIVE ((byte) 34),
+OP_REMOVE_CACHE_DIRECTIVE ((byte) 34),
 OP_ADD_CACHE_POOL ((byte) 35),
 OP_MODIFY_CACHE_POOL ((byte) 36),
 OP_REMOVE_CACHE_POOL ((byte) 37),
-OP_MODIFY_PATH_BASED_CACHE_DIRECTIVE ((byte) 38);
+OP_MODIFY_CACHE_DIRECTIVE ((byte) 38);

 private byte opCode;

@@ -29,12 +29,13 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
+import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveStats;
 import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
-import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.server.namenode.CachePool;
 import org.apache.hadoop.hdfs.tools.TableListing.Justification;
 import org.apache.hadoop.ipc.RemoteException;
@@ -132,7 +133,8 @@ public class CacheAdmin extends Configured implements Tool {
 @Override
 public String getShortUsage() {
 return "[" + getName() +
-" -path <path> -replication <replication> -pool <pool-name>]\n";
+" -path <path> -pool <pool-name> " +
+"[-replication <replication>] [-ttl <time-to-live>]]\n";
 }

 @Override
@@ -140,11 +142,15 @@ public class CacheAdmin extends Configured implements Tool {
 TableListing listing = getOptionDescriptionListing();
 listing.addRow("<path>", "A path to cache. The path can be " +
 "a directory or a file.");
-listing.addRow("<replication>", "The cache replication factor to use. " +
-"Defaults to 1.");
 listing.addRow("<pool-name>", "The pool to which the directive will be " +
 "added. You must have write permission on the cache pool "
 + "in order to add new directives.");
+listing.addRow("<replication>", "The cache replication factor to use. " +
+"Defaults to 1.");
+listing.addRow("<time-to-live>", "How long the directive is " +
+"valid. Can be specified in minutes, hours, and days via e.g. " +
+"30m, 4h, 2d. Valid units are [smhd]." +
+" If unspecified, the directive never expires.");
 return getShortUsage() + "\n" +
 "Add a new cache directive.\n\n" +
 listing.toString();
@@ -152,33 +158,48 @@ public class CacheAdmin extends Configured implements Tool {

 @Override
 public int run(Configuration conf, List<String> args) throws IOException {
+CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();

 String path = StringUtils.popOptionWithArgument("-path", args);
 if (path == null) {
 System.err.println("You must specify a path with -path.");
 return 1;
 }
-short replication = 1;
+builder.setPath(new Path(path));
-String replicationString =
-StringUtils.popOptionWithArgument("-replication", args);
-if (replicationString != null) {
-replication = Short.parseShort(replicationString);
-}
 String poolName = StringUtils.popOptionWithArgument("-pool", args);
 if (poolName == null) {
 System.err.println("You must specify a pool name with -pool.");
 return 1;
 }
+builder.setPool(poolName);

+String replicationString =
+StringUtils.popOptionWithArgument("-replication", args);
+if (replicationString != null) {
+Short replication = Short.parseShort(replicationString);
+builder.setReplication(replication);
+}

+String ttlString = StringUtils.popOptionWithArgument("-ttl", args);
+if (ttlString != null) {
+try {
+long ttl = DFSUtil.parseRelativeTime(ttlString);
+builder.setExpiration(CacheDirectiveInfo.Expiration.newRelative(ttl));
+} catch (IOException e) {
+System.err.println(
+"Error while parsing ttl value: " + e.getMessage());
+return 1;
+}
+}

 if (!args.isEmpty()) {
 System.err.println("Can't understand argument: " + args.get(0));
 return 1;
 }

 DistributedFileSystem dfs = getDFS(conf);
-CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder().
+CacheDirectiveInfo directive = builder.build();
-setPath(new Path(path)).
-setReplication(replication).
-setPool(poolName).
-build();
 try {
 long id = dfs.addCacheDirective(directive);
 System.out.println("Added cache directive " + id);
@@ -261,7 +282,7 @@ public class CacheAdmin extends Configured implements Tool {
 public String getShortUsage() {
 return "[" + getName() +
 " -id <id> [-path <path>] [-replication <replication>] " +
-"[-pool <pool-name>] ]\n";
+"[-pool <pool-name>] [-ttl <time-to-live>]]\n";
 }

 @Override
@@ -275,6 +296,10 @@ public class CacheAdmin extends Configured implements Tool {
 listing.addRow("<pool-name>", "The pool to which the directive will be " +
 "added. You must have write permission on the cache pool "
 + "in order to move a directive into it. (optional)");
+listing.addRow("<time-to-live>", "How long the directive is " +
+"valid. Can be specified in minutes, hours, and days via e.g. " +
+"30m, 4h, 2d. Valid units are [smhd]." +
+" If unspecified, the directive never expires.");
 return getShortUsage() + "\n" +
 "Modify a cache directive.\n\n" +
 listing.toString();
@@ -308,6 +333,19 @@ public class CacheAdmin extends Configured implements Tool {
 builder.setPool(poolName);
 modified = true;
 }
+String ttlString = StringUtils.popOptionWithArgument("-ttl", args);
+if (ttlString != null) {
+long ttl;
+try {
+ttl = DFSUtil.parseRelativeTime(ttlString);
+} catch (IOException e) {
+System.err.println(
+"Error while parsing ttl value: " + e.getMessage());
+return 1;
+}
+builder.setExpiration(CacheDirectiveInfo.Expiration.newRelative(ttl));
+modified = true;
+}
 if (!args.isEmpty()) {
 System.err.println("Can't understand argument: " + args.get(0));
 System.err.println("Usage is " + getShortUsage());
@@ -435,7 +473,8 @@ public class CacheAdmin extends Configured implements Tool {
 TableListing.Builder tableBuilder = new TableListing.Builder().
 addField("ID", Justification.RIGHT).
 addField("POOL", Justification.LEFT).
-addField("REPLICATION", Justification.RIGHT).
+addField("REPL", Justification.RIGHT).
+addField("EXPIRY", Justification.LEFT).
 addField("PATH", Justification.LEFT);
 if (printStats) {
 tableBuilder.addField("NEEDED", Justification.RIGHT).
@@ -456,6 +495,14 @@ public class CacheAdmin extends Configured implements Tool {
 row.add("" + directive.getId());
 row.add(directive.getPool());
 row.add("" + directive.getReplication());
+String expiry;
+if (directive.getExpiration().getMillis() ==
+CacheDirectiveInfo.Expiration.EXPIRY_NEVER) {
+expiry = "never";
+} else {
+expiry = directive.getExpiration().toString();
+}
+row.add(expiry);
 row.add(directive.getPath().toUri().getPath());
 if (printStats) {
 row.add("" + stats.getBytesNeeded());

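The -ttl handling added to CacheAdmin above can also be driven directly from client code. A short sketch under the same APIs this change touches (the class, method, and variable names are illustrative only):

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class AddDirectiveWithTtlExample {
  /**
   * Adds a cache directive whose TTL is given as a relative time string
   * such as "30m", "4h", or "2d", the same syntax -ttl accepts.
   */
  static long addWithTtl(DistributedFileSystem dfs, String path,
      String pool, String ttl) throws IOException {
    CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder()
        .setPath(new Path(path))
        .setPool(pool);
    if (ttl != null) {
      // parseRelativeTime converts "30m", "4h", "2d", ... into milliseconds.
      long ttlMs = DFSUtil.parseRelativeTime(ttl);
      builder.setExpiration(CacheDirectiveInfo.Expiration.newRelative(ttlMs));
    }
    // Returns the id of the new directive, the same value the CLI prints.
    return dfs.addCacheDirective(builder.build());
  }
}
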
@@ -368,12 +368,19 @@ message CacheDirectiveInfoProto {
 optional string path = 2;
 optional uint32 replication = 3;
 optional string pool = 4;
+optional CacheDirectiveInfoExpirationProto expiration = 5;
+}

+message CacheDirectiveInfoExpirationProto {
+required int64 millis = 1;
+required bool isRelative = 2;
 }

 message CacheDirectiveStatsProto {
 required int64 bytesNeeded = 1;
 required int64 bytesCached = 2;
 required int64 filesAffected = 3;
+required bool hasExpired = 4;
 }

 message AddCacheDirectiveRequestProto {

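The new CacheDirectiveInfoExpirationProto carries a millis value plus an isRelative flag. As an illustration of the intended semantics only (a hypothetical helper, not code from this patch), a relative expiration is resolved against the current time when it is applied, while an absolute one is used as-is:

public class ExpirationSketch {
  // Resolve an expiration to an absolute wall-clock time in milliseconds,
  // mirroring the millis/isRelative pair in the proto message.
  static long resolveExpiryMs(long millis, boolean isRelative, long nowMs) {
    return isRelative ? nowMs + millis : millis;
  }

  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    // A 30 minute relative TTL versus an already-absolute timestamp.
    System.out.println(resolveExpiryMs(30 * 60 * 1000L, true, now));
    System.out.println(resolveExpiryMs(now + 30 * 60 * 1000L, false, now));
  }
}
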
@@ -31,6 +31,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_A
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICES;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICE_ID;
+import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
 import static org.hamcrest.CoreMatchers.not;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -62,6 +63,7 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
 import org.junit.Assume;
 import org.junit.Before;
@@ -724,4 +726,43 @@ public class TestDFSUtil {
 DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
 DFSUtil.getSpnegoKeytabKey(conf, defaultKey));
 }

+@Test(timeout=1000)
+public void testDurationToString() throws Exception {
+assertEquals("000:00:00:00", DFSUtil.durationToString(0));
+try {
+DFSUtil.durationToString(-199);
+} catch (IllegalArgumentException e) {
+GenericTestUtils.assertExceptionContains("Invalid negative duration", e);
+}
+assertEquals("001:01:01:01",
+DFSUtil.durationToString(((24*60*60)+(60*60)+(60)+1)*1000));
+assertEquals("000:23:59:59",
+DFSUtil.durationToString(((23*60*60)+(59*60)+(59))*1000));
+}

+@Test(timeout=5000)
+public void testRelativeTimeConversion() throws Exception {
+try {
+DFSUtil.parseRelativeTime("1");
+} catch (IOException e) {
+assertExceptionContains("too short", e);
+}
+try {
+DFSUtil.parseRelativeTime("1z");
+} catch (IOException e) {
+assertExceptionContains("unknown time unit", e);
+}
+try {
+DFSUtil.parseRelativeTime("yyz");
+} catch (IOException e) {
+assertExceptionContains("is not a number", e);
+}
+assertEquals(61*1000, DFSUtil.parseRelativeTime("61s"));
+assertEquals(61*60*1000, DFSUtil.parseRelativeTime("61m"));
+assertEquals(0, DFSUtil.parseRelativeTime("0s"));
+assertEquals(25*60*60*1000, DFSUtil.parseRelativeTime("25h"));
+assertEquals(4*24*60*60*1000, DFSUtil.parseRelativeTime("4d"));
+assertEquals(999*24*60*60*1000, DFSUtil.parseRelativeTime("999d"));
+}
 }

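The new tests above pin down the behavior of the two DFSUtil helpers this change adds. A quick standalone usage sketch, with expected values taken from those tests (the class name is illustrative):

import java.io.IOException;

import org.apache.hadoop.hdfs.DFSUtil;

public class TtlParsingExample {
  public static void main(String[] args) throws IOException {
    // Relative time strings use the [smhd] suffixes accepted by -ttl.
    System.out.println(DFSUtil.parseRelativeTime("61s")); // 61000
    System.out.println(DFSUtil.parseRelativeTime("2d"));  // 172800000

    // durationToString renders a millisecond duration as DDD:HH:MM:SS.
    long ms = ((24 * 60 * 60) + (60 * 60) + 60 + 1) * 1000L;
    System.out.println(DFSUtil.durationToString(ms));     // 001:01:01:01
  }
}
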
@@ -33,10 +33,12 @@ import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
+import java.util.Date;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;

+import org.apache.commons.lang.time.DateUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -54,13 +56,13 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
+import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveStats;
 import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
-import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
+import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration;
 import org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.CachedBlocksList.Type;
-import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.io.nativeio.NativeIO.POSIX.CacheManipulator;
@@ -521,10 +523,14 @@ public class TestCacheDirectives {
 int numEntries = 10;
 String entryPrefix = "/party-";
 long prevId = -1;
+final Date expiry = new Date();
 for (int i=0; i<numEntries; i++) {
 prevId = dfs.addCacheDirective(
 new CacheDirectiveInfo.Builder().
-setPath(new Path(entryPrefix + i)).setPool(pool).build());
+setPath(new Path(entryPrefix + i)).setPool(pool).
+setExpiration(
+CacheDirectiveInfo.Expiration.newAbsolute(expiry.getTime())).
+build());
 }
 RemoteIterator<CacheDirectiveEntry> dit
 = dfs.listCacheDirectives(null);
@@ -558,6 +564,7 @@ public class TestCacheDirectives {
 assertEquals(i+1, cd.getId().longValue());
 assertEquals(entryPrefix + i, cd.getPath().toUri().getPath());
 assertEquals(pool, cd.getPool());
+assertEquals(expiry.getTime(), cd.getExpiration().getMillis());
 }
 assertFalse("Unexpected # of cache directives found", dit.hasNext());

@@ -1001,4 +1008,58 @@ public class TestCacheDirectives {
 info.getMode().toShort());
 assertEquals("Mismatched weight", 99, (int)info.getWeight());
 }

+@Test(timeout=60000)
+public void testExpiry() throws Exception {
+HdfsConfiguration conf = createCachingConf();
+MiniDFSCluster cluster =
+new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATANODES).build();
+try {
+DistributedFileSystem dfs = cluster.getFileSystem();
+String pool = "pool1";
+dfs.addCachePool(new CachePoolInfo(pool));
+Path p = new Path("/mypath");
+DFSTestUtil.createFile(dfs, p, BLOCK_SIZE*2, (short)2, 0x999);
+// Expire after test timeout
+Date start = new Date();
+Date expiry = DateUtils.addSeconds(start, 120);
+final long id = dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
+.setPath(p)
+.setPool(pool)
+.setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(expiry))
+.setReplication((short)2)
+.build());
+waitForCachedBlocks(cluster.getNameNode(), 2, 4, "testExpiry:1");
+// Change it to expire sooner
+dfs.modifyCacheDirective(new CacheDirectiveInfo.Builder().setId(id)
+.setExpiration(Expiration.newRelative(0)).build());
+waitForCachedBlocks(cluster.getNameNode(), 0, 0, "testExpiry:2");
+RemoteIterator<CacheDirectiveEntry> it = dfs.listCacheDirectives(null);
+CacheDirectiveEntry ent = it.next();
+assertFalse(it.hasNext());
+Date entryExpiry = new Date(ent.getInfo().getExpiration().getMillis());
+assertTrue("Directive should have expired",
+entryExpiry.before(new Date()));
+// Change it back to expire later
+dfs.modifyCacheDirective(new CacheDirectiveInfo.Builder().setId(id)
+.setExpiration(Expiration.newRelative(120000)).build());
+waitForCachedBlocks(cluster.getNameNode(), 2, 4, "testExpiry:3");
+it = dfs.listCacheDirectives(null);
+ent = it.next();
+assertFalse(it.hasNext());
+entryExpiry = new Date(ent.getInfo().getExpiration().getMillis());
+assertTrue("Directive should not have expired",
+entryExpiry.after(new Date()));
+// Verify that setting a negative TTL throws an error
+try {
+dfs.modifyCacheDirective(new CacheDirectiveInfo.Builder().setId(id)
+.setExpiration(Expiration.newRelative(-1)).build());
+} catch (InvalidRequestException e) {
+GenericTestUtils
+.assertExceptionContains("Cannot set a negative expiration", e);
+}
+} finally {
+cluster.shutdown();
+}
+}
 }
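testExpiry above exercises the modify path end to end; in isolation the same flow looks roughly like this sketch, using only APIs that appear in the test (method and variable names are illustrative):

import java.io.IOException;
import java.util.Date;

import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration;

public class ModifyTtlExample {
  /** Push a directive's expiration two minutes into the future. */
  static void extendTtl(DistributedFileSystem dfs, long id)
      throws IOException {
    dfs.modifyCacheDirective(new CacheDirectiveInfo.Builder()
        .setId(id)
        .setExpiration(Expiration.newRelative(120000))
        .build());
  }

  /** Print the absolute expiry of every listed directive as a Date. */
  static void printExpirations(DistributedFileSystem dfs) throws IOException {
    RemoteIterator<CacheDirectiveEntry> it = dfs.listCacheDirectives(null);
    while (it.hasNext()) {
      CacheDirectiveEntry ent = it.next();
      System.out.println(ent.getInfo().getId() + " expires at "
          + new Date(ent.getInfo().getExpiration().getMillis()));
    }
  }
}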

Binary file not shown.
@@ -13,8 +13,8 @@
 <TXID>2</TXID>
 <DELEGATION_KEY>
 <KEY_ID>1</KEY_ID>
-<EXPIRY_DATE>1381946377599</EXPIRY_DATE>
+<EXPIRY_DATE>1386314182272</EXPIRY_DATE>
-<KEY>4f37c0db7342fb35</KEY>
+<KEY>39c9f9e9c1e88712</KEY>
 </DELEGATION_KEY>
 </DATA>
 </RECORD>
@@ -24,8 +24,8 @@
 <TXID>3</TXID>
 <DELEGATION_KEY>
 <KEY_ID>2</KEY_ID>
-<EXPIRY_DATE>1381946377609</EXPIRY_DATE>
+<EXPIRY_DATE>1386314182280</EXPIRY_DATE>
-<KEY>471d4ddd00402ba6</KEY>
+<KEY>2b35e969a178a2f6</KEY>
 </DELEGATION_KEY>
 </DATA>
 </RECORD>
@@ -37,18 +37,18 @@
 <INODEID>16386</INODEID>
 <PATH>/file_create_u\0001;F431</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179312</MTIME>
+<MTIME>1385622983286</MTIME>
-<ATIME>1381255179312</ATIME>
+<ATIME>1385622983286</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
-<CLIENT_NAME>DFSClient_NONMAPREDUCE_-134124999_1</CLIENT_NAME>
+<CLIENT_NAME>DFSClient_NONMAPREDUCE_-1208536327_1</CLIENT_NAME>
 <CLIENT_MACHINE>127.0.0.1</CLIENT_MACHINE>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
 <GROUPNAME>supergroup</GROUPNAME>
 <MODE>420</MODE>
 </PERMISSION_STATUS>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>8</RPC_CALLID>
+<RPC_CALLID>6</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -59,8 +59,8 @@
 <INODEID>0</INODEID>
 <PATH>/file_create_u\0001;F431</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179355</MTIME>
+<MTIME>1385622983323</MTIME>
-<ATIME>1381255179312</ATIME>
+<ATIME>1385622983286</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
 <CLIENT_NAME></CLIENT_NAME>
 <CLIENT_MACHINE></CLIENT_MACHINE>
@@ -78,9 +78,9 @@
 <LENGTH>0</LENGTH>
 <SRC>/file_create_u\0001;F431</SRC>
 <DST>/file_moved</DST>
-<TIMESTAMP>1381255179373</TIMESTAMP>
+<TIMESTAMP>1385622983331</TIMESTAMP>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>10</RPC_CALLID>
+<RPC_CALLID>8</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -89,9 +89,9 @@
 <TXID>7</TXID>
 <LENGTH>0</LENGTH>
 <PATH>/file_moved</PATH>
-<TIMESTAMP>1381255179397</TIMESTAMP>
+<TIMESTAMP>1385622983340</TIMESTAMP>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>11</RPC_CALLID>
+<RPC_CALLID>9</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -101,7 +101,7 @@
 <LENGTH>0</LENGTH>
 <INODEID>16387</INODEID>
 <PATH>/directory_mkdir</PATH>
-<TIMESTAMP>1381255179424</TIMESTAMP>
+<TIMESTAMP>1385622983351</TIMESTAMP>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
 <GROUPNAME>supergroup</GROUPNAME>
@@ -136,8 +136,8 @@
 <TXID>12</TXID>
 <SNAPSHOTROOT>/directory_mkdir</SNAPSHOTROOT>
 <SNAPSHOTNAME>snapshot1</SNAPSHOTNAME>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>16</RPC_CALLID>
+<RPC_CALLID>14</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -147,8 +147,8 @@
 <SNAPSHOTROOT>/directory_mkdir</SNAPSHOTROOT>
 <SNAPSHOTOLDNAME>snapshot1</SNAPSHOTOLDNAME>
 <SNAPSHOTNEWNAME>snapshot2</SNAPSHOTNEWNAME>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>17</RPC_CALLID>
+<RPC_CALLID>15</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -157,8 +157,8 @@
 <TXID>14</TXID>
 <SNAPSHOTROOT>/directory_mkdir</SNAPSHOTROOT>
 <SNAPSHOTNAME>snapshot2</SNAPSHOTNAME>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>18</RPC_CALLID>
+<RPC_CALLID>16</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -169,18 +169,18 @@
 <INODEID>16388</INODEID>
 <PATH>/file_create_u\0001;F431</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179522</MTIME>
+<MTIME>1385622983397</MTIME>
-<ATIME>1381255179522</ATIME>
+<ATIME>1385622983397</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
-<CLIENT_NAME>DFSClient_NONMAPREDUCE_-134124999_1</CLIENT_NAME>
+<CLIENT_NAME>DFSClient_NONMAPREDUCE_-1208536327_1</CLIENT_NAME>
 <CLIENT_MACHINE>127.0.0.1</CLIENT_MACHINE>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
 <GROUPNAME>supergroup</GROUPNAME>
 <MODE>420</MODE>
 </PERMISSION_STATUS>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>19</RPC_CALLID>
+<RPC_CALLID>17</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -191,8 +191,8 @@
 <INODEID>0</INODEID>
 <PATH>/file_create_u\0001;F431</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179531</MTIME>
+<MTIME>1385622983402</MTIME>
-<ATIME>1381255179522</ATIME>
+<ATIME>1385622983397</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
 <CLIENT_NAME></CLIENT_NAME>
 <CLIENT_MACHINE></CLIENT_MACHINE>
@@ -253,10 +253,10 @@
 <LENGTH>0</LENGTH>
 <SRC>/file_create_u\0001;F431</SRC>
 <DST>/file_moved</DST>
-<TIMESTAMP>1381255179602</TIMESTAMP>
+<TIMESTAMP>1385622983438</TIMESTAMP>
 <OPTIONS>NONE</OPTIONS>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>26</RPC_CALLID>
+<RPC_CALLID>24</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -267,18 +267,18 @@
 <INODEID>16389</INODEID>
 <PATH>/file_concat_target</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179619</MTIME>
+<MTIME>1385622983445</MTIME>
-<ATIME>1381255179619</ATIME>
+<ATIME>1385622983445</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
-<CLIENT_NAME>DFSClient_NONMAPREDUCE_-134124999_1</CLIENT_NAME>
+<CLIENT_NAME>DFSClient_NONMAPREDUCE_-1208536327_1</CLIENT_NAME>
 <CLIENT_MACHINE>127.0.0.1</CLIENT_MACHINE>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
 <GROUPNAME>supergroup</GROUPNAME>
 <MODE>420</MODE>
 </PERMISSION_STATUS>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>28</RPC_CALLID>
+<RPC_CALLID>26</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -388,8 +388,8 @@
 <INODEID>0</INODEID>
 <PATH>/file_concat_target</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179862</MTIME>
+<MTIME>1385622983524</MTIME>
-<ATIME>1381255179619</ATIME>
+<ATIME>1385622983445</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
 <CLIENT_NAME></CLIENT_NAME>
 <CLIENT_MACHINE></CLIENT_MACHINE>
@@ -423,18 +423,18 @@
 <INODEID>16390</INODEID>
 <PATH>/file_concat_0</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179876</MTIME>
+<MTIME>1385622983530</MTIME>
-<ATIME>1381255179876</ATIME>
+<ATIME>1385622983530</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
-<CLIENT_NAME>DFSClient_NONMAPREDUCE_-134124999_1</CLIENT_NAME>
+<CLIENT_NAME>DFSClient_NONMAPREDUCE_-1208536327_1</CLIENT_NAME>
 <CLIENT_MACHINE>127.0.0.1</CLIENT_MACHINE>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
 <GROUPNAME>supergroup</GROUPNAME>
 <MODE>420</MODE>
 </PERMISSION_STATUS>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>41</RPC_CALLID>
+<RPC_CALLID>39</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -544,8 +544,8 @@
 <INODEID>0</INODEID>
 <PATH>/file_concat_0</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179957</MTIME>
+<MTIME>1385622983582</MTIME>
-<ATIME>1381255179876</ATIME>
+<ATIME>1385622983530</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
 <CLIENT_NAME></CLIENT_NAME>
 <CLIENT_MACHINE></CLIENT_MACHINE>
@@ -579,18 +579,18 @@
 <INODEID>16391</INODEID>
 <PATH>/file_concat_1</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255179967</MTIME>
+<MTIME>1385622983593</MTIME>
-<ATIME>1381255179967</ATIME>
+<ATIME>1385622983593</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
-<CLIENT_NAME>DFSClient_NONMAPREDUCE_-134124999_1</CLIENT_NAME>
+<CLIENT_NAME>DFSClient_NONMAPREDUCE_-1208536327_1</CLIENT_NAME>
 <CLIENT_MACHINE>127.0.0.1</CLIENT_MACHINE>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
 <GROUPNAME>supergroup</GROUPNAME>
 <MODE>420</MODE>
 </PERMISSION_STATUS>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>53</RPC_CALLID>
+<RPC_CALLID>51</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -700,8 +700,8 @@
 <INODEID>0</INODEID>
 <PATH>/file_concat_1</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255180085</MTIME>
+<MTIME>1385622983655</MTIME>
-<ATIME>1381255179967</ATIME>
+<ATIME>1385622983593</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
 <CLIENT_NAME></CLIENT_NAME>
 <CLIENT_MACHINE></CLIENT_MACHINE>
@@ -733,13 +733,13 @@
 <TXID>56</TXID>
 <LENGTH>0</LENGTH>
 <TRG>/file_concat_target</TRG>
-<TIMESTAMP>1381255180099</TIMESTAMP>
+<TIMESTAMP>1385622983667</TIMESTAMP>
 <SOURCES>
 <SOURCE1>/file_concat_0</SOURCE1>
 <SOURCE2>/file_concat_1</SOURCE2>
 </SOURCES>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>64</RPC_CALLID>
+<RPC_CALLID>62</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -750,15 +750,15 @@
 <INODEID>16392</INODEID>
 <PATH>/file_symlink</PATH>
 <VALUE>/file_concat_target</VALUE>
-<MTIME>1381255180116</MTIME>
+<MTIME>1385622983683</MTIME>
-<ATIME>1381255180116</ATIME>
+<ATIME>1385622983683</ATIME>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
 <GROUPNAME>supergroup</GROUPNAME>
 <MODE>511</MODE>
 </PERMISSION_STATUS>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>65</RPC_CALLID>
+<RPC_CALLID>63</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -771,11 +771,11 @@
 <OWNER>andrew</OWNER>
 <RENEWER>JobTracker</RENEWER>
 <REALUSER></REALUSER>
-<ISSUE_DATE>1381255180128</ISSUE_DATE>
+<ISSUE_DATE>1385622983698</ISSUE_DATE>
-<MAX_DATE>1381859980128</MAX_DATE>
+<MAX_DATE>1386227783698</MAX_DATE>
 <MASTER_KEY_ID>2</MASTER_KEY_ID>
 </DELEGATION_TOKEN_IDENTIFIER>
-<EXPIRY_TIME>1381341580128</EXPIRY_TIME>
+<EXPIRY_TIME>1385709383698</EXPIRY_TIME>
 </DATA>
 </RECORD>
 <RECORD>
@@ -788,11 +788,11 @@
 <OWNER>andrew</OWNER>
 <RENEWER>JobTracker</RENEWER>
 <REALUSER></REALUSER>
-<ISSUE_DATE>1381255180128</ISSUE_DATE>
+<ISSUE_DATE>1385622983698</ISSUE_DATE>
-<MAX_DATE>1381859980128</MAX_DATE>
+<MAX_DATE>1386227783698</MAX_DATE>
 <MASTER_KEY_ID>2</MASTER_KEY_ID>
 </DELEGATION_TOKEN_IDENTIFIER>
-<EXPIRY_TIME>1381341580177</EXPIRY_TIME>
+<EXPIRY_TIME>1385709383746</EXPIRY_TIME>
 </DATA>
 </RECORD>
 <RECORD>
@@ -805,8 +805,8 @@
 <OWNER>andrew</OWNER>
 <RENEWER>JobTracker</RENEWER>
 <REALUSER></REALUSER>
-<ISSUE_DATE>1381255180128</ISSUE_DATE>
+<ISSUE_DATE>1385622983698</ISSUE_DATE>
-<MAX_DATE>1381859980128</MAX_DATE>
+<MAX_DATE>1386227783698</MAX_DATE>
 <MASTER_KEY_ID>2</MASTER_KEY_ID>
 </DELEGATION_TOKEN_IDENTIFIER>
 </DATA>
@@ -822,8 +822,8 @@
 <MODE>493</MODE>
 </PERMISSION_STATUS>
 <WEIGHT>100</WEIGHT>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>75</RPC_CALLID>
+<RPC_CALLID>67</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -835,39 +835,40 @@
 <GROUPNAME>party</GROUPNAME>
 <MODE>448</MODE>
 <WEIGHT>1989</WEIGHT>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>76</RPC_CALLID>
+<RPC_CALLID>68</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
-<OPCODE>OP_ADD_PATH_BASED_CACHE_DIRECTIVE</OPCODE>
+<OPCODE>OP_ADD_CACHE_DIRECTIVE</OPCODE>
 <DATA>
 <TXID>63</TXID>
 <ID>1</ID>
 <PATH>/bar</PATH>
 <REPLICATION>1</REPLICATION>
 <POOL>poolparty</POOL>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<EXPIRATION>-1</EXPIRATION>
-<RPC_CALLID>77</RPC_CALLID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
+<RPC_CALLID>69</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
-<OPCODE>OP_MODIFY_PATH_BASED_CACHE_DIRECTIVE</OPCODE>
+<OPCODE>OP_MODIFY_CACHE_DIRECTIVE</OPCODE>
 <DATA>
 <TXID>64</TXID>
 <ID>1</ID>
-<REPLICATION>2</REPLICATION>
+<PATH>/bar2</PATH>
-<RPC_CLIENTID></RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>-2</RPC_CALLID>
+<RPC_CALLID>70</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
-<OPCODE>OP_REMOVE_PATH_BASED_CACHE_DIRECTIVE</OPCODE>
+<OPCODE>OP_REMOVE_CACHE_DIRECTIVE</OPCODE>
 <DATA>
 <TXID>65</TXID>
 <ID>1</ID>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>78</RPC_CALLID>
+<RPC_CALLID>71</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -875,8 +876,8 @@
 <DATA>
 <TXID>66</TXID>
 <POOLNAME>poolparty</POOLNAME>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>79</RPC_CALLID>
+<RPC_CALLID>72</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -887,18 +888,18 @@
 <INODEID>16393</INODEID>
 <PATH>/hard-lease-recovery-test</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255180288</MTIME>
+<MTIME>1385622983896</MTIME>
-<ATIME>1381255180288</ATIME>
+<ATIME>1385622983896</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
-<CLIENT_NAME>DFSClient_NONMAPREDUCE_-134124999_1</CLIENT_NAME>
+<CLIENT_NAME>DFSClient_NONMAPREDUCE_-1208536327_1</CLIENT_NAME>
 <CLIENT_MACHINE>127.0.0.1</CLIENT_MACHINE>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
 <GROUPNAME>supergroup</GROUPNAME>
 <MODE>420</MODE>
 </PERMISSION_STATUS>
-<RPC_CLIENTID>27ac79f0-d378-4933-824b-c2a188968d97</RPC_CLIENTID>
+<RPC_CLIENTID>26a8071a-18f8-42ce-ad7e-75692493e45c</RPC_CLIENTID>
-<RPC_CALLID>74</RPC_CALLID>
+<RPC_CALLID>73</RPC_CALLID>
 </DATA>
 </RECORD>
 <RECORD>
@@ -936,7 +937,7 @@
 <PATH>/hard-lease-recovery-test</PATH>
 <BLOCK>
 <BLOCK_ID>1073741834</BLOCK_ID>
-<NUM_BYTES>0</NUM_BYTES>
+<NUM_BYTES>11</NUM_BYTES>
 <GENSTAMP>1010</GENSTAMP>
 </BLOCK>
 <RPC_CLIENTID></RPC_CLIENTID>
@@ -954,23 +955,7 @@
 <OPCODE>OP_REASSIGN_LEASE</OPCODE>
 <DATA>
 <TXID>73</TXID>
-<LEASEHOLDER>DFSClient_NONMAPREDUCE_-134124999_1</LEASEHOLDER>
+<LEASEHOLDER>DFSClient_NONMAPREDUCE_-1208536327_1</LEASEHOLDER>
-<PATH>/hard-lease-recovery-test</PATH>
-<NEWHOLDER>HDFS_NameNode</NEWHOLDER>
-</DATA>
-</RECORD>
-<RECORD>
-<OPCODE>OP_SET_GENSTAMP_V2</OPCODE>
-<DATA>
-<TXID>74</TXID>
-<GENSTAMPV2>1012</GENSTAMPV2>
-</DATA>
-</RECORD>
-<RECORD>
-<OPCODE>OP_REASSIGN_LEASE</OPCODE>
-<DATA>
-<TXID>75</TXID>
-<LEASEHOLDER>HDFS_NameNode</LEASEHOLDER>
 <PATH>/hard-lease-recovery-test</PATH>
 <NEWHOLDER>HDFS_NameNode</NEWHOLDER>
 </DATA>
@@ -978,20 +963,20 @@
 <RECORD>
 <OPCODE>OP_CLOSE</OPCODE>
 <DATA>
-<TXID>76</TXID>
+<TXID>74</TXID>
 <LENGTH>0</LENGTH>
 <INODEID>0</INODEID>
 <PATH>/hard-lease-recovery-test</PATH>
 <REPLICATION>1</REPLICATION>
-<MTIME>1381255185142</MTIME>
+<MTIME>1385622986265</MTIME>
-<ATIME>1381255180288</ATIME>
+<ATIME>1385622983896</ATIME>
 <BLOCKSIZE>512</BLOCKSIZE>
 <CLIENT_NAME></CLIENT_NAME>
 <CLIENT_MACHINE></CLIENT_MACHINE>
 <BLOCK>
 <BLOCK_ID>1073741834</BLOCK_ID>
 <NUM_BYTES>11</NUM_BYTES>
-<GENSTAMP>1012</GENSTAMP>
+<GENSTAMP>1011</GENSTAMP>
 </BLOCK>
 <PERMISSION_STATUS>
 <USERNAME>andrew</USERNAME>
@@ -1003,7 +988,7 @@
 <RECORD>
 <OPCODE>OP_END_LOG_SEGMENT</OPCODE>
 <DATA>
-<TXID>77</TXID>
+<TXID>75</TXID>
 </DATA>
 </RECORD>
 </EDITS>

@ -165,9 +165,9 @@
       <description>Testing creating cache paths</description>
       <test-commands>
         <cache-admin-command>-addPool pool1</cache-admin-command>
-        <cache-admin-command>-addDirective -path /foo -pool pool1</cache-admin-command>
-        <cache-admin-command>-addDirective -path /bar -pool pool1</cache-admin-command>
-        <cache-admin-command>-addDirective -path /baz -replication 2 -pool pool1</cache-admin-command>
+        <cache-admin-command>-addDirective -path /foo -pool pool1 -ttl 2d</cache-admin-command>
+        <cache-admin-command>-addDirective -path /bar -pool pool1 -ttl 24h</cache-admin-command>
+        <cache-admin-command>-addDirective -path /baz -replication 2 -pool pool1 -ttl 60m</cache-admin-command>
         <cache-admin-command>-listDirectives -pool pool1</cache-admin-command>
       </test-commands>
       <cleanup-commands>
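The three -addDirective commands above now attach relative TTLs (-ttl 2d, -ttl 24h, -ttl 60m) to their cache directives. The sketch below is only an illustration of how such suffix-based duration strings can be converted to milliseconds before being stored on a directive; RelativeTtlSketch and parseTtlMillis are hypothetical names, not code from this patch.

```java
import java.io.IOException;
import java.util.concurrent.TimeUnit;

public class RelativeTtlSketch {

  /** Converts strings such as "2d", "24h", "60m", or "30s" into milliseconds. */
  public static long parseTtlMillis(String relTime) throws IOException {
    if (relTime == null || relTime.isEmpty()) {
      throw new IOException("Empty relative time value");
    }
    // Last character is the unit suffix; everything before it is the number.
    final char unit = relTime.charAt(relTime.length() - 1);
    final String digits = relTime.substring(0, relTime.length() - 1);
    final long value;
    try {
      value = Long.parseLong(digits);
    } catch (NumberFormatException e) {
      throw new IOException("Cannot parse duration '" + relTime + "'", e);
    }
    switch (unit) {
      case 's': return TimeUnit.SECONDS.toMillis(value);
      case 'm': return TimeUnit.MINUTES.toMillis(value);
      case 'h': return TimeUnit.HOURS.toMillis(value);
      case 'd': return TimeUnit.DAYS.toMillis(value);
      default:
        throw new IOException("Unknown duration suffix '" + unit + "' in " + relTime);
    }
  }

  public static void main(String[] args) throws IOException {
    System.out.println(parseTtlMillis("2d"));   // 172800000
    System.out.println(parseTtlMillis("24h"));  // 86400000
    System.out.println(parseTtlMillis("60m"));  // 3600000
  }
}
```

Running main prints 172800000, 86400000, and 3600000, i.e. two days, one day, and one hour expressed in milliseconds, matching the three TTLs used by the test commands.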
@ -180,15 +180,15 @@
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 1 pool1 1 /foo</expected-output>
+          <expected-output> 1 pool1 1</expected-output>
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 2 pool1 1 /bar</expected-output>
+          <expected-output> 2 pool1 1</expected-output>
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 3 pool1 2 /baz</expected-output>
+          <expected-output> 3 pool1 2 </expected-output>
         </comparator>
       </comparators>
     </test>
@ -234,11 +234,11 @@
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 8 pool2 1 /baz</expected-output>
+          <expected-output> 8 pool2 1 never /baz </expected-output>
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 9 pool2 1 /buz</expected-output>
+          <expected-output> 9 pool2 1 never /buz </expected-output>
         </comparator>
       </comparators>
     </test>
@ -265,11 +265,11 @@
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 10 pool1 1 /foo</expected-output>
+          <expected-output> 10 pool1 1 never /foo </expected-output>
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 12 pool2 1 /foo</expected-output>
+          <expected-output> 12 pool2 1 never /foo </expected-output>
         </comparator>
       </comparators>
     </test>
@ -296,7 +296,7 @@
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 16 pool2 1 /foo</expected-output>
+          <expected-output> 16 pool2 1 never /foo </expected-output>
         </comparator>
       </comparators>
     </test>
@ -320,7 +320,7 @@
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 19 pool1 1 /bar</expected-output>
+          <expected-output> 19 pool1 1 never /bar </expected-output>
         </comparator>
       </comparators>
     </test>
@ -349,11 +349,11 @@
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 22 pool1 1 /bar</expected-output>
+          <expected-output> 22 pool1 1 never /bar </expected-output>
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 24 pool2 1 /bar</expected-output>
+          <expected-output> 24 pool2 1 never /bar </expected-output>
         </comparator>
       </comparators>
     </test>
@ -379,7 +379,7 @@
         </comparator>
         <comparator>
           <type>SubstringComparator</type>
-          <expected-output> 25 pool1 1 /bar3</expected-output>
+          <expected-output> 25 pool1 1 never /bar3 </expected-output>
         </comparator>
       </comparators>
     </test>