diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cb76c8cf652..d754ad460d8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -498,6 +498,9 @@ Release 2.5.0 - UNRELEASED
     HDFS-6591. while loop is executed tens of thousands of times in Hedged Read
     (Liang Xie via cnauroth)
 
+    HDFS-6604. The short-circuit cache doesn't correctly time out replicas that
+    haven't been used in a while (cmccabe)
+
   BREAKDOWN OF HDFS-2006 SUBTASKS AND RELATED JIRAS
 
     HDFS-6299. Protobuf for XAttr and client-side implementation. (Yi Liu via umamahesh)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
index 7df340ac2a6..a4b852f32e4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
@@ -111,7 +111,7 @@ public class ShortCircuitCache implements Closeable {
         Long evictionTimeNs = Long.valueOf(0);
         while (true) {
           Entry<Long, ShortCircuitReplica> entry =
-              evictableMmapped.ceilingEntry(evictionTimeNs);
+              evictable.ceilingEntry(evictionTimeNs);
           if (entry == null) break;
           evictionTimeNs = entry.getKey();
           long evictionTimeMs =
@@ -384,10 +384,6 @@ public class ShortCircuitCache implements Closeable {
     this.shmManager = shmManager;
   }
 
-  public long getMmapRetryTimeoutMs() {
-    return mmapRetryTimeoutMs;
-  }
-
   public long getStaleThresholdMs() {
     return staleThresholdMs;
   }
@@ -847,7 +843,7 @@ public class ShortCircuitCache implements Closeable {
       } else if (replica.mmapData instanceof Long) {
         long lastAttemptTimeMs = (Long)replica.mmapData;
         long delta = Time.monotonicNow() - lastAttemptTimeMs;
-        if (delta < staleThresholdMs) {
+        if (delta < mmapRetryTimeoutMs) {
           if (LOG.isTraceEnabled()) {
             LOG.trace(this + ": can't create client mmap for " +
                 replica + " because we failed to " +
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitCache.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitCache.java
index d62b3eb3201..a2d2bf830f3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitCache.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitCache.java
@@ -197,11 +197,12 @@ public class TestShortCircuitCache {
   @Test(timeout=60000)
   public void testExpiry() throws Exception {
     final ShortCircuitCache cache =
-        new ShortCircuitCache(2, 1, 1, 10000000, 1, 10000, 0);
+        new ShortCircuitCache(2, 1, 1, 10000000, 1, 10000000, 0);
     final TestFileDescriptorPair pair = new TestFileDescriptorPair();
     ShortCircuitReplicaInfo replicaInfo1 = cache.fetchOrCreate(
-        new ExtendedBlockId(123, "test_bp1"), new SimpleReplicaCreator(123, cache, pair));
+        new ExtendedBlockId(123, "test_bp1"),
+        new SimpleReplicaCreator(123, cache, pair));
     Preconditions.checkNotNull(replicaInfo1.getReplica());
     Preconditions.checkState(replicaInfo1.getInvalidTokenException() == null);
     pair.compareWith(replicaInfo1.getReplica().getDataStream(),