diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index afcc54cafac..baced419212 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -459,8 +459,6 @@ Release 2.0.3-alpha - Unreleased
     HADOOP-9231. Parametrize staging URL for the uniformity of
     distributionManagement. (Konstantin Boudnik via suresh)
 
-    HADOOP-9241. DU refresh interval is not configurable (harsh)
-
   OPTIMIZATIONS
 
     HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 5fb1d40784e..3a236cbc278 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -54,10 +54,6 @@ public class CommonConfigurationKeysPublic {
   public static final String  FS_DF_INTERVAL_KEY = "fs.df.interval";
   /** Default value for FS_DF_INTERVAL_KEY */
   public static final long    FS_DF_INTERVAL_DEFAULT = 60000;
-  /** See core-default.xml */
-  public static final String  FS_DU_INTERVAL_KEY = "fs.du.interval";
-  /** Default value for FS_DU_INTERVAL_KEY */
-  public static final long    FS_DU_INTERVAL_DEFAULT = 60000;
 
   //Defaults are not specified for following keys
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
index 76f77cf4f5b..9a9f1e3efc7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.fs;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.util.Shell;
 
 import java.io.BufferedReader;
@@ -65,8 +64,8 @@ public class DU extends Shell {
    * @throws IOException if we fail to refresh the disk usage
    */
   public DU(File path, Configuration conf) throws IOException {
-    this(path, conf.getLong(CommonConfigurationKeys.FS_DU_INTERVAL_KEY,
-        CommonConfigurationKeys.FS_DU_INTERVAL_DEFAULT));
+    this(path, 600000L);
+    //10 minutes default refresh interval
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 8921eb3fd79..345995c4279 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -445,6 +445,12 @@
   <description>File space usage statistics refresh interval in msec.</description>
 </property>
 
+<property>
+  <name>fs.du.interval</name>
+  <value>60000</value>
+  <description>File space usage statistics refresh interval in msec.</description>
+</property>
+
 <property>
   <name>fs.s3.block.size</name>
   <value>67108864</value>
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
index ef59b85bd34..71024acd9e6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
@@ -24,9 +24,6 @@ import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.util.Random;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-
 /** This test makes sure that "DU" does not get to run on each call to getUsed */
 public class TestDU extends TestCase {
   final static private File DU_DIR = new File(
@@ -109,9 +106,7 @@ public class TestDU extends TestCase {
   public void testDUGetUsedWillNotReturnNegative() throws IOException {
     File file = new File(DU_DIR, "data");
     assertTrue(file.createNewFile());
-    Configuration conf = new Configuration();
-    conf.setLong(CommonConfigurationKeys.FS_DU_INTERVAL_KEY, 10000L);
-    DU du = new DU(file, conf);
+    DU du = new DU(file, 10000);
    du.decDfsUsed(Long.MAX_VALUE);
    long duSize = du.getUsed();
    assertTrue(String.valueOf(duSize), duSize >= 0L);
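
A minimal sketch (not part of the patch) of how the refresh interval reaches DU after this change, using only the two constructors touched in the hunks above; the class name and scratch directory below are hypothetical, and getUsed() is called without start() exactly as TestDU does:

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DU;

public class DUIntervalCheck {
  public static void main(String[] args) throws IOException {
    // Hypothetical scratch directory for the du command to measure.
    File dir = new File(System.getProperty("java.io.tmpdir"), "du-check");
    dir.mkdirs();

    // Configuration-based constructor: with this patch it no longer reads
    // fs.du.interval and always uses the fixed 600000 ms (10 minute) interval.
    DU fromConf = new DU(dir, new Configuration());

    // Interval-based constructor: the caller passes the refresh interval in
    // milliseconds directly, as the updated TestDU does with 10000 ms.
    DU explicit = new DU(dir, 10000L);

    System.out.println("used (conf-based, 10 min interval): " + fromConf.getUsed());
    System.out.println("used (explicit 10 s interval):      " + explicit.getUsed());
  }
}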