diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index fb3acf71e12..d4ec567e732 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -642,10 +642,7 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final String DFS_IMAGE_TRANSFER_CHUNKSIZE_KEY = "dfs.image.transfer.chunksize";
   public static final int DFS_IMAGE_TRANSFER_CHUNKSIZE_DEFAULT = 64 * 1024;
 
-  //Keys with no defaults
-  public static final String DFS_DATANODE_PLUGINS_KEY = "dfs.datanode.plugins";
-  public static final String DFS_DATANODE_FSDATASET_FACTORY_KEY = "dfs.datanode.fsdataset.factory";
-  public static final String DFS_DATANODE_FSDATASET_VOLUME_CHOOSING_POLICY_KEY = "dfs.datanode.fsdataset.volume.choosing.policy";
+  // Datanode File IO Stats
   public static final String DFS_DATANODE_ENABLE_FILEIO_PROFILING_KEY =
       "dfs.datanode.enable.fileio.profiling";
   public static final boolean DFS_DATANODE_ENABLE_FILEIO_PROFILING_DEFAULT =
@@ -654,6 +651,17 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
       "dfs.datanode.enable.fileio.fault.injection";
   public static final boolean DFS_DATANODE_ENABLE_FILEIO_FAULT_INJECTION_DEFAULT =
       false;
+  public static final String
+      DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY =
+      "dfs.datanode.fileio.profiling.sampling.fraction";
+  public static final double
+      DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_DEFAULT = 1.0;
+
+  //Keys with no defaults
+  public static final String DFS_DATANODE_PLUGINS_KEY = "dfs.datanode.plugins";
+  public static final String DFS_DATANODE_FSDATASET_FACTORY_KEY = "dfs.datanode.fsdataset.factory";
+  public static final String DFS_DATANODE_FSDATASET_VOLUME_CHOOSING_POLICY_KEY = "dfs.datanode.fsdataset.volume.choosing.policy";
+
   public static final String DFS_DATANODE_AVAILABLE_SPACE_VOLUME_CHOOSING_POLICY_BALANCED_SPACE_THRESHOLD_KEY = "dfs.datanode.available-space-volume-choosing-policy.balanced-space-threshold";
   public static final long DFS_DATANODE_AVAILABLE_SPACE_VOLUME_CHOOSING_POLICY_BALANCED_SPACE_THRESHOLD_DEFAULT = 1024L * 1024L * 1024L * 10L; // 10 GB
   public static final String DFS_DATANODE_AVAILABLE_SPACE_VOLUME_CHOOSING_POLICY_BALANCED_SPACE_PREFERENCE_FRACTION_KEY = "dfs.datanode.available-space-volume-choosing-policy.balanced-space-preference-fraction";
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java
index 43ac495708b..61d0432c2c4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProfilingFileIoEvents.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hdfs.server.datanode;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -26,6 +28,7 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.util.Time;
 
 import javax.annotation.Nullable;
+import java.util.concurrent.ThreadLocalRandom;
 
 /**
  * Profiles the performance of the metadata and data related operations on
@@ -33,16 +36,30 @@ import javax.annotation.Nullable;
  */
 @InterfaceAudience.Private
 class ProfilingFileIoEvents {
+  static final Log LOG = LogFactory.getLog(ProfilingFileIoEvents.class);
   private final boolean isEnabled;
+  private final int sampleRangeMax;
 
   public ProfilingFileIoEvents(@Nullable
       Configuration conf) {
     if (conf != null) {
       isEnabled = conf.getBoolean(DFSConfigKeys
           .DFS_DATANODE_ENABLE_FILEIO_PROFILING_KEY, DFSConfigKeys
           .DFS_DATANODE_ENABLE_FILEIO_PROFILING_DEFAULT);
+      double fileIOSamplingFraction = conf.getDouble(DFSConfigKeys
+          .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY,
+          DFSConfigKeys
+              .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_DEFAULT);
+      if (fileIOSamplingFraction > 1) {
+        LOG.warn(DFSConfigKeys
+            .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY +
+            " value cannot be more than 1. Setting value to 1");
+        fileIOSamplingFraction = 1;
+      }
+      sampleRangeMax = (int) (fileIOSamplingFraction * Integer.MAX_VALUE);
     } else {
       isEnabled = false;
+      sampleRangeMax = 0;
     }
   }
 
@@ -69,7 +86,7 @@ class ProfilingFileIoEvents {
 
   public long beforeFileIo(@Nullable FsVolumeSpi volume,
       FileIoProvider.OPERATION op, long len) {
-    if (isEnabled) {
+    if (isEnabled && ThreadLocalRandom.current().nextInt() < sampleRangeMax) {
       DataNodeVolumeMetrics metrics = getVolumeMetrics(volume);
       if (metrics != null) {
         return Time.monotonicNow();
@@ -80,7 +97,7 @@ class ProfilingFileIoEvents {
 
   public void afterFileIo(@Nullable FsVolumeSpi volume,
       FileIoProvider.OPERATION op, long begin, long len) {
-    if (isEnabled) {
+    if (isEnabled && begin != 0) {
       DataNodeVolumeMetrics metrics = getVolumeMetrics(volume);
       if (metrics != null) {
         long latency = Time.monotonicNow() - begin;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java
index 71ad713d881..da10878f29c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java
@@ -107,6 +107,8 @@ public class TestHdfsConfigFields extends TestConfigurationFieldsBase {
         .add(DFSConfigKeys.DFS_DATANODE_ENABLE_FILEIO_PROFILING_KEY);
     configurationPropsToSkipCompare.add(DFSConfigKeys
         .DFS_DATANODE_ENABLE_FILEIO_FAULT_INJECTION_KEY);
+    configurationPropsToSkipCompare.add(DFSConfigKeys
+        .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY);
 
     // Allocate
     xmlPropsToSkipCompare = new HashSet();