HDFS-11477. Simplify file IO profiling configuration. Contributed by Hanisha Koneru.

parent a97833e0ed
commit e61491d476

@@ -332,7 +332,12 @@ Each metrics record contains tags such as SessionId and Hostname as additional information along with metrics.
 FsVolume
 --------
 
-Per-volume metrics contain Datanode Volume IO related statistics. Per-volume metrics are off by default. They can be enbabled by setting `dfs.datanode.enable.fileio.profiling` to **true**, but enabling per-volume metrics may have a performance impact. Each metrics record contains tags such as Hostname as additional information along with metrics.
+Per-volume metrics contain Datanode Volume IO related statistics. Per-volume
+metrics are off by default. They can be enabled by setting `dfs.datanode
+.fileio.profiling.sampling.fraction` to a fraction between 0.0 and 1.0.
+Setting this value to 0.0 would mean profiling is not enabled. But enabling
+per-volume metrics may have a performance impact. Each metrics record
+contains tags such as Hostname as additional information along with metrics.
 
 | Name | Description |
 |:---- |:---- |
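
Under the new scheme, the sampling fraction is the single switch for per-volume metrics. The sketch below mirrors the test change later in this commit (setting the fraction to 1.0 via `Configuration.setDouble`); it assumes only `HdfsConfiguration` and `DFSConfigKeys` from the Hadoop source tree, and the wrapper class and `main` method are purely illustrative, not part of this change.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

// Illustrative wrapper class; not part of the commit.
public class VolumeProfilingConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration();

    // One knob after this change: a fraction in (0.0, 1.0] enables per-volume
    // IO profiling at that sampling rate; 0.0 (the new default) keeps it off.
    conf.setDouble(
        DFSConfigKeys.DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY, 1.0);

    System.out.println("fileio profiling sampling fraction = " + conf.getDouble(
        DFSConfigKeys.DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY, 0.0));
  }
}
```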
|
@@ -718,10 +718,6 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final long DFS_EDIT_LOG_TRANSFER_RATE_DEFAULT = 0; //no throttling
 
   // Datanode File IO Stats
-  public static final String DFS_DATANODE_ENABLE_FILEIO_PROFILING_KEY =
-      "dfs.datanode.enable.fileio.profiling";
-  public static final boolean DFS_DATANODE_ENABLE_FILEIO_PROFILING_DEFAULT =
-      false;
   public static final String DFS_DATANODE_ENABLE_FILEIO_FAULT_INJECTION_KEY =
       "dfs.datanode.enable.fileio.fault.injection";
   public static final boolean

@@ -730,7 +726,7 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
       DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY =
       "dfs.datanode.fileio.profiling.sampling.fraction";
   public static final double
-      DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_DEAFULT = 1.0;
+      DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_DEFAULT = 0.0;
 
   //Keys with no defaults
   public static final String DFS_DATANODE_PLUGINS_KEY = "dfs.datanode.plugins";

@@ -62,7 +62,7 @@ import static org.apache.hadoop.hdfs.server.datanode.FileIoProvider.OPERATION.*;
  *
  * Behavior can be injected into these events by enabling the
  * profiling and/or fault injection event hooks through
- * {@link DFSConfigKeys#DFS_DATANODE_ENABLE_FILEIO_PROFILING_KEY} and
+ * {@link DFSConfigKeys#DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY} and
  * {@link DFSConfigKeys#DFS_DATANODE_ENABLE_FILEIO_FAULT_INJECTION_KEY}.
  * These event hooks are disabled by default.
  *
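
For reference, a hedged sketch of how both event hooks named in this javadoc could be switched on through `Configuration`. The two keys are the ones declared in `DFSConfigKeys` above; the helper class and method are hypothetical.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

// Hypothetical helper; only the two configuration keys come from the commit.
public final class FileIoHookConfigSketch {
  static Configuration withBothHooks() {
    Configuration conf = new HdfsConfiguration();
    // Profiling hook: driven solely by the sampling fraction after this commit.
    conf.setDouble(
        DFSConfigKeys.DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY, 0.1);
    // Fault injection hook: still a separate boolean key, unchanged here.
    conf.setBoolean(
        DFSConfigKeys.DFS_DATANODE_ENABLE_FILEIO_FAULT_INJECTION_KEY, true);
    return conf;
  }
}
```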
|
|
|
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static java.lang.Math.abs;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
|
@@ -43,18 +44,32 @@ class ProfilingFileIoEvents {
 
   public ProfilingFileIoEvents(@Nullable Configuration conf) {
     if (conf != null) {
-      isEnabled = conf.getBoolean(DFSConfigKeys
-          .DFS_DATANODE_ENABLE_FILEIO_PROFILING_KEY, DFSConfigKeys
-          .DFS_DATANODE_ENABLE_FILEIO_PROFILING_DEFAULT);
       double fileIOSamplingFraction = conf.getDouble(DFSConfigKeys
           .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY,
           DFSConfigKeys
-              .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_DEAFULT);
-      if (fileIOSamplingFraction > 1) {
+              .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_DEFAULT);
+      if (abs(fileIOSamplingFraction) < 0.000001) {
+        LOG.info(DFSConfigKeys
+            .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY + " set to "
+            + fileIOSamplingFraction + ". Disabling file IO profiling");
+        isEnabled = false;
+      } else if (fileIOSamplingFraction < 0.000001) {
         LOG.warn(DFSConfigKeys
             .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY +
-            " value cannot be more than 1. Setting value to 1");
+            " value cannot be less than 0. Disabling file IO profiling.");
+        isEnabled = false;
+      } else if (fileIOSamplingFraction > 1) {
+        LOG.warn(DFSConfigKeys
+            .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY +
+            " value cannot be more than 1. Setting value to 1 and enabling " +
+            "file IO profiling");
+        isEnabled = true;
         fileIOSamplingFraction = 1;
+      } else {
+        LOG.info(DFSConfigKeys
+            .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY + " set to "
+            + fileIOSamplingFraction + ". Enabling file IO profiling");
+        isEnabled = true;
       }
       sampleRangeMax = (int) (fileIOSamplingFraction * Integer.MAX_VALUE);
     } else {
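
The constructor above folds the configured fraction into `sampleRangeMax` so that later per-event checks stay cheap. The standalone sketch below illustrates that idea under the assumption that sampling is decided by comparing a random int against `sampleRangeMax`; the class and method names here are hypothetical, and the real per-event check in `ProfilingFileIoEvents` is not shown in this hunk.

```java
import java.util.concurrent.ThreadLocalRandom;

// Standalone sketch of the fraction -> sampleRangeMax idea; not the real class.
public class SamplingSketch {
  private final boolean isEnabled;
  private final int sampleRangeMax;

  SamplingSketch(double fraction) {
    // Mirror the constructor above: treat ~0 as disabled, clamp values above 1,
    // and map the fraction onto the int range once up front.
    this.isEnabled = fraction > 0.000001;
    this.sampleRangeMax = (int) (Math.min(Math.max(fraction, 0.0), 1.0)
        * Integer.MAX_VALUE);
  }

  // Hypothetical per-event check: true for roughly `fraction` of calls.
  boolean shouldSample() {
    return isEnabled
        && ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE) < sampleRangeMax;
  }

  public static void main(String[] args) {
    SamplingSketch sketch = new SamplingSketch(0.1);
    int sampled = 0;
    for (int i = 0; i < 1_000_000; i++) {
      if (sketch.shouldSample()) {
        sampled++;
      }
    }
    // Expect a count near 100,000 for a fraction of 0.1.
    System.out.println("sampled " + sampled + " of 1000000 simulated IO events");
  }
}
```

Precomputing the range keeps the hot path to a single integer comparison per IO event in this sketch.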
|
|
|
@@ -121,8 +121,8 @@ public class TestDataNodeVolumeMetrics {
 
   private MiniDFSCluster setupClusterForVolumeMetrics() throws IOException {
     Configuration conf = new HdfsConfiguration();
-    conf.setBoolean(DFSConfigKeys
-        .DFS_DATANODE_ENABLE_FILEIO_PROFILING_KEY, true);
+    conf.setDouble(DFSConfigKeys
+        .DFS_DATANODE_FILEIO_PROFILING_SAMPLING_FRACTION_KEY, 1.0);
     SimulatedFSDataset.setFactory(conf);
     return new MiniDFSCluster.Builder(conf)
         .numDataNodes(NUM_DATANODES)
|
|
|
@@ -103,8 +103,6 @@ public class TestHdfsConfigFields extends TestConfigurationFieldsBase {
         .add(DFSConfigKeys.DFS_DATANODE_STARTUP_KEY);
     configurationPropsToSkipCompare
         .add(DFSConfigKeys.DFS_NAMENODE_STARTUP_KEY);
-    configurationPropsToSkipCompare
-        .add(DFSConfigKeys.DFS_DATANODE_ENABLE_FILEIO_PROFILING_KEY);
     configurationPropsToSkipCompare.add(DFSConfigKeys
         .DFS_DATANODE_ENABLE_FILEIO_FAULT_INJECTION_KEY);
     configurationPropsToSkipCompare.add(DFSConfigKeys
|
|