# hbase/conf/hadoop-metrics.properties

# See http://wiki.apache.org/hadoop/GangliaMetrics
# Make sure you know whether you are using ganglia 3.0 or 3.1.
# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
# And, yes, this file is named hadoop-metrics.properties rather than
# hbase-metrics.properties because we're leveraging the hadoop metrics
# package and hadoop-metrics.properties is a hardcoded name, at least
# for the moment.
#
# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
# GMETADHOST_IP is the hostname or IP address of the server on which the
# ganglia meta daemon (gmetad) service is running.
# Configuration of the "hbase" context for NullContextWithUpdateThread
# NullContextWithUpdateThread is a null context (it emits nothing) that still
# runs a thread which periodically polls the metrics sources once monitoring
# is started. This keeps the data sampled correctly.
hbase.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
hbase.period=10
# Configuration of the "hbase" context for file
# hbase.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# hbase.fileName=/tmp/metrics_hbase.log
# HBase-specific setting that periodically resets long-running stats
# (e.g. compactions); the value is in seconds.
# If this property is omitted, the default is no expiration.
hbase.extendedperiod = 3600
# Configuration of the "hbase" context for ganglia
# Pick one class below: GangliaContext for Ganglia 3.0, GangliaContext31 for Ganglia 3.1
# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# hbase.period=10
# hbase.servers=GMETADHOST_IP:8649
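# For example, a minimal sketch of shipping the "hbase" context to Ganglia 3.1
# would uncomment the three Ganglia 3.1 lines above and replace the placeholder
# with a real address (192.0.2.10 below is hypothetical, for illustration only):
#
#   hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
#   hbase.period=10
#   hbase.servers=192.0.2.10:8649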
# "hbase" context does basic metrics, "hbase-dynamic" context adds a bunch more and
# would be expanded dynamically, but beware, enabling it may overwhelm your monitoring.
# Configuration of the "hbase-dynamic" context for null
hbase-dynamic.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
hbase-dynamic.period=10
# Configuration of the "hbase-dynamic" context for file
# hbase-dynamic.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# hbase-dynamic.fileName=/tmp/metrics_hbase-dynamic.log
# Configuration of the "hbase-dynamic" context for ganglia
# Pick one class below: GangliaContext for Ganglia 3.0, GangliaContext31 for Ganglia 3.1
# hbase-dynamic.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# hbase-dynamic.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# hbase-dynamic.period=10
# hbase-dynamic.servers=GMETADHOST_IP:8649
# Configuration of the "jvm" context for null
jvm.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
jvm.period=10
# Configuration of the "jvm" context for file
# jvm.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# jvm.fileName=/tmp/metrics_jvm.log
# Configuration of the "jvm" context for ganglia
# Pick one class below: GangliaContext for Ganglia 3.0, GangliaContext31 for Ganglia 3.1
# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# jvm.period=10
# jvm.servers=GMETADHOST_IP:8649
# Configuration of the "rpc" context for null
rpc.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
rpc.period=10
# Configuration of the "rpc" context for file
# rpc.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# rpc.fileName=/tmp/metrics_rpc.log
# Configuration of the "rpc" context for ganglia
# Pick one class below: GangliaContext for Ganglia 3.0, GangliaContext31 for Ganglia 3.1
# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# rpc.period=10
# rpc.servers=GMETADHOST_IP:8649
# Configuration of the "rest" context for ganglia
# Pick one class below: GangliaContext for Ganglia 3.0, GangliaContext31 for Ganglia 3.1
# rest.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# rest.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# rest.period=10
# rest.servers=GMETADHOST_IP:8649
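# Note: in the Hadoop metrics package the servers value is typically a comma-
# (or space-) separated list of host:port pairs, so a context can report to
# more than one Ganglia receiver, e.g. (hosts below are hypothetical):
# rest.servers=192.0.2.10:8649,192.0.2.11:8649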