HADOOP-8316. Audit logging should be disabled by default. Contributed by Eli Collins
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1337334 13f79535-47bb-0310-9956-ffa450edef68
parent 2116f28d9e
commit e2af2f1b87
@@ -513,6 +513,8 @@ Release 2.0.0 - UNRELEASED
     HADOOP-7868. Hadoop native fails to compile when default linker
     option is -Wl,--as-needed. (Trevor Robinson via eli)
 
+    HADOOP-8316. Audit logging should be disabled by default. (eli)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -102,7 +102,7 @@ log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 #
 #Security appender
 #
-hadoop.security.logger=INFO,console
+hadoop.security.logger=INFO,NullAppender
 hadoop.security.log.maxfilesize=256MB
 hadoop.security.log.maxbackupindex=20
 log4j.category.SecurityLogger=${hadoop.security.logger}
@@ -126,7 +126,7 @@ log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
 #
 # hdfs audit logging
 #
-hdfs.audit.logger=INFO,console
+hdfs.audit.logger=INFO,NullAppender
 hdfs.audit.log.maxfilesize=256MB
 hdfs.audit.log.maxbackupindex=20
 log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
@@ -141,7 +141,7 @@ log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
 #
 # mapred audit logging
 #
-mapred.audit.logger=INFO,console
+mapred.audit.logger=INFO,NullAppender
 mapred.audit.log.maxfilesize=256MB
 mapred.audit.log.maxbackupindex=20
 log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
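With these defaults the security and audit loggers route to log4j's no-op appender, so their events are discarded instead of being written to the process console. A minimal sketch of turning HDFS audit logging back on in log4j.properties, assuming the RFAAUDIT rolling-file appender named in the hunk context above is defined in the same file (the NullAppender declaration is shown in case it is not already present; org.apache.log4j.varia.NullAppender is the stock log4j no-op appender):

    # No-op appender referenced by the new defaults
    log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender

    # Re-enable HDFS audit logging by routing it back to the rolling file appender
    hdfs.audit.logger=INFO,RFAAUDIT

The same kind of override applies to hadoop.security.logger and mapred.audit.logger.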
@@ -48,10 +48,10 @@ done
 export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
 
 # Command specific options appended to HADOOP_OPTS when specified
-export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,RFAAUDIT $HADOOP_NAMENODE_OPTS"
+export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,NullAppender $HADOOP_NAMENODE_OPTS"
 export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
 
-export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,RFAAUDIT $HADOOP_SECONDARYNAMENODE_OPTS"
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=INFO,NullAppender $HADOOP_SECONDARYNAMENODE_OPTS"
 
 # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
 export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
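Because the daemon options pass hdfs.audit.logger as a Java system property, audit logging can also be restored at startup without editing log4j.properties. A sketch, assuming the stock exports above and that RFAAUDIT is still defined in the active log4j.properties (appending the -D after the existing options relies on the JVM letting the last definition of a duplicated system property win; verify that this holds in your environment):

    # Re-enable NameNode audit logging on top of the default NullAppender setting
    export HADOOP_NAMENODE_OPTS="$HADOOP_NAMENODE_OPTS -Dhdfs.audit.logger=INFO,RFAAUDIT"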
@@ -102,7 +102,7 @@ log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 #
 #Security appender
 #
-hadoop.security.logger=INFO,console
+hadoop.security.logger=INFO,NullAppender
 hadoop.security.log.maxfilesize=256MB
 hadoop.security.log.maxbackupindex=20
 log4j.category.SecurityLogger=${hadoop.security.logger}
@@ -126,7 +126,7 @@ log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
 #
 # hdfs audit logging
 #
-hdfs.audit.logger=INFO,console
+hdfs.audit.logger=INFO,NullAppender
 hdfs.audit.log.maxfilesize=256MB
 hdfs.audit.log.maxbackupindex=20
 log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
@@ -141,7 +141,7 @@ log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
 #
 # mapred audit logging
 #
-mapred.audit.logger=INFO,console
+mapred.audit.logger=INFO,NullAppender
 mapred.audit.log.maxfilesize=256MB
 mapred.audit.log.maxbackupindex=20
 log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
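The security log destination for the NameNode and SecondaryNameNode remains configurable through the HADOOP_SECURITY_LOGGER variable referenced in hadoop-env.sh above (it defaults to INFO,RFAS). A sketch of switching the daemons to the daily-rolling DRFAS appender named in the hunk context, assuming that appender is defined in the active log4j.properties:

    # Set before the daemons start, e.g. in hadoop-env.sh or the service environment
    export HADOOP_SECURITY_LOGGER=INFO,DRFAS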