HBASE-19577 Use log4j2 instead of log4j for logging (#1708)

Signed-off-by: stack <stack@apache.org>
Duo Zhang 2021-03-20 09:21:25 +08:00 committed by GitHub
parent a3938c8725
commit ba3610d097
69 changed files with 3090 additions and 2602 deletions

View File

@ -301,10 +301,13 @@ else
# make it easier to check for shaded/not later on.
shaded_jar=""
fi
# Here we add slf4j-api, commons-logging, jul-to-slf4j and jcl-over-slf4j
# to the classpath, as they are all logging bridges. Only exclude log4j* so
# we will not actually log anything out; it is added back later if necessary.
for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
[ "${f}" != "htrace-core.jar$" ] && \
[[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
[[ "${f}" != "htrace-core.jar$" ]] && \
[[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
CLASSPATH="${CLASSPATH}:${f}"
fi
done
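For illustration, a hypothetical Java sketch of the broadened exclusion (the jar names and versions are invented for the demo): the old pattern ^.*/slf4j-log4j.*$ only dropped the slf4j-to-log4j bridge, while ^.*/log4j.*$ also catches the log4j2 jars themselves.

public class ExcludeDemo {
  public static void main(String[] args) {
    // Java's matches() is anchored, same as the explicit ^...$ in the bash test.
    String[] jars = {
      "lib/client-facing-thirdparty/log4j-core-2.14.1.jar",       // now excluded
      "lib/client-facing-thirdparty/log4j-slf4j-impl-2.14.1.jar", // now excluded
      "lib/client-facing-thirdparty/slf4j-api-1.7.30.jar"         // still added
    };
    for (String jar : jars) {
      System.out.println(jar + " excluded=" + jar.matches("^.*/log4j.*$"));
    }
  }
}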
@ -658,7 +661,7 @@ elif [ "$COMMAND" = "mapredcp" ] ; then
for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
[ "${f}" != "htrace-core.jar$" ] && \
[[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
[[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
echo -n ":${f}"
fi
done
@ -775,7 +778,11 @@ HBASE_OPTS="$HBASE_OPTS -Dhbase.log.dir=$HBASE_LOG_DIR"
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE"
HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING"
HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"
# log4j2 does not support setting the log level and appender in a single property, so we need to split HBASE_ROOT_LOGGER
HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-INFO,console}
array=(${HBASE_ROOT_LOGGER//,/ })
HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.level=${array[0]}"
HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.appender=${array[1]}"
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH"
@ -783,17 +790,19 @@ fi
# Enable security logging on the master and regionserver only
if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then
HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,RFAS}"
HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-INFO,RFAS}
else
HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}"
HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}
fi
array=(${HBASE_SECURITY_LOGGER//,/ })
HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.level=${array[0]}"
HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.appender=${array[1]}"
HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
# by now if we're running a command it means we need logging
for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do
for f in ${HBASE_HOME}/lib/client-facing-thirdparty/log4j*.jar; do
if [ -f "${f}" ]; then
CLASSPATH="${CLASSPATH}:${f}"
break
fi
done

View File

@ -155,12 +155,20 @@ JAVA=$JAVA_HOME/bin/java
export HBASE_LOG_PREFIX=hbase-$HBASE_IDENT_STRING-$command-$HOSTNAME
export HBASE_LOGFILE=$HBASE_LOG_PREFIX.log
if [ -z "${HBASE_ROOT_LOGGER}" ]; then
export HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-"INFO,RFA"}
if [ -z "${HBASE_ROOT_LOGGER_LEVEL}" ]; then
export HBASE_ROOT_LOGGER_LEVEL=${HBASE_ROOT_LOGGER_LEVEL:-"INFO"}
fi
if [ -z "${HBASE_SECURITY_LOGGER}" ]; then
export HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-"INFO,RFAS"}
if [ -z "${HBASE_ROOT_LOGGER_APPENDER}" ]; then
export HBASE_ROOT_LOGGER_APPENDER=${HBASE_ROOT_LOGGER_APPENDER:-"RFA"}
fi
if [ -z "${HBASE_SECURITY_LOGGER_LEVEL}" ]; then
export HBASE_SECURITY_LOGGER_LEVEL=${HBASE_SECURITY_LOGGER_LEVEL:-"INFO"}
fi
if [ -z "${HBASE_SECURITY_LOGGER_APPENDER}" ]; then
export HBASE_SECURITY_LOGGER_APPENDER=${HBASE_SECURITY_LOGGER_APPENDER:-"RFAS"}
fi
HBASE_LOGOUT=${HBASE_LOGOUT:-"$HBASE_LOG_DIR/$HBASE_LOG_PREFIX.out"}

View File

@ -329,7 +329,13 @@ set HBASE_OPTS=%HBASE_OPTS% -XX:OnOutOfMemoryError="taskkill /F /PID %p"
if not defined HBASE_ROOT_LOGGER (
set HBASE_ROOT_LOGGER=INFO,console
)
set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger="%HBASE_ROOT_LOGGER%"
for /F "tokens=1,2 delims=," %%a in ("%HBASE_ROOT_LOGGER%") do (
set HBASE_ROOT_LOGGER_LEVEL=%%a
set HBASE_ROOT_LOGGER_APPENDER=%%b
)
set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger.level="%HBASE_ROOT_LOGGER_LEVEL%" -Dhbase.root.logger.appender="%HBASE_ROOT_LOGGER_APPENDER%"
if defined JAVA_LIBRARY_PATH (
set HBASE_OPTS=%HBASE_OPTS% -Djava.library.path="%JAVA_LIBRARY_PATH%"
@ -345,7 +351,13 @@ if not defined HBASE_SECURITY_LOGGER (
set HBASE_SECURITY_LOGGER=INFO,DRFAS
)
)
set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger="%HBASE_SECURITY_LOGGER%"
for /F "tokens=1,2 delims=," %%a in ("%HBASE_SECURITY_LOGGER%") do (
set HBASE_SECURITY_LOGGER_LEVEL=%%a
set HBASE_SECURITY_LOGGER_APPENDER=%%b
)
set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger.level="%HBASE_SECURITY_LOGGER_LEVEL%" -Dhbase.security.logger.appender="%HBASE_SECURITY_LOGGER_APPENDER%"
set HEAP_SETTINGS=%JAVA_HEAP_MAX% %JAVA_OFFHEAP_MAX%
set java_arguments=%HEAP_SETTINGS% %HBASE_OPTS% -classpath "%CLASSPATH%" %CLASS% %hbase-command-arguments%

View File

@ -1,27 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=WARN,console
log4j.threshold=WARN
# console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# ZooKeeper will still put stuff at WARN
log4j.logger.org.apache.zookeeper=ERROR

View File

@ -1,139 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
hbase.log.level=INFO
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
# Rolling File Appender properties
hbase.log.maxfilesize=256MB
hbase.log.maxbackupindex=20
# Rolling File Appender
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
hbase.security.log.maxfilesize=256MB
hbase.security.log.maxbackupindex=20
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %.1000m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender
log4j.appender.asyncconsole.target=System.err
# Custom Logging levels
log4j.logger.org.apache.zookeeper=${hbase.log.level}
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=${hbase.log.level}
log4j.logger.org.apache.hadoop.hbase.META=${hbase.log.level}
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=${hbase.log.level}
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level}
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only, otherwise it's OTT
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
# Uncomment the below if you want to remove logging of client region caching
# and scan of hbase:meta messages
# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO
# EventCounter
# Add "EventCounter" to rootlogger if you want to use this
# Uncomment the line below to add EventCounter information
# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
# Prevent metrics subsystem start/stop messages (HBASE-17722)
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
# Disable the request log by default; you can enable it by changing the appender
log4j.category.http.requests=INFO,NullAppender
log4j.additivity.http.requests=false
# Replace the above with this configuration if you want an http access.log
#log4j.appender.accessRFA=org.apache.log4j.RollingFileAppender
#log4j.appender.accessRFA.File=/var/log/hbase/access.log
#log4j.appender.accessRFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.accessRFA.layout.ConversionPattern=%m%n
#log4j.appender.accessRFA.MaxFileSize=200MB
#log4j.appender.accessRFA.MaxBackupIndex=10
# route http.requests to the accessRFA appender
#log4j.logger.http.requests=INFO,accessRFA
# disable http.requests.* entries going up to the root logger
#log4j.additivity.http.requests=false

View File

@ -1,3 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@ -15,32 +17,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.yetus.audience.InterfaceAudience;
/**
* Logger class that buffers before trying to log to the specified console.
*/
@InterfaceAudience.Private
public class AsyncConsoleAppender extends org.apache.log4j.AsyncAppender {
private final org.apache.log4j.ConsoleAppender consoleAppender;
public AsyncConsoleAppender() {
super();
consoleAppender = new org.apache.log4j.ConsoleAppender(
new org.apache.log4j.PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n"));
this.addAppender(consoleAppender);
}
public void setTarget(String value) {
consoleAppender.setTarget(value);
}
@Override
public void activateOptions() {
consoleAppender.activateOptions();
super.activateOptions();
}
}
-->
<Configuration>
<Appenders>
<!-- Console appender -->
<Console name="console" target="SYSTEM_ERR">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
</Console>
</Appenders>
<Loggers>
<Root level="warn">
<AppenderRef ref="console" />
</Root>
<!-- ZooKeeper will still put stuff at WARN -->
<Logger name="org.apache.zookeeper" level="error" />
</Loggers>
</Configuration>

conf/log4j2.xml Normal file
View File

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<Configuration>
<Appenders>
<!-- Console appender -->
<Console name="console" target="SYSTEM_ERR">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
</Console>
<!-- Daily Rolling File Appender -->
<RollingFile name="DRFA"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<TimeBasedTriggeringPolicy interval="1" />
</Policies>
<DefaultRolloverStrategy max="30" />
</RollingFile>
<!-- Rolling File Appender -->
<RollingFile name="RFA"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="256MB" />
</Policies>
<DefaultRolloverStrategy max="20" />
</RollingFile>
<!-- Security audit appender -->
<RollingFile name="RFAS"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="256MB" />
</Policies>
<DefaultRolloverStrategy max="20" />
</RollingFile>
<!-- Http Access Log RFA -->
<RollingFile name="AccessRFA"
fileName="/var/log/hbase/access.log"
filePattern="/var/log/hbase/access.log.%i">
<PatternLayout pattern="%m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="200MB" />
</Policies>
<DefaultRolloverStrategy max="10" />
</RollingFile>
<Null name="NullAppender" />
</Appenders>
<Loggers>
<Root level="${sys:hbase.root.logger.level:-info}">
<AppenderRef ref="${sys:hbase.root.logger.appender:-console}" />
</Root>
<Logger name="SecurityLogger" level="${sys:hbase.security.logger.level:-info}"
additivity="false">
<AppenderRef ref="${sys:hbase.security.logger.appender:-console}" />
</Logger>
<!-- Custom Logging levels -->
<!--
<Logger name="org.apache.zookeeper" level="debug"/>
<Logger name="org.apache.hadoop.fs.FSNamesystem" level="debug"/>
<Logger name="org.apache.hadoop.hbase" level="debug"/>
<Logger name="org.apache.hadoop.hbase.META" level="debug"/>
Make these two classes below DEBUG to see more zk debug.
<Logger name="org.apache.hadoop.hbase.zookeeper.ZKUtil" level="debug"/>
<Logger name="org.apache.hadoop.hbase.zookeeper.ZKWatcher" level="debug"/>
<Logger name="org.apache.hadoop.dfs" level="debug"/>
-->
<!-- Prevent metrics subsystem start/stop messages (HBASE-17722) -->
<Logger name="org.apache.hadoop.metrics2.impl.MetricsConfig" level="warn" />
<Logger name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter" level="warn" />
<Logger name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl" level="warn" />
<!-- Disable request log by default, you can enable this by changing the appender -->
<Logger name="http.requests" level="info" additivity="false">
<AppenderRef ref="NullAppender" />
</Logger>
<!--
Replace the above with this configuration if you want an http access.log
<Logger name="http.requests" level="info" additivity="false">
<AppenderRef ref="AccessRFA" />
</Logger>
-->
</Loggers>
</Configuration>

View File

@ -54,13 +54,23 @@
<artifactId>hbase-client</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>

View File

@ -1,121 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Rolling File Appender properties
hbase.log.maxfilesize=256MB
hbase.log.maxbackupindex=20
# Rolling File Appender
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
hbase.security.log.maxfilesize=256MB
hbase.security.log.maxbackupindex=20
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Custom Logging levels
log4j.logger.org.apache.zookeeper=INFO
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=INFO
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only, otherwise it's OTT
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
# Uncomment the below if you want to remove logging of client region caching
# and scan of hbase:meta messages
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
# EventCounter
# Add "EventCounter" to rootlogger if you want to use this
# Uncomment the line below to add EventCounter information
# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
# Prevent metrics subsystem start/stop messages (HBASE-17722)
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN

View File

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<Configuration>
<Appenders>
<!-- Console appender -->
<Console name="console" target="SYSTEM_ERR">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
</Console>
<!-- Daily Rolling File Appender -->
<RollingFile name="DRFA"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<TimeBasedTriggeringPolicy interval="1" />
</Policies>
<DefaultRolloverStrategy max="30" />
</RollingFile>
<!-- Rolling File Appender -->
<RollingFile name="RFA"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="256MB" />
</Policies>
<DefaultRolloverStrategy max="20" />
</RollingFile>
<!-- Security audit appender -->
<RollingFile name="RFAS"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="256MB" />
</Policies>
<DefaultRolloverStrategy max="20" />
</RollingFile>
<!-- Http Access Log RFA -->
<RollingFile name="AccessRFA"
fileName="/var/log/hbase/access.log"
filePattern="/var/log/hbase/access.log.%i">
<PatternLayout pattern="%m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="200MB" />
</Policies>
<DefaultRolloverStrategy max="10" />
</RollingFile>
<Null name="NullAppender" />
</Appenders>
<Loggers>
<Root level="${sys:hbase.root.logger.level:-info}">
<AppenderRef ref="${sys:hbase.root.logger.appender:-console}" />
</Root>
<Logger name="SecurityLogger" level="${sys:hbase.security.logger.level:-info}"
additivity="false">
<AppenderRef ref="${sys:hbase.security.logger.appender:-console}" />
</Logger>
<!-- Custom Logging levels -->
<!--
<Logger name="org.apache.zookeeper" level="debug"/>
<Logger name="org.apache.hadoop.fs.FSNamesystem" level="debug"/>
<Logger name="org.apache.hadoop.hbase" level="debug"/>
<Logger name="org.apache.hadoop.hbase.META" level="debug"/>
Make these two classes below DEBUG to see more zk debug.
<Logger name="org.apache.hadoop.hbase.zookeeper.ZKUtil" level="debug"/>
<Logger name="org.apache.hadoop.hbase.zookeeper.ZKWatcher" level="debug"/>
<Logger name="org.apache.hadoop.dfs" level="debug"/>
-->
<!-- Prevent metrics subsystem start/stop messages (HBASE-17722) -->
<Logger name="org.apache.hadoop.metrics2.impl.MetricsConfig" level="warn" />
<Logger name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter" level="warn" />
<Logger name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl" level="warn" />
<!-- Disable request log by default, you can enable this by changing the appender -->
<Logger name="http.requests" level="info" additivity="false">
<AppenderRef ref="NullAppender" />
</Logger>
<!--
Replace the above with this configuration if you want an http access.log
<Logger name="http.requests" level="info" additivity="false">
<AppenderRef ref="AccessRFA" />
</Logger>
-->
</Loggers>
</Configuration>

View File

@ -60,13 +60,23 @@
<artifactId>hbase-shaded-client</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>

View File

@ -1,121 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Rolling File Appender properties
hbase.log.maxfilesize=256MB
hbase.log.maxbackupindex=20
# Rolling File Appender
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
hbase.security.log.maxfilesize=256MB
hbase.security.log.maxbackupindex=20
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Custom Logging levels
log4j.logger.org.apache.zookeeper=INFO
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=INFO
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only, otherwise it's OTT
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
# Uncomment the below if you want to remove logging of client region caching
# and scan of hbase:meta messages
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
# EventCounter
# Add "EventCounter" to rootlogger if you want to use this
# Uncomment the line below to add EventCounter information
# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
# Prevent metrics subsystem start/stop messages (HBASE-17722)
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN

View File

@ -0,0 +1,103 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<Configuration>
<Appenders>
<!-- Console appender -->
<Console name="console" target="SYSTEM_ERR">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
</Console>
<!-- Daily Rolling File Appender -->
<RollingFile name="DRFA"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<TimeBasedTriggeringPolicy interval="1" />
</Policies>
<DefaultRolloverStrategy max="30" />
</RollingFile>
<!-- Rolling File Appender -->
<RollingFile name="RFA"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="256MB" />
</Policies>
<DefaultRolloverStrategy max="20" />
</RollingFile>
<!-- Security audit appender -->
<RollingFile name="RFAS"
fileName="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}"
filePattern="${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="256MB" />
</Policies>
<DefaultRolloverStrategy max="20" />
</RollingFile>
<!-- Http Access Log RFA -->
<RollingFile name="AccessRFA"
fileName="/var/log/hbase/access.log"
filePattern="/var/log/hbase/access.log.%i">
<PatternLayout pattern="%m%n" />
<Policies>
<SizeBasedTriggeringPolicy size="200MB" />
</Policies>
<DefaultRolloverStrategy max="10" />
</RollingFile>
<Null name="NullAppender" />
</Appenders>
<Loggers>
<Root level="${sys:hbase.root.logger.level:-info}">
<AppenderRef ref="${sys:hbase.root.logger.appender:-console}" />
</Root>
<Logger name="SecurityLogger" level="${sys:hbase.security.logger.level:-info}"
additivity="false">
<AppenderRef ref="${sys:hbase.security.logger.appender:-console}" />
</Logger>
<!-- Custom Logging levels -->
<!--
<Logger name="org.apache.zookeeper" level="debug"/>
<Logger name="org.apache.hadoop.fs.FSNamesystem" level="debug"/>
<Logger name="org.apache.hadoop.hbase" level="debug"/>
<Logger name="org.apache.hadoop.hbase.META" level="debug"/>
Make these two classes below DEBUG to see more zk debug.
<Logger name="org.apache.hadoop.hbase.zookeeper.ZKUtil" level="debug"/>
<Logger name="org.apache.hadoop.hbase.zookeeper.ZKWatcher" level="debug"/>
<Logger name="org.apache.hadoop.dfs" level="debug"/>
-->
<!-- Prevent metrics subsystem start/stop messages (HBASE-17722) -->
<Logger name="org.apache.hadoop.metrics2.impl.MetricsConfig" level="warn" />
<Logger name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter" level="warn" />
<Logger name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl" level="warn" />
<!-- Disable request log by default, you can enable this by changing the appender -->
<Logger name="http.requests" level="info" additivity="false">
<AppenderRef ref="NullAppender" />
</Logger>
<!--
Replace the above with this configuration if you want an http access.log
<Logger name="http.requests" level="info" additivity="false">
<AppenderRef ref="AccessRFA" />
</Logger>
-->
</Loggers>
</Configuration>

View File

@ -332,12 +332,20 @@
<artifactId>jul-to-slf4j</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -61,10 +61,8 @@
<exclude>org.apache.htrace:htrace-core4</exclude>
<exclude>org.apache.htrace:htrace-core</exclude>
<exclude>org.apache.yetus:audience-annotations</exclude>
<exclude>org.slf4j:slf4j-api</exclude>
<exclude>org.slf4j:jcl-over-slf4j</exclude>
<exclude>org.slf4j:jul-to-slf4j</exclude>
<exclude>org.slf4j:slf4j-log4j12</exclude>
<exclude>org.slf4j:*</exclude>
<exclude>org.apache.logging.log4j:*</exclude>
</excludes>
</dependencySet>
</dependencySets>
@ -149,10 +147,8 @@
<include>org.apache.htrace:htrace-core4</include>
<include>org.apache.htrace:htrace-core</include>
<include>org.apache.yetus:audience-annotations</include>
<include>org.slf4j:slf4j-api</include>
<include>org.slf4j:jcl-over-slf4j</include>
<include>org.slf4j:jul-to-slf4j</include>
<include>org.slf4j:slf4j-log4j12</include>
<include>org.slf4j:*</include>
<include>org.apache.logging.log4j:*</include>
</includes>
</dependencySet>
</dependencySets>

View File

@ -50,11 +50,9 @@
<include>org.apache.hbase:hbase-metrics</include>
<include>org.apache.hbase:hbase-metrics-api</include>
<include>org.apache.hbase:hbase-procedure</include>
<include>org.apache.hbase:hbase-protocol</include>
<include>org.apache.hbase:hbase-protocol-shaded</include>
<include>org.apache.hbase:hbase-replication</include>
<include>org.apache.hbase:hbase-rest</include>
<include>org.apache.hbase:hbase-rsgroup</include>
<include>org.apache.hbase:hbase-server</include>
<include>org.apache.hbase:hbase-shell</include>
<include>org.apache.hbase:hbase-testing-util</include>
@ -111,8 +109,8 @@
<exclude>org.apache.htrace:htrace-core4</exclude>
<exclude>org.apache.htrace:htrace-core</exclude>
<exclude>org.apache.yetus:audience-annotations</exclude>
<exclude>org.slf4j:slf4j-api</exclude>
<exclude>org.slf4j:slf4j-log4j12</exclude>
<exclude>org.slf4j:*</exclude>
<exclude>org.apache.logging.log4j:*</exclude>
</excludes>
</dependencySet>
</dependencySets>
@ -209,10 +207,8 @@
<include>org.apache.htrace:htrace-core4</include>
<include>org.apache.htrace:htrace-core</include>
<include>org.apache.yetus:audience-annotations</include>
<include>org.slf4j:slf4j-api</include>
<include>org.slf4j:jcl-over-slf4j</include>
<include>org.slf4j:jul-to-slf4j</include>
<include>org.slf4j:slf4j-log4j12</include>
<include>org.slf4j:*</include>
<include>org.apache.logging.log4j:*</include>
</includes>
</dependencySet>
<dependencySet>

View File

@ -149,13 +149,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -98,11 +98,6 @@ public abstract class AsyncFSTestBase {
createDirsAndSetProperties();
Configuration conf = UTIL.getConfiguration();
// Error level to skip some warnings specific to the minicluster. See HBASE-4709
org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class)
.setLevel(org.apache.log4j.Level.ERROR);
org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class)
.setLevel(org.apache.log4j.Level.ERROR);
TraceUtil.initTracer(conf);
CLUSTER = new MiniDFSCluster.Builder(conf).numDataNodes(servers).build();
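The removed calls used the log4j 1.2 API, which has no direct equivalent on the log4j2 API Logger interface; elsewhere in this commit levels are changed through Configurator instead. A hedged sketch of what the same suppression would look like under log4j2, if it were still wanted here (the commit itself simply drops the calls):

    // Assumed sketch only; not code from this commit.
    org.apache.logging.log4j.core.config.Configurator.setLevel(
      org.apache.hadoop.metrics2.util.MBeans.class.getName(),
      org.apache.logging.log4j.Level.ERROR);
    org.apache.logging.log4j.core.config.Configurator.setLevel(
      org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class.getName(),
      org.apache.logging.log4j.Level.ERROR);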

View File

@ -148,13 +148,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>

View File

@ -108,13 +108,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -155,13 +155,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>

View File

@ -17,32 +17,28 @@
*/
package org.apache.hadoop.hbase.ipc;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.log4j.Appender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@RunWith(MockitoJUnitRunner.class)
@Category({ ClientTests.class, SmallTests.class })
public class TestFailedServersLog {
@ -51,39 +47,52 @@ public class TestFailedServersLog {
HBaseClassTestRule.forClass(TestFailedServersLog.class);
static final int TEST_PORT = 9999;
private Address addr;
@Mock
private Appender mockAppender;
@Captor
private ArgumentCaptor captorLoggingEvent;
private org.apache.logging.log4j.core.Appender mockAppender;
@Before
public void setup() {
LogManager.getRootLogger().addAppender(mockAppender);
mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
when(mockAppender.getName()).thenReturn("mockAppender");
when(mockAppender.isStarted()).thenReturn(true);
((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
.getLogger(FailedServers.class)).addAppender(mockAppender);
}
@After
public void teardown() {
LogManager.getRootLogger().removeAppender(mockAppender);
((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
.getLogger(FailedServers.class)).removeAppender(mockAppender);
}
@Test
public void testAddToFailedServersLogging() {
Throwable nullException = new NullPointerException();
AtomicReference<org.apache.logging.log4j.Level> level = new AtomicReference<>();
AtomicReference<String> msg = new AtomicReference<String>();
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
org.apache.logging.log4j.core.LogEvent logEvent =
invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
level.set(logEvent.getLevel());
msg.set(logEvent.getMessage().getFormattedMessage());
return null;
}
}).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class));
Throwable nullException = new NullPointerException();
FailedServers fs = new FailedServers(new Configuration());
addr = Address.fromParts("localhost", TEST_PORT);
fs.addToFailedServers(addr, nullException);
Mockito.verify(mockAppender).doAppend((LoggingEvent) captorLoggingEvent.capture());
LoggingEvent loggingEvent = (LoggingEvent) captorLoggingEvent.getValue();
assertThat(loggingEvent.getLevel(), is(Level.DEBUG));
assertEquals("Added failed server with address " + addr.toString() + " to list caused by "
+ nullException.toString(),
loggingEvent.getRenderedMessage());
verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class));
assertEquals(org.apache.logging.log4j.Level.DEBUG, level.get());
assertEquals("Added failed server with address " + addr.toString() + " to list caused by " +
nullException.toString(), msg.get());
}
}

View File

@ -30,7 +30,6 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
@ -39,7 +38,6 @@ import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.sasl.RealmCallback;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
@ -55,16 +53,15 @@ import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Strings;
@ -83,17 +80,12 @@ public class TestHBaseSaslRpcClient {
static final String DEFAULT_USER_NAME = "principal";
static final String DEFAULT_USER_PASSWORD = "password";
private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class);
private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class);
@Rule
public ExpectedException exception = ExpectedException.none();
@BeforeClass
public static void before() {
Logger.getRootLogger().setLevel(Level.DEBUG);
}
@Test
public void testSaslClientUsesGivenRpcProtection() throws Exception {
Token<? extends TokenIdentifier> token = createTokenMockWithCredentials(DEFAULT_USER_NAME,

View File

@ -232,13 +232,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>

View File

@ -24,9 +24,6 @@ import java.io.IOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@ -44,23 +41,29 @@ public class TestLog4jUtils {
@Test
public void test() {
Logger zk = LogManager.getLogger("org.apache.zookeeper");
Level zkLevel = zk.getEffectiveLevel();
Logger hbaseZk = LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
Level hbaseZkLevel = hbaseZk.getEffectiveLevel();
Logger client = LogManager.getLogger("org.apache.hadoop.hbase.client");
Level clientLevel = client.getEffectiveLevel();
org.apache.logging.log4j.Logger zk =
org.apache.logging.log4j.LogManager.getLogger("org.apache.zookeeper");
org.apache.logging.log4j.Level zkLevel = zk.getLevel();
org.apache.logging.log4j.Logger hbaseZk =
org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
org.apache.logging.log4j.Level hbaseZkLevel = hbaseZk.getLevel();
org.apache.logging.log4j.Logger client =
org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.client");
org.apache.logging.log4j.Level clientLevel = client.getLevel();
Log4jUtils.disableZkAndClientLoggers();
assertEquals(Level.OFF, zk.getLevel());
assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(zk.getName()));
assertEquals(Level.OFF, hbaseZk.getLevel());
assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(hbaseZk.getName()));
assertEquals(Level.OFF, client.getLevel());
assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(client.getName()));
assertEquals(org.apache.logging.log4j.Level.OFF, zk.getLevel());
assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
Log4jUtils.getEffectiveLevel(zk.getName()));
assertEquals(org.apache.logging.log4j.Level.OFF, hbaseZk.getLevel());
assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
Log4jUtils.getEffectiveLevel(hbaseZk.getName()));
assertEquals(org.apache.logging.log4j.Level.OFF, client.getLevel());
assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
Log4jUtils.getEffectiveLevel(client.getName()));
// restore the level
zk.setLevel(zkLevel);
hbaseZk.setLevel(hbaseZkLevel);
client.setLevel(clientLevel);
org.apache.logging.log4j.core.config.Configurator.setLevel(zk.getName(), zkLevel);
org.apache.logging.log4j.core.config.Configurator.setLevel(hbaseZk.getName(), hbaseZkLevel);
org.apache.logging.log4j.core.config.Configurator.setLevel(client.getName(), clientLevel);
}
@Test

View File

@ -194,13 +194,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -272,13 +272,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -1,5 +1,7 @@
<?xml version="1.0"?>
<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="https://maven.apache.org/POM/4.0.0"
xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
@ -143,13 +145,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -92,13 +92,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -241,13 +241,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -44,7 +44,6 @@ import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.util.ServletUtil;
import org.apache.hadoop.util.Tool;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -302,8 +301,7 @@ public final class LogLevel {
/**
* A servlet implementation
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Unstable
@InterfaceAudience.Private
public static class Servlet extends HttpServlet {
private static final long serialVersionUID = 1L;

View File

@ -31,8 +31,7 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.yetus.audience.InterfaceAudience;
/**
* Utility functions for reading the log4j logs that are
* being written by HBase.
* Utility functions for reading the log4j logs that are being written by HBase.
*/
@InterfaceAudience.Private
public abstract class LogMonitoring {
@ -54,13 +53,12 @@ public abstract class LogMonitoring {
}
}
private static void dumpTailOfLog(File f, PrintWriter out, long tailKb)
throws IOException {
private static void dumpTailOfLog(File f, PrintWriter out, long tailKb) throws IOException {
FileInputStream fis = new FileInputStream(f);
BufferedReader r = null;
try {
FileChannel channel = fis.getChannel();
channel.position(Math.max(0, channel.size() - tailKb*1024));
channel.position(Math.max(0, channel.size() - tailKb * 1024));
r = new BufferedReader(new InputStreamReader(fis));
r.readLine(); // skip the first partial line
String line;

View File

@ -22,6 +22,7 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.net.BindException;
import java.net.SocketException;
@ -51,9 +52,6 @@ import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@ -63,7 +61,7 @@ import org.junit.experimental.categories.Category;
/**
* Test LogLevel.
*/
@Category({MiscTests.class, SmallTests.class})
@Category({ MiscTests.class, SmallTests.class })
public class TestLogLevel {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
@ -75,7 +73,8 @@ public class TestLogLevel {
private static Configuration clientConf;
private static Configuration sslConf;
private static final String logName = TestLogLevel.class.getName();
private static final Logger log = LogManager.getLogger(logName);
private static final org.apache.logging.log4j.Logger log =
org.apache.logging.log4j.LogManager.getLogger(logName);
private final static String PRINCIPAL = "loglevel.principal";
private final static String KEYTAB = "loglevel.keytab";
@ -106,8 +105,7 @@ public class TestLogLevel {
}
/**
* Sets up {@link MiniKdc} for testing security.
* Copied from HBaseTestingUtility#setupMiniKdc().
* Sets up {@link MiniKdc} for testing security. Copied from HBaseTestingUtility#setupMiniKdc().
*/
static private MiniKdc setupMiniKdc() throws Exception {
Properties conf = MiniKdc.createConf();
@ -151,12 +149,12 @@ public class TestLogLevel {
}
/**
* Get the SSL configuration.
* This method is copied from KeyStoreTestUtil#getSslConfig() in Hadoop.
* Get the SSL configuration. This method is copied from KeyStoreTestUtil#getSslConfig() in
* Hadoop.
* @return {@link Configuration} instance with ssl configs loaded.
* @param conf to pull client/server SSL settings filename from
*/
private static Configuration getSslConfig(Configuration conf){
private static Configuration getSslConfig(Configuration conf) {
Configuration sslConf = new Configuration(false);
String sslServerConfFile = conf.get(SSLFactory.SSL_SERVER_CONF_KEY);
String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY);
@ -184,36 +182,29 @@ public class TestLogLevel {
public void testCommandOptions() throws Exception {
final String className = this.getClass().getName();
assertFalse(validateCommand(new String[] {"-foo" }));
assertFalse(validateCommand(new String[] { "-foo" }));
// fail due to insufficient number of arguments
assertFalse(validateCommand(new String[] {}));
assertFalse(validateCommand(new String[] {"-getlevel" }));
assertFalse(validateCommand(new String[] {"-setlevel" }));
assertFalse(validateCommand(new String[] {"-getlevel", "foo.bar:8080" }));
assertFalse(validateCommand(new String[] { "-getlevel" }));
assertFalse(validateCommand(new String[] { "-setlevel" }));
assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080" }));
// valid command arguments
assertTrue(validateCommand(
new String[] {"-getlevel", "foo.bar:8080", className }));
assertTrue(validateCommand(
new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
assertTrue(validateCommand(
new String[] {"-getlevel", "foo.bar:8080", className }));
assertTrue(validateCommand(
new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className }));
assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" }));
assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className }));
assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" }));
// fail due to the extra argument
assertFalse(validateCommand(
new String[] {"-getlevel", "foo.bar:8080", className, "blah" }));
assertFalse(validateCommand(
new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
assertFalse(validateCommand(
new String[] {"-getlevel", "foo.bar:8080", className, "-setlevel", "foo.bar:8080",
className }));
assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "blah" }));
assertFalse(
validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "-setlevel",
"foo.bar:8080", className }));
}
/**
* Check to see if a command can be accepted.
*
* @param args a String array of arguments
* @return true if the command can be accepted, false if not.
*/
@ -232,34 +223,26 @@ public class TestLogLevel {
}
/**
* Creates and starts a Jetty server binding at an ephemeral port to run
* LogLevel servlet.
* Creates and starts a Jetty server binding at an ephemeral port to run LogLevel servlet.
* @param protocol "http" or "https"
* @param isSpnego true if SPNEGO is enabled
* @return a created HttpServer object
* @throws Exception if unable to create or start a Jetty server
*/
private HttpServer createServer(String protocol, boolean isSpnego)
throws Exception {
HttpServer.Builder builder = new HttpServer.Builder()
.setName("..")
.addEndpoint(new URI(protocol + "://localhost:0"))
.setFindPort(true)
.setConf(serverConf);
private HttpServer createServer(String protocol, boolean isSpnego) throws Exception {
HttpServer.Builder builder = new HttpServer.Builder().setName("..")
.addEndpoint(new URI(protocol + "://localhost:0")).setFindPort(true).setConf(serverConf);
if (isSpnego) {
// Set up server Kerberos credentials.
// Since the server may fall back to simple authentication,
// use ACL to make sure the connection is Kerberos/SPNEGO authenticated.
builder.setSecurityEnabled(true)
.setUsernameConfKey(PRINCIPAL)
.setKeytabConfKey(KEYTAB)
builder.setSecurityEnabled(true).setUsernameConfKey(PRINCIPAL).setKeytabConfKey(KEYTAB)
.setACL(new AccessControlList("client"));
}
// if using HTTPS, configure keystore/truststore properties.
if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) {
builder = builder.
keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
builder = builder.keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
.keyStore(sslConf.get("ssl.server.keystore.location"),
sslConf.get("ssl.server.keystore.password"),
sslConf.get("ssl.server.keystore.type", "jks"))
@ -274,31 +257,29 @@ public class TestLogLevel {
}
private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
final boolean isSpnego)
throws Exception {
testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, Level.DEBUG.toString());
final boolean isSpnego) throws Exception {
testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego,
org.apache.logging.log4j.Level.DEBUG.toString());
}
/**
* Run both client and server using the given protocol.
*
* @param bindProtocol specify either http or https for server
* @param connectProtocol specify either http or https for client
* @param isSpnego true if SPNEGO is enabled
* @throws Exception if client can't access server.
*/
private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
final boolean isSpnego, final String newLevel)
throws Exception {
final boolean isSpnego, final String newLevel) throws Exception {
if (!LogLevel.isValidProtocol(bindProtocol)) {
throw new Exception("Invalid server protocol " + bindProtocol);
}
if (!LogLevel.isValidProtocol(connectProtocol)) {
throw new Exception("Invalid client protocol " + connectProtocol);
}
Level oldLevel = log.getEffectiveLevel();
org.apache.logging.log4j.Level oldLevel = log.getLevel();
assertNotEquals("Get default Log Level which shouldn't be ERROR.",
Level.ERROR, oldLevel);
org.apache.logging.log4j.Level.ERROR, oldLevel);
// configs needed for SPNEGO at server side
if (isSpnego) {
@ -319,8 +300,8 @@ public class TestLogLevel {
String keytabFilePath = keyTabFile.getAbsolutePath();
UserGroupInformation clientUGI = UserGroupInformation.
loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath);
UserGroupInformation clientUGI =
UserGroupInformation.loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath);
try {
clientUGI.doAs((PrivilegedExceptionAction<Void>) () -> {
// client command line
@ -334,44 +315,38 @@ public class TestLogLevel {
}
// restore log level
GenericTestUtils.setLogLevel(log, oldLevel);
org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), oldLevel);
}
/**
* Run LogLevel command line to start a client to get log level of this test
* class.
*
* Run LogLevel command line to start a client to get log level of this test class.
* @param protocol specify either http or https
* @param authority daemon's web UI address
* @throws Exception if unable to connect
*/
private void getLevel(String protocol, String authority) throws Exception {
String[] getLevelArgs = {"-getlevel", authority, logName, "-protocol", protocol};
String[] getLevelArgs = { "-getlevel", authority, logName, "-protocol", protocol };
CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf);
cli.run(getLevelArgs);
}
/**
* Run LogLevel command line to start a client to set log level of this test
* class to debug.
*
* Run LogLevel command line to start a client to set log level of this test class to debug.
* @param protocol specify either http or https
* @param authority daemon's web UI address
* @throws Exception if unable to run or log level does not change as expected
*/
private void setLevel(String protocol, String authority, String newLevel)
throws Exception {
String[] setLevelArgs = {"-setlevel", authority, logName, newLevel, "-protocol", protocol};
private void setLevel(String protocol, String authority, String newLevel) throws Exception {
String[] setLevelArgs = { "-setlevel", authority, logName, newLevel, "-protocol", protocol };
CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf);
cli.run(setLevelArgs);
assertEquals("new level not equal to expected: ", newLevel.toUpperCase(),
log.getEffectiveLevel().toString());
log.getLevel().toString());
}
/**
* Test setting log level to "Info".
*
* @throws Exception if client can't set log level to INFO.
*/
@Test
@ -381,7 +356,6 @@ public class TestLogLevel {
/**
* Test setting log level to "Error".
*
* @throws Exception if client can't set log level to ERROR.
*/
@Test
@ -391,18 +365,15 @@ public class TestLogLevel {
/**
* Server runs HTTP, no SPNEGO.
*
* @throws Exception if http client can't access http server,
* or http client can access https server.
* @throws Exception if http client can't access http server, or http client can access https
* server.
*/
@Test
public void testLogLevelByHttp() throws Exception {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, false);
try {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS,
false);
fail("An HTTPS Client should not have succeeded in connecting to a " +
"HTTP server");
testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, false);
fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server");
} catch (SSLException e) {
exceptionShouldContains("Unrecognized SSL message", e);
}
@ -410,18 +381,15 @@ public class TestLogLevel {
/**
* Server runs HTTP + SPNEGO.
*
* @throws Exception if http client can't access http server,
* or http client can access https server.
* @throws Exception if http client can't access http server, or http client can access https
* server.
*/
@Test
public void testLogLevelByHttpWithSpnego() throws Exception {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, true);
try {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS,
true);
fail("An HTTPS Client should not have succeeded in connecting to a " +
"HTTP server");
testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, true);
fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server");
} catch (SSLException e) {
exceptionShouldContains("Unrecognized SSL message", e);
}
@ -429,19 +397,15 @@ public class TestLogLevel {
/**
* Server runs HTTPS, no SPNEGO.
*
* @throws Exception if https client can't access https server,
* or https client can access http server.
* @throws Exception if https client can't access https server, or https client can access http
* server.
*/
@Test
public void testLogLevelByHttps() throws Exception {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
false);
testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, false);
try {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP,
false);
fail("An HTTP Client should not have succeeded in connecting to a " +
"HTTPS server");
testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, false);
fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server");
} catch (SocketException e) {
exceptionShouldContains("Unexpected end of file from server", e);
}
@ -449,32 +413,27 @@ public class TestLogLevel {
/**
* Server runs HTTPS + SPNEGO.
*
* @throws Exception if https client can't access https server,
* or https client can access http server.
* @throws Exception if https client can't access https server, or https client can access http
* server.
*/
@Test
public void testLogLevelByHttpsWithSpnego() throws Exception {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
true);
testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, true);
try {
testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP,
true);
fail("An HTTP Client should not have succeeded in connecting to a " +
"HTTPS server");
testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, true);
fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server");
} catch (SocketException e) {
exceptionShouldContains("Unexpected end of file from server", e);
}
}
/**
* Assert that a throwable or one of its causes should contain the substr in its message.
*
* Ideally we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util
* method which asserts t.toString() contains the substr. As the original throwable may have been
* wrapped in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes.
* After stop supporting Hadoop2, this method can be removed and assertion in tests can use
* t.getCause() directly, similar to HADOOP-15280.
* Assert that a throwable or one of its causes should contain the substr in its message. Ideally
* we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util method
* which asserts t.toString() contains the substr. As the original throwable may have been wrapped
* in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. After we
* stop supporting Hadoop2, this method can be removed and assertions in tests can use t.getCause()
* directly, similar to HADOOP-15280.
*/
private static void exceptionShouldContains(String substr, Throwable throwable) {
Throwable t = throwable;
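For reference, the CLI exercised above can also be driven programmatically; a minimal sketch, assuming the LogLevel class lives in org.apache.hadoop.hbase.http.log and that a daemon web UI is listening on localhost:16010 (both assumptions, not confirmed by this diff):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.log.LogLevel;

public class SetLevelSketch {
  public static void main(String[] args) throws Exception {
    // Equivalent to: -setlevel localhost:16010 <logger> DEBUG -protocol http
    LogLevel.CLI cli = new LogLevel.CLI(new Configuration());
    cli.run(new String[] { "-setlevel", "localhost:16010",
      "org.apache.hadoop.hbase", "DEBUG", "-protocol", "http" });
  }
}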

View File

@ -249,13 +249,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>

View File

@ -38,7 +38,7 @@
<testResource>
<directory>src/test/resources</directory>
<includes>
<include>log4j.properties</include>
<include>log4j2.xml</include>
</includes>
</testResource>
</testResources>
@ -80,13 +80,33 @@
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<artifactId>jcl-over-slf4j</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -19,16 +19,15 @@ package org.apache.hadoop.hbase.logging;
import java.io.File;
import java.io.IOException;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
import org.apache.yetus.audience.InterfaceAudience;
/**
* The actual class for operating on log4j.
* The actual class for operating on log4j2.
* <p/>
* This class will depend on log4j directly, so callers should not use this class directly to avoid
* introducing log4j dependencies to downstream users. Please call the methods in
* introducing log4j2 dependencies to downstream users. Please call the methods in
* {@link Log4jUtils}, as they will call the methods here through reflection.
*/
@InterfaceAudience.Private
@ -38,31 +37,52 @@ final class InternalLog4jUtils {
}
static void setLogLevel(String loggerName, String levelName) {
org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
org.apache.log4j.Level level = org.apache.log4j.Level.toLevel(levelName.toUpperCase());
org.apache.logging.log4j.Level level =
org.apache.logging.log4j.Level.toLevel(levelName.toUpperCase());
if (!level.toString().equalsIgnoreCase(levelName)) {
throw new IllegalArgumentException("Unsupported log level " + levelName);
}
logger.setLevel(level);
org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName, level);
}
static String getEffectiveLevel(String loggerName) {
org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
return logger.getEffectiveLevel().toString();
org.apache.logging.log4j.Logger logger =
org.apache.logging.log4j.LogManager.getLogger(loggerName);
return logger.getLevel().name();
}
static Set<File> getActiveLogFiles() throws IOException {
Set<File> ret = new HashSet<>();
org.apache.log4j.Appender a;
@SuppressWarnings("unchecked")
Enumeration<org.apache.log4j.Appender> e =
org.apache.log4j.Logger.getRootLogger().getAllAppenders();
while (e.hasMoreElements()) {
a = e.nextElement();
if (a instanceof org.apache.log4j.FileAppender) {
org.apache.log4j.FileAppender fa = (org.apache.log4j.FileAppender) a;
String filename = fa.getFile();
ret.add(new File(filename));
org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
if (!(logger instanceof org.apache.logging.log4j.core.Logger)) {
return ret;
}
org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
for (org.apache.logging.log4j.core.Appender appender : coreLogger.getAppenders().values()) {
if (appender instanceof org.apache.logging.log4j.core.appender.FileAppender) {
String fileName =
((org.apache.logging.log4j.core.appender.FileAppender) appender).getFileName();
ret.add(new File(fileName));
} else if (appender instanceof org.apache.logging.log4j.core.appender.AbstractFileAppender) {
String fileName =
((org.apache.logging.log4j.core.appender.AbstractFileAppender<?>) appender).getFileName();
ret.add(new File(fileName));
} else if (appender instanceof org.apache.logging.log4j.core.appender.RollingFileAppender) {
String fileName =
((org.apache.logging.log4j.core.appender.RollingFileAppender) appender).getFileName();
ret.add(new File(fileName));
} else
if (appender instanceof org.apache.logging.log4j.core.appender.RandomAccessFileAppender) {
String fileName =
((org.apache.logging.log4j.core.appender.RandomAccessFileAppender) appender)
.getFileName();
ret.add(new File(fileName));
} else
if (appender instanceof org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) {
String fileName =
((org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) appender)
.getFileName();
ret.add(new File(fileName));
}
}
return ret;
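The replacement pattern above — write through the core Configurator, read back through the api Logger — can be exercised standalone; a minimal sketch mirroring setLogLevel/getEffectiveLevel:

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.config.Configurator;

public class LevelRoundTripSketch {
  public static void main(String[] args) {
    // Configurator.setLevel updates the live configuration; Logger.getLevel
    // then reports the effective level for that logger name.
    Configurator.setLevel("org.apache.hadoop.hbase", Level.DEBUG);
    Level level = LogManager.getLogger("org.apache.hadoop.hbase").getLevel();
    System.out.println(level); // DEBUG
  }
}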

View File

@ -0,0 +1,288 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.log4j;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.Writer;
/**
* Just a copy of the old log4j12 FileAppender. The ContainerLogAppender for YARN NodeManager needs
* this class, but the log4j-1.2-api bridge does not provide it, which causes the UTs in the
* hbase-mapreduce module to fail if we start a separate MR cluster.
*/
public class FileAppender extends WriterAppender {
/**
* Controls file truncation. The default value for this variable is <code>true</code>, meaning
* that by default a <code>FileAppender</code> will append to an existing file and not truncate
* it.
* <p>
* This option is meaningful only if the FileAppender opens the file.
*/
protected boolean fileAppend = true;
/**
* The name of the log file.
*/
protected String fileName = null;
/**
* Do we do bufferedIO?
*/
protected boolean bufferedIO = false;
/**
* Determines the size of IO buffer be. Default is 8K.
*/
protected int bufferSize = 8 * 1024;
/**
* The default constructor does not do anything.
*/
public FileAppender() {
}
/**
* Instantiate a <code>FileAppender</code> and open the file designated by <code>fileName</code>.
* The opened filename will become the output destination for this appender.
* <p>
* If the <code>append</code> parameter is true, the file will be appended to. Otherwise, the file
* designated by <code>fileName</code> will be truncated before being opened.
* <p>
* If the <code>bufferedIO</code> parameter is <code>true</code>, then buffered IO will be used to
* write to the output file.
*/
public FileAppender(Layout layout, String fileName, boolean append, boolean bufferedIO,
int bufferSize) throws IOException {
this.layout = layout;
this.setFile(fileName, append, bufferedIO, bufferSize);
}
/**
* Instantiate a FileAppender and open the file designated by <code>fileName</code>. The opened
* filename will become the output destination for this appender.
* <p>
* If the <code>append</code> parameter is true, the file will be appended to. Otherwise, the file
* designated by <code>fileName</code> will be truncated before being opened.
*/
public FileAppender(Layout layout, String fileName, boolean append) throws IOException {
this.layout = layout;
this.setFile(fileName, append, false, bufferSize);
}
/**
* Instantiate a FileAppender and open the file designated by <code>filename</code>. The opened
* filename will become the output destination for this appender.
* <p>
* The file will be appended to.
*/
public FileAppender(Layout layout, String fileName) throws IOException {
this(layout, fileName, true);
}
/**
* The <b>File</b> property takes a string value which should be the name of the file to append
* to.
* <p>
* <font color="#DD0044"><b>Note that the special values "System.out" or "System.err" are no
* longer honored.</b></font>
* <p>
* Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
* options are set.
*/
public void setFile(String file) {
// Trim spaces from both ends. The user probably does not want
// trailing spaces in file names.
String val = file.trim();
fileName = val;
}
/**
* Returns the value of the <b>Append</b> option.
*/
public boolean getAppend() {
return fileAppend;
}
/** Returns the value of the <b>File</b> option. */
public String getFile() {
return fileName;
}
/**
* If the value of <b>File</b> is not <code>null</code>, then {@link #setFile} is called with the
* values of <b>File</b> and <b>Append</b> properties.
* @since 0.8.1
*/
@Override
public void activateOptions() {
if (fileName != null) {
try {
setFile(fileName, fileAppend, bufferedIO, bufferSize);
} catch (java.io.IOException e) {
errorHandler.error("setFile(" + fileName + "," + fileAppend + ") call failed.", e,
org.apache.log4j.spi.ErrorCode.FILE_OPEN_FAILURE);
}
}
}
/**
* Closes the previously opened file.
*/
protected void closeFile() {
if (this.qw != null) {
try {
this.qw.close();
} catch (java.io.IOException e) {
if (e instanceof InterruptedIOException) {
Thread.currentThread().interrupt();
}
// Exceptionally, it does not make sense to delegate to an
// ErrorHandler. Since a closed appender is basically dead.
}
}
}
/**
* Get the value of the <b>BufferedIO</b> option.
* <p>
* BufferedIO will significantly increase performance on heavily loaded systems.
*/
public boolean getBufferedIO() {
return this.bufferedIO;
}
/**
* Get the size of the IO buffer.
*/
public int getBufferSize() {
return this.bufferSize;
}
/**
* The <b>Append</b> option takes a boolean value. It is set to <code>true</code> by default. If
* true, then <code>File</code> will be opened in append mode by {@link #setFile setFile} (see
* above). Otherwise, {@link #setFile setFile} will open <code>File</code> in truncate mode.
* <p>
* Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
* options are set.
*/
public void setAppend(boolean flag) {
fileAppend = flag;
}
/**
* The <b>BufferedIO</b> option takes a boolean value. It is set to <code>false</code> by default.
* If true, then <code>File</code> will be opened and the resulting {@link java.io.Writer} wrapped
* around a {@link BufferedWriter}. BufferedIO will significantly increase performance on heavily
* loaded systems.
*/
public void setBufferedIO(boolean bufferedIO) {
this.bufferedIO = bufferedIO;
if (bufferedIO) {
immediateFlush = false;
}
}
/**
* Set the size of the IO buffer.
*/
public void setBufferSize(int bufferSize) {
this.bufferSize = bufferSize;
}
/**
* <p>
* Sets and <i>opens</i> the file where the log output will go. The specified file must be
* writable.
* <p>
* If there was already an opened file, then the previous file is closed first.
* <p>
* <b>Do not use this method directly. To configure a FileAppender or one of its subclasses, set
* its properties one by one and then call activateOptions.</b>
* @param fileName The path to the log file.
* @param append If true will append to fileName. Otherwise will truncate fileName.
*/
public synchronized void setFile(String fileName, boolean append, boolean bufferedIO,
int bufferSize) throws IOException {
// It does not make sense to have immediate flush and bufferedIO.
if (bufferedIO) {
setImmediateFlush(false);
}
reset();
FileOutputStream ostream = null;
try {
//
// attempt to create file
//
ostream = new FileOutputStream(fileName, append);
} catch (FileNotFoundException ex) {
//
// if parent directory does not exist then
// attempt to create it and try to create file
// see bug 9150
//
String parentName = new File(fileName).getParent();
if (parentName != null) {
File parentDir = new File(parentName);
if (!parentDir.exists() && parentDir.mkdirs()) {
ostream = new FileOutputStream(fileName, append);
} else {
throw ex;
}
} else {
throw ex;
}
}
Writer fw = createWriter(ostream);
if (bufferedIO) {
fw = new BufferedWriter(fw, bufferSize);
}
this.setQWForFiles(fw);
this.fileName = fileName;
this.fileAppend = append;
this.bufferedIO = bufferedIO;
this.bufferSize = bufferSize;
writeHeader();
}
/**
* Sets the quiet writer being used. This method is overridden by {@code RollingFileAppender}.
*/
protected void setQWForFiles(Writer writer) {
this.qw = new org.apache.log4j.helpers.QuietWriter(writer, errorHandler);
}
/**
* Close any previously opened file and call the parent's <code>reset</code>.
*/
@Override
protected void reset() {
closeFile();
this.fileName = null;
super.reset();
}
}
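A short usage sketch of this compatibility class, using only the constructors and accessors defined above; the path is hypothetical, PatternLayout is assumed to come from the log4j-1.2-api bridge, and close() is inherited from WriterAppender:

import java.io.IOException;
import org.apache.log4j.FileAppender;
import org.apache.log4j.PatternLayout;

public class CompatFileAppenderSketch {
  public static void main(String[] args) throws IOException {
    // The three-argument constructor opens the file immediately, in append mode.
    FileAppender appender = new FileAppender(
      new PatternLayout("%d{ISO8601} %-5p %c: %m%n"), "/tmp/container.log", true);
    System.out.println(appender.getFile());   // /tmp/container.log
    System.out.println(appender.getAppend()); // true
    appender.close();
  }
}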

View File

@ -1,68 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Debugging Pattern format
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
# Custom Logging levels
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop=WARN
log4j.logger.org.apache.zookeeper=ERROR
log4j.logger.org.apache.hadoop.hbase=DEBUG
#These settings are workarounds against spurious logs from the minicluster.
#See HBASE-4709
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE

View File

@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<Configuration>
<Appenders>
<Console name="Console" target="SYSTEM_ERR">
<PatternLayout pattern="%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n" />
</Console>
</Appenders>
<Loggers>
<Root level="info">
<AppenderRef ref="Console" />
</Root>
<Logger name="org.apache.hadoop" level="warn" />
<Logger name="org.apache.zookeeper" level="error" />
<Logger name="org.apache.hadoop.hbase" level="debug" />
<!-- These settings are workarounds against spurious logs from the minicluster. See HBASE-4709 -->
<Logger name="org.apache.hadoop.metrics2.impl.MetricsConfig" level="warn" />
<Logger name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter" level="warn" />
<Logger name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl" level="warn" />
<Logger name="org.apache.hadoop.metrics2.util.MBeans" level="warn" />
<Logger name="org.apache.directory" level="warn" additivity="false" />
<Logger name="org.apache.hbase.thirdparty.io.netty.channel" level="debug" />
<!-- For testing where we want to capture the log message of these special loggers -->
<Logger name="org.apache.hadoop.hbase.ipc.FailedServers" level="debug" />
<Logger name="org.apache.hadoop.hbase.regionserver.RSRpcServices" level="debug" />
</Loggers>
</Configuration>
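This XML replaces the deleted log4j.properties above; assuming it is picked up from the test classpath, the configured levels are visible through the log4j2 api, e.g.:

import org.apache.logging.log4j.LogManager;

public class VerifyTestConfigSketch {
  public static void main(String[] args) {
    // Levels as declared in the log4j2.xml above.
    System.out.println(LogManager.getRootLogger().getLevel());                      // INFO
    System.out.println(LogManager.getLogger("org.apache.hadoop.hbase").getLevel()); // DEBUG
    System.out.println(LogManager.getLogger("org.apache.zookeeper").getLevel());    // ERROR
  }
}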

View File

@ -260,13 +260,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -25,24 +25,16 @@ import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.ZooKeeper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Durability;
@ -52,6 +44,7 @@ import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
@ -61,6 +54,10 @@ import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.ZooKeeper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.org.apache.commons.cli.AlreadySelectedException;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
@ -583,7 +580,7 @@ public class LoadTestTool extends AbstractHBaseTool {
@Override
protected int doWork() throws IOException {
if (!isVerbose) {
LogManager.getLogger(ZooKeeper.class.getName()).setLevel(Level.WARN);
Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN");
}
if (numTables > 1) {
return parallelLoadTables();
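The reflective facade used here also supports the save/lower/restore pattern seen in the test changes below; a minimal sketch using only the two Log4jUtils methods this diff exercises:

import org.apache.hadoop.hbase.logging.Log4jUtils;

public class QuietLoggerSketch {
  public static void main(String[] args) {
    // Save the current effective level, silence the logger, then restore it,
    // without taking a direct compile-time dependency on log4j2.
    String saved = Log4jUtils.getEffectiveLevel("org.apache.zookeeper");
    Log4jUtils.setLogLevel("org.apache.zookeeper", "WARN");
    try {
      // ... noisy work elided ...
    } finally {
      Log4jUtils.setLogLevel("org.apache.zookeeper", saved);
    }
  }
}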

View File

@ -133,13 +133,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -141,13 +141,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -130,13 +130,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -138,13 +138,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -354,13 +354,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -494,13 +494,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -639,7 +639,6 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
* This is used before starting HDFS and map-reduce mini-clusters Run something like the below to
* check for the likes of '/tmp' references -- i.e. references outside of the test data dir -- in
* the conf.
*
* <pre>
* Configuration conf = TEST_UTIL.getConfiguration();
* for (Iterator&lt;Map.Entry&lt;String, String&gt;&gt; i = conf.iterator(); i.hasNext();) {

View File

@ -29,11 +29,10 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@ -61,10 +60,13 @@ public class TestAsyncTableBatchRetryImmediately {
private static AsyncConnection CONN;
private static String LOG_LEVEL;
@BeforeClass
public static void setUp() throws Exception {
// disable the debug log to avoid flooding the output
LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, 1024);
UTIL.startMiniCluster(1);
Table table = UTIL.createTable(TABLE_NAME, FAMILY);
@ -79,6 +81,9 @@ public class TestAsyncTableBatchRetryImmediately {
@AfterClass
public static void tearDown() throws Exception {
if (LOG_LEVEL != null) {
Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
}
CONN.close();
UTIL.shutdownMiniCluster();
}

View File

@ -33,14 +33,13 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.metrics.BaseSource;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@ -65,6 +64,7 @@ public class TestMultiRespectsLimits {
CompatibilityFactory.getInstance(MetricsAssertHelper.class);
private final static byte[] FAMILY = Bytes.toBytes("D");
public static final int MAX_SIZE = 100;
private static String LOG_LEVEL;
@Rule
public TestName name = new TestName();
@ -72,7 +72,8 @@ public class TestMultiRespectsLimits {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// disable the debug log to avoid flooding the output
LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
TEST_UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
MAX_SIZE);
@ -82,6 +83,9 @@ public class TestMultiRespectsLimits {
@AfterClass
public static void tearDownAfterClass() throws Exception {
if (LOG_LEVEL != null) {
Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
}
TEST_UTIL.shutdownMiniCluster();
}

View File

@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.After;
@ -82,10 +83,8 @@ public class TestProtoBufRpc {
this.conf = HBaseConfiguration.create();
this.conf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY,
rpcServerImpl);
org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer")
.setLevel(org.apache.log4j.Level.ERROR);
org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace")
.setLevel(org.apache.log4j.Level.TRACE);
Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer", "ERROR");
Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer.trace", "TRACE");
// Create server side implementation
// Get RPC server for server side implementation
this.server = RpcServerFactory.createRpcServer(null, "testrpc",

View File

@ -35,17 +35,19 @@ import org.mockito.Mockito;
public class TestRpcServerTraceLogging {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule
.forClass(TestRpcServerTraceLogging.class);
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestRpcServerTraceLogging.class);
static org.apache.log4j.Logger rpcServerLog = org.apache.log4j.Logger.getLogger(RpcServer.class);
private static final org.apache.logging.log4j.core.Logger rpcServerLog =
(org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
.getLogger(RpcServer.class);
static final String TRACE_LOG_MSG =
"This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }"
+ " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } "
+ "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } "
+ "number_of_rows: 2147483647 close_scanner: false client_handles_partials: "
+ "true client_handles_heartbeats: true track_scan_metrics: false";
"This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }" +
" scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } " +
"max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } " +
"number_of_rows: 2147483647 close_scanner: false client_handles_partials: " +
"true client_handles_heartbeats: true track_scan_metrics: false";
static final int TRACE_LOG_LENGTH = TRACE_LOG_MSG.length();
@ -62,7 +64,7 @@ public class TestRpcServerTraceLogging {
@Test
public void testLoggingWithTraceOff() {
conf.setInt("hbase.ipc.trace.log.max.length", 250);
rpcServerLog.setLevel(org.apache.log4j.Level.DEBUG);
rpcServerLog.setLevel(org.apache.logging.log4j.Level.DEBUG);
String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
assertEquals(150 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@ -72,7 +74,7 @@ public class TestRpcServerTraceLogging {
@Test
public void testLoggingWithTraceOn() {
conf.setInt("hbase.ipc.trace.log.max.length", 250);
rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
assertEquals(250 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@ -82,7 +84,7 @@ public class TestRpcServerTraceLogging {
@Test
public void testLoggingWithTraceOnLargeMax() {
conf.setInt("hbase.ipc.trace.log.max.length", 2000);
rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
assertEquals(TRACE_LOG_LENGTH, truncatedString.length());

View File

@ -20,14 +20,16 @@ package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@ -36,10 +38,6 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Appender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
@ -47,8 +45,9 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
@ -80,7 +79,7 @@ public class TestMultiLogThreshold {
private HRegionServer rs;
private RSRpcServices services;
private Appender appender;
private org.apache.logging.log4j.core.Appender appender;
@Parameterized.Parameter
public static boolean rejectLargeBatchOp;
@ -90,6 +89,21 @@ public class TestMultiLogThreshold {
return Arrays.asList(new Object[] { false }, new Object[] { true });
}
private final class LevelAndMessage {
final org.apache.logging.log4j.Level level;
final String msg;
public LevelAndMessage(org.apache.logging.log4j.Level level, String msg) {
this.level = level;
this.msg = msg;
}
}
// log4j2 will reuse the LogEvent so we need to copy the level and message out.
private BlockingDeque<LevelAndMessage> logs = new LinkedBlockingDeque<>();
@Before
public void setupTest() throws Exception {
util = new HBaseTestingUtility();
@ -100,13 +114,28 @@ public class TestMultiLogThreshold {
util.startMiniCluster();
util.createTable(NAME, TEST_FAM);
rs = util.getRSForFirstRegionInTable(NAME);
appender = mock(Appender.class);
LogManager.getLogger(RSRpcServices.class).addAppender(appender);
appender = mock(org.apache.logging.log4j.core.Appender.class);
when(appender.getName()).thenReturn("mockAppender");
when(appender.isStarted()).thenReturn(true);
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
org.apache.logging.log4j.core.LogEvent logEvent =
invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
logs.add(
new LevelAndMessage(logEvent.getLevel(), logEvent.getMessage().getFormattedMessage()));
return null;
}
}).when(appender).append(any(org.apache.logging.log4j.core.LogEvent.class));
((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
.getLogger(RSRpcServices.class)).addAppender(appender);
}
@After
public void tearDown() throws Exception {
LogManager.getLogger(RSRpcServices.class).removeAppender(appender);
((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
.getLogger(RSRpcServices.class)).removeAppender(appender);
util.shutdownMiniCluster();
}
@ -149,17 +178,16 @@ public class TestMultiLogThreshold {
}
private void assertLogBatchWarnings(boolean expected) {
ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
verify(appender, atLeastOnce()).doAppend(captor.capture());
assertFalse(logs.isEmpty());
boolean actual = false;
for (LoggingEvent event : captor.getAllValues()) {
if (event.getLevel() == Level.WARN &&
event.getRenderedMessage().contains("Large batch operation detected")) {
for (LevelAndMessage event : logs) {
if (event.level == org.apache.logging.log4j.Level.WARN &&
event.msg.contains("Large batch operation detected")) {
actual = true;
break;
}
}
reset(appender);
logs.clear();
assertEquals(expected, actual);
}
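The copy-out above is needed because log4j2 may recycle mutable LogEvent instances; an alternative sketch using LogEvent.toImmutable(), which snapshots the whole event instead of individual fields:

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.logging.log4j.core.LogEvent;

public class EventSnapshotSketch {
  private final Queue<LogEvent> events = new ConcurrentLinkedQueue<>();

  // Call this from the mocked Appender#append answer: toImmutable() copies
  // the (possibly reused) mutable event so it remains valid afterwards.
  void onAppend(LogEvent event) {
    events.add(event.toImmutable());
  }
}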

View File

@ -42,11 +42,6 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.log4j.Appender;
import org.apache.log4j.Layout;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.WriterAppender;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.Before;
@ -56,6 +51,8 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@Category(LargeTests.class)
public class TestRegionServerReportForDuty {
@ -91,26 +88,15 @@ public class TestRegionServerReportForDuty {
testUtil.shutdownMiniDFSCluster();
}
/**
* LogCapturer is similar to {@link org.apache.hadoop.test.GenericTestUtils.LogCapturer}
* except that this implementation has a default appender to the root logger.
* Hadoop 2.8+ supports the default appender in the LogCapture it ships and this can be replaced.
* TODO: This class can be removed after we upgrade Hadoop dependency.
*/
static class LogCapturer {
private static class LogCapturer {
private StringWriter sw = new StringWriter();
private WriterAppender appender;
private org.apache.log4j.Logger logger;
private org.apache.logging.log4j.core.appender.WriterAppender appender;
private org.apache.logging.log4j.core.Logger logger;
LogCapturer(org.apache.log4j.Logger logger) {
LogCapturer(org.apache.logging.log4j.core.Logger logger) {
this.logger = logger;
Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout");
if (defaultAppender == null) {
defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console");
}
final Layout layout = (defaultAppender == null) ? new PatternLayout() :
defaultAppender.getLayout();
this.appender = new WriterAppender(layout, sw);
this.appender = org.apache.logging.log4j.core.appender.WriterAppender.newBuilder()
.setName("test").setTarget(sw).build();
this.logger.addAppender(this.appender);
}
@ -146,7 +132,9 @@ public class TestRegionServerReportForDuty {
master = cluster.addMaster();
master.start();
LogCapturer capturer = new LogCapturer(org.apache.log4j.Logger.getLogger(HRegionServer.class));
LogCapturer capturer =
new LogCapturer((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
.getLogger(HRegionServer.class));
// Set sleep interval relatively low so that exponential backoff is more demanding.
int msginterval = 100;
cluster.getConfiguration().setInt("hbase.regionserver.msginterval", msginterval);
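
Condensed, the log4j2 LogCapturer wiring above amounts to the sketch below. The explicit start() call and the default PatternLayout are assumptions; the diff only shows the builder and addAppender calls:

    import java.io.StringWriter;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.core.Logger;
    import org.apache.logging.log4j.core.appender.WriterAppender;
    import org.apache.logging.log4j.core.layout.PatternLayout;

    StringWriter sw = new StringWriter();
    WriterAppender appender = WriterAppender.newBuilder()
      .setName("test")
      .setTarget(sw)
      .setLayout(PatternLayout.createDefaultLayout())
      .build();
    appender.start(); // log4j2 appenders are skipped until started
    Logger logger = (Logger) LogManager.getLogger(HRegionServer.class);
    logger.addAppender(appender);
    // ... exercise the code under test, assert on sw.toString(),
    // then detach with logger.removeAppender(appender)

Unlike log4j 1.x, the builder does not inherit a layout from the root logger's appender, which is presumably why the old stdout/console appender lookup could be deleted.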

View File

@ -26,11 +26,11 @@ import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
@ -132,12 +132,9 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
this.ratio = inRatio;
// Hide lots of logging so the system out is usable as a tab delimited file.
org.apache.log4j.Logger.getLogger(CompactionConfiguration.class).
setLevel(org.apache.log4j.Level.ERROR);
org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class).
setLevel(org.apache.log4j.Level.ERROR);
org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR);
Log4jUtils.setLogLevel(CompactionConfiguration.class.getName(), "ERROR");
Log4jUtils.setLogLevel(RatioBasedCompactionPolicy.class.getName(), "ERROR");
Log4jUtils.setLogLevel(cpClass.getName(), "ERROR");
Configuration configuration = HBaseConfiguration.create();
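
Log4jUtils is HBase's own shim in the hbase-logging module; it keeps log4j2 classes off the compile path of ordinary modules (the enforcer rules later in this commit ban direct imports). Under log4j2, a helper like setLogLevel plausibly reduces to a Configurator call; only Configurator.setLevel below is guaranteed log4j2 API, the surrounding framing is an assumption:

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.core.config.Configurator;

    // roughly what Log4jUtils.setLogLevel(name, "ERROR") delegates to:
    Configurator.setLevel(CompactionConfiguration.class.getName(), Level.ERROR);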

View File

@ -27,15 +27,17 @@ import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
@ -50,9 +52,6 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Appender;
import org.apache.log4j.LogManager;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
@ -60,14 +59,9 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatcher;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
@RunWith(MockitoJUnitRunner.class)
@Category({LargeTests.class})
@Category({ LargeTests.class })
public class TestCanaryTool {
@ClassRule
@ -81,22 +75,26 @@ public class TestCanaryTool {
@Rule
public TestName name = new TestName();
private org.apache.logging.log4j.core.Appender mockAppender;
@Before
public void setUp() throws Exception {
testingUtility = new HBaseTestingUtility();
testingUtility.startMiniCluster();
LogManager.getRootLogger().addAppender(mockAppender);
mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
when(mockAppender.getName()).thenReturn("mockAppender");
when(mockAppender.isStarted()).thenReturn(true);
((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
.getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
}
@After
public void tearDown() throws Exception {
testingUtility.shutdownMiniCluster();
LogManager.getRootLogger().removeAppender(mockAppender);
((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
.getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender);
}
@Mock
Appender mockAppender;
@Test
public void testBasicZookeeperCanaryWorks() throws Exception {
final String[] args = { "-t", "10000", "-zookeeper" };
@ -105,8 +103,8 @@ public class TestCanaryTool {
@Test
public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception {
final String[] args = { "-t", "10000", "-zookeeper", "-treatFailureAsError",
"-permittedZookeeperFailures", "1" };
final String[] args =
{ "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" };
testZookeeperCanaryWithArgs(args);
}
@ -115,7 +113,7 @@ public class TestCanaryTool {
final TableName tableName = TableName.valueOf(name.getMethodName());
Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
// insert some test rows
for (int i=0; i<1000; i++) {
for (int i = 0; i < 1000; i++) {
byte[] iBytes = Bytes.toBytes(i);
Put p = new Put(iBytes);
p.addColumn(FAMILY, COLUMN, iBytes);
@ -184,7 +182,7 @@ public class TestCanaryTool {
TableName tableName = TableName.valueOf("testCanaryRegionTaskResult");
Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
// insert some test rows
for (int i=0; i<1000; i++) {
for (int i = 0; i < 1000; i++) {
byte[] iBytes = Bytes.toBytes(i);
Put p = new Put(iBytes);
p.addColumn(FAMILY, COLUMN, iBytes);
@ -213,7 +211,7 @@ public class TestCanaryTool {
assertFalse("verify region map has size > 0", regionMap.isEmpty());
for (String regionName : regionMap.keySet()) {
for (CanaryTool.RegionTaskResult res: regionMap.get(regionName)) {
for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) {
assertNotNull("verify getRegionNameAsString()", regionName);
assertNotNull("verify getRegionInfo()", res.getRegionInfo());
assertNotNull("verify getTableName()", res.getTableName());
@ -250,10 +248,11 @@ public class TestCanaryTool {
// )
// )
//
@org.junit.Ignore @Test
@org.junit.Ignore
@Test
public void testReadTableTimeouts() throws Exception {
final TableName [] tableNames = new TableName[] {TableName.valueOf(name.getMethodName() + "1"),
TableName.valueOf(name.getMethodName() + "2")};
final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"),
TableName.valueOf(name.getMethodName() + "2") };
// Create 2 test tables.
for (int j = 0; j < 2; j++) {
Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY });
@ -270,8 +269,8 @@ public class TestCanaryTool {
CanaryTool canary = new CanaryTool(executor, sink);
String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," +
tableNames[1].getNameAsString() + "=0";
String[] args = {"-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
name.getMethodName() + "2"};
String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
name.getMethodName() + "2" };
assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class));
for (int i = 0; i < 2; i++) {
@ -281,16 +280,19 @@ public class TestCanaryTool {
sink.getReadLatencyMap().get(tableNames[i].getNameAsString()));
}
// One table's timeout is set for 0 ms and thus, should lead to an error.
verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
verify(mockAppender, times(1))
.append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
@Override
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("exceeded the configured read timeout.");
public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
return argument.getMessage().getFormattedMessage()
.contains("exceeded the configured read timeout.");
}
}));
verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
verify(mockAppender, times(2))
.append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
@Override
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("Configured read timeout");
public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
return argument.getMessage().getFormattedMessage().contains("Configured read timeout");
}
}));
}
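
Every matcher rewrite in this file follows the same mechanical mapping from the log4j 1.x event API to log4j2's. As a sketch of the correspondence:

    import org.apache.logging.log4j.core.LogEvent;

    final class LogEvents {
      // log4j2 replacement for log4j 1.x LoggingEvent.getRenderedMessage()
      static String renderedMessage(LogEvent event) {
        return event.getMessage().getFormattedMessage();
      }
    }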
@ -300,41 +302,45 @@ public class TestCanaryTool {
ExecutorService executor = new ScheduledThreadPoolExecutor(1);
CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink());
CanaryTool canary = new CanaryTool(executor, sink);
String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE)};
String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) };
assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
verify(mockAppender, times(1)).doAppend(argThat(
new ArgumentMatcher<LoggingEvent>() {
verify(mockAppender, times(1))
.append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
@Override
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("Configured write timeout");
public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
return argument.getMessage().getFormattedMessage().contains("Configured write timeout");
}
}));
}
//no table created, so there should be no regions
// no table created, so there should be no regions
@Test
public void testRegionserverNoRegions() throws Exception {
runRegionserverCanary();
verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
verify(mockAppender)
.append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
@Override
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("Regionserver not serving any regions");
public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
return argument.getMessage().getFormattedMessage()
.contains("Regionserver not serving any regions");
}
}));
}
//by creating a table, there shouldn't be any region servers not serving any regions
// by creating a table, there shouldn't be any region servers not serving any regions
@Test
public void testRegionserverWithRegions() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
testingUtility.createTable(tableName, new byte[][] { FAMILY });
runRegionserverCanary();
verify(mockAppender, never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
verify(mockAppender, never())
.append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
@Override
public boolean matches(LoggingEvent argument) {
return argument.getRenderedMessage().contains("Regionserver not serving any regions");
public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
return argument.getMessage().getFormattedMessage()
.contains("Regionserver not serving any regions");
}
}));
}
@ -344,7 +350,7 @@ public class TestCanaryTool {
final TableName tableName = TableName.valueOf(name.getMethodName());
Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
// insert some test rows
for (int i=0; i<1000; i++) {
for (int i = 0; i < 1000; i++) {
byte[] iBytes = Bytes.toBytes(i);
Put p = new Put(iBytes);
p.addColumn(FAMILY, COLUMN, iBytes);
@ -358,8 +364,7 @@ public class TestCanaryTool {
new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration());
conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true);
assertEquals(0, ToolRunner.run(conf, canary, args));
verify(sink, atLeastOnce())
.publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
isA(ColumnFamilyDescriptor.class), anyLong());
assertEquals("verify no read error count", 0, canary.getReadFailures().size());
}
@ -367,14 +372,12 @@ public class TestCanaryTool {
private void runRegionserverCanary() throws Exception {
ExecutorService executor = new ScheduledThreadPoolExecutor(1);
CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink());
String[] args = { "-t", "10000", "-regionserver"};
String[] args = { "-t", "10000", "-regionserver" };
assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
assertEquals("verify no read error count", 0, canary.getReadFailures().size());
}
private void testZookeeperCanaryWithArgs(String[] args) throws Exception {
Integer port =
Iterables.getOnlyElement(testingUtility.getZkCluster().getClientPortList(), null);
String hostPort = testingUtility.getZkCluster().getAddress().toString();
testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort);
ExecutorService executor = new ScheduledThreadPoolExecutor(2);
@ -382,8 +385,8 @@ public class TestCanaryTool {
CanaryTool canary = new CanaryTool(executor, sink);
assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
String baseZnode = testingUtility.getConfiguration()
.get(HConstants.ZOOKEEPER_ZNODE_PARENT, HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
String baseZnode = testingUtility.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT,
HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
verify(sink, atLeastOnce()).publishReadTiming(eq(baseZnode), eq(hostPort), anyLong());
}
}
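
The getName()/isStarted() stubs in setUp above are not decoration: log4j2 registers appenders by name and skips appenders that are not in the started state, so a bare Mockito mock would never see an event. A minimal sketch of the pattern, assuming log4j2 2.x semantics:

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.core.Appender;
    import org.apache.logging.log4j.core.Logger;

    Appender mockAppender = mock(Appender.class);
    when(mockAppender.getName()).thenReturn("mockAppender"); // appenders are keyed by name
    when(mockAppender.isStarted()).thenReturn(true);         // unstarted appenders are skipped
    ((Logger) LogManager.getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
    // verification then targets Appender.append(LogEvent),
    // not the old log4j 1.x doAppend(LoggingEvent)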

View File

@ -46,12 +46,10 @@
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-mapreduce</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client-byo-hadoop</artifactId>
<version>${project.version}</version>
</dependency>
<!-- parent pom defines these for children. :( :( :( -->
<dependency>
@ -60,8 +58,18 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>provided</scope>
</dependency>
<!-- Test dependencies -->

View File

@ -1,7 +1,7 @@
<project xmlns="https://maven.apache.org/POM/4.0.0"
xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@ -19,7 +19,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>hbase-shaded</artifactId>
@ -67,7 +67,9 @@
<profile>
<id>hadoop-3.0</id>
<activation>
<property><name>!hadoop.profile</name></property>
<property>
<name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<dependency>

View File

@ -75,7 +75,7 @@
<exclude>com.github.spotbugs:*</exclude>
<exclude>org.apache.htrace:*</exclude>
<exclude>org.apache.yetus:*</exclude>
<exclude>log4j:*</exclude>
<exclude>org.apache.logging.log4j:*</exclude>
<exclude>commons-logging:*</exclude>
<exclude>org.javassist:*</exclude>
</excludes>
@ -92,5 +92,4 @@
<artifactId>hbase-client</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -51,7 +51,7 @@
<configuration>
<archive>
<manifest>
<!--Include the Driver class as the 'main'.
<!-- Include the Driver class as the 'main'.
Executing the jar will then show a list of the basic MR jobs.
-->
<mainClass>org/apache/hadoop/hbase/mapreduce/Driver</mainClass>
@ -177,7 +177,9 @@
<profile>
<id>hadoop-3.0</id>
<activation>
<property><name>!hadoop.profile</name></property>
<property>
<name>!hadoop.profile</name>
</property>
</activation>
<properties>
<hadoop.version>${hadoop-three.version}</hadoop.version>

View File

@ -1,7 +1,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@ -19,7 +19,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
-->
<modelVersion>4.0.0</modelVersion>
<parent>
@ -39,17 +39,35 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-logging</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>

View File

@ -1,7 +1,7 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@ -19,7 +19,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
-->
<modelVersion>4.0.0</modelVersion>
<parent>
@ -80,40 +80,12 @@
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-app</artifactId>
<version>${hadoop.version}</version>
<type>test-jar</type>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
<version>${hadoop.version}</version>
<type>test-jar</type>
<scope>compile</scope>
</dependency>
@ -162,7 +134,6 @@
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-testing-util</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
<exclusions>
<exclusion>
@ -172,7 +143,6 @@
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
@ -219,7 +189,7 @@
<exclude>com.github.spotbugs:*</exclude>
<exclude>org.apache.htrace:*</exclude>
<exclude>org.apache.yetus:*</exclude>
<exclude>log4j:*</exclude>
<exclude>org.apache.logging.log4j:*</exclude>
<exclude>commons-logging:*</exclude>
<exclude>org.javassist:*</exclude>
</excludes>
@ -230,5 +200,4 @@
</plugin>
</plugins>
</build>
</project>

View File

@ -40,7 +40,6 @@
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client</artifactId>
<version>${project.version}</version>
</dependency>
<!-- parent pom defines these for children. :( :( :( -->
<dependency>
@ -49,8 +48,18 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>provided</scope>
</dependency>
<!-- Test dependencies -->

View File

@ -1,5 +1,7 @@
<?xml version="1.0"?>
<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="https://maven.apache.org/POM/4.0.0"
xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
@ -55,13 +57,18 @@
</dependency>
<!-- put the log implementations to optional -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<optional>true</optional>
</dependency>
</dependencies>
@ -155,7 +162,7 @@
<exclude>com.github.spotbugs:*</exclude>
<exclude>org.apache.htrace:*</exclude>
<exclude>org.apache.yetus:*</exclude>
<exclude>log4j:*</exclude>
<exclude>org.apache.logging.log4j:*</exclude>
<exclude>commons-logging:*</exclude>
<exclude>org.javassist:*</exclude>
</excludes>
@ -222,13 +229,11 @@
<pattern>com.zaxxer</pattern>
<shadedPattern>${shaded.prefix}.com.zaxxer</shadedPattern>
</relocation>
<!-- dnsjava -->
<relocation>
<pattern>org.xbill</pattern>
<shadedPattern>${shaded.prefix}.org.xbill</shadedPattern>
</relocation>
<!-- netty family -->
<relocation>
<pattern>org.jboss.netty</pattern>
@ -238,13 +243,11 @@
<pattern>io.netty</pattern>
<shadedPattern>${shaded.prefix}.io.netty</shadedPattern>
</relocation>
<!-- top level okio -->
<relocation>
<pattern>okio</pattern>
<shadedPattern>${shaded.prefix}.okio</shadedPattern>
</relocation>
<!-- top level org -->
<relocation>
<pattern>org.checkerframework</pattern>
@ -323,8 +326,6 @@
<pattern>org.objectweb</pattern>
<shadedPattern>${shaded.prefix}.org.objectweb</shadedPattern>
</relocation>
<!-- org.apache relocations not in org.apache.hadoop or org.apache.commons -->
<relocation>
<pattern>org.apache.avro</pattern>
@ -362,7 +363,6 @@
<pattern>org.apache.zookeeper</pattern>
<shadedPattern>${shaded.prefix}.org.apache.zookeeper</shadedPattern>
</relocation>
<!-- org.apache.commons not including logging -->
<relocation>
<pattern>org.apache.commons.validator</pattern>
@ -444,7 +444,6 @@
<pattern>org.apache.commons.text</pattern>
<shadedPattern>${shaded.prefix}.org.apache.commons.text</shadedPattern>
</relocation>
<!-- top level net-->
<relocation>
<pattern>net/</pattern>
@ -456,7 +455,8 @@
<!-- Need to filter out some extraneous license files.
Don't use the ApacheLicenseRT because it just removes all
META-INF/LICENSE(.txt)? files, including ours. -->
<transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
<transformer
implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
<resources>
<resource>LICENSE.txt</resource>
<resource>ASL2.0</resource>
@ -465,11 +465,13 @@
</resources>
</transformer>
<!-- Where notices exist, just concat them -->
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
<transformer
implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
<addHeader>false</addHeader>
<projectName>${project.name}</projectName>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
<transformer
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
</transformer>
</transformers>
<filters>

View File

@ -142,13 +142,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -1,5 +1,7 @@
<?xml version="1.0"?>
<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="https://maven.apache.org/POM/4.0.0"
xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
@ -121,13 +123,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
@ -140,7 +152,9 @@
<profile>
<id>hadoop-3.0</id>
<activation>
<property><name>!hadoop.profile</name></property>
<property>
<name>!hadoop.profile</name>
</property>
</activation>
<dependencies>
<dependency>

View File

@ -253,13 +253,23 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -166,13 +166,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

pom.xml
View File

@ -865,7 +865,7 @@
</goals>
<configuration>
<excludes>
<exclude>log4j.properties</exclude>
<exclude>log4j2.xml</exclude>
</excludes>
</configuration>
</execution>
@ -1179,6 +1179,42 @@
</rules>
</configuration>
</execution>
<execution>
<id>banned-log4j</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<bannedDependencies>
<excludes>
<exclude>log4j:log4j</exclude>
</excludes>
<message>
We do not allow log4j dependencies because we now use log4j2
</message>
</bannedDependencies>
</rules>
</configuration>
</execution>
<execution>
<id>banned-slf4j-log4j12</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<bannedDependencies>
<excludes>
<exclude>org.slf4j:slf4j-log4j12</exclude>
</excludes>
<message>
We do not allow the slf4j-log4j12 dependency because we now use log4j-slf4j-impl
</message>
</bannedDependencies>
</rules>
</configuration>
</execution>
<execution>
<id>banned-jetty</id>
<goals>
@ -1267,16 +1303,18 @@
<reason>Use SLF4j for logging</reason>
<bannedImports>
<bannedImport>org.apache.commons.logging.**</bannedImport>
<bannedImport>org.apache.log4j.**</bannedImport>
<bannedImport>org.apache.logging.log4j.**</bannedImport>
</bannedImports>
</restrictImports>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>false</includeTestCode>
<commentLineBufferSize>512</commentLineBufferSize>
<reason>
Do not use log4j directly in code, see Log4jUtils in hbase-logging for more details.
Do not use log4j2 directly in code; see Log4jUtils in hbase-logging for more details.
</reason>
<bannedImports>
<bannedImport>org.apache.log4j.**</bannedImport>
<bannedImport>org.apache.logging.log4j.**</bannedImport>
</bannedImports>
</restrictImports>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
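
Taken together, the rules above allow log4j2 on the classpath only where the backend is actually assembled and keep HBase code itself on the slf4j facade. The allowed pattern is the usual one; a minimal sketch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Example {
      private static final Logger LOG = LoggerFactory.getLogger(Example.class);

      public void doWork() {
        // parameterized logging through the facade; at runtime
        // log4j-slf4j-impl binds these calls to log4j2
        LOG.info("work started for {}", "someId");
      }
    }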
@ -1653,7 +1691,7 @@
<junit.version>4.13</junit.version>
<hamcrest.version>1.3</hamcrest.version>
<htrace.version>4.2.0-incubating</htrace.version>
<log4j.version>1.2.17</log4j.version>
<log4j2.version>2.14.1</log4j2.version>
<mockito-core.version>2.28.2</mockito-core.version>
<protobuf.plugin.version>0.6.1</protobuf.plugin.version>
<thrift.path>thrift</thrift.path>
@ -2050,8 +2088,8 @@
</dependency>
<!--
Logging dependencies. In general, we use slf4j as the log facade in HBase, so all sub
modules should depend on slf4j-api at compile scope, and then depend on slf4j-log4j12
and log4j at test scope(and in hbase-assembly when shipping the binary) to redirect the
modules should depend on slf4j-api at compile scope, and then depend on log4j-slf4j-impl
and log4j2 at test scope (and in hbase-assembly when shipping the binary) to redirect the
log message to log4j. Do not introduce logging dependencies other than slf4j-api at compile
scope as it will mess up the logging framework for downstream users.
Here we also depend on jcl-over-slf4j and jul-to-slf4j, as some of the libraries we depend
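
On the bridges this comment mentions: jcl-over-slf4j works by providing drop-in replacements for the commons-logging classes, so it needs no code, but jul-to-slf4j requires an explicit installation step at startup. A hedged sketch — the SLF4JBridgeHandler calls are real org.slf4j.bridge API, though where HBase performs the installation is not shown in this diff:

    import org.slf4j.bridge.SLF4JBridgeHandler;

    // route java.util.logging through slf4j (and from there to log4j2)
    SLF4JBridgeHandler.removeHandlersForRootLogger();
    SLF4JBridgeHandler.install();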
@ -2063,16 +2101,12 @@
<artifactId>jettison</artifactId>
<version>${jettison.version}</version>
</dependency>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
@ -2084,9 +2118,24 @@
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>${log4j2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>${log4j2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>${log4j2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<version>${log4j2.version}</version>
</dependency>
<!-- Avro dependencies we mostly get transitively, manual version coalescing -->
<dependency>
@ -2094,8 +2143,6 @@
<artifactId>avro</artifactId>
<version>${avro.version}</version>
</dependency>
<!--This is not used by hbase directly. Used by thrift,
dropwizard and zk.-->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
@ -2771,6 +2818,46 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-app</artifactId>
<version>${hadoop-three.version}</version>
<type>test-jar</type>
<exclusions>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
@ -2792,10 +2879,6 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -2821,10 +2904,6 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -3051,6 +3130,14 @@
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -3126,10 +3213,6 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>