From 5844b53dea9b279275538db879fdefa4bc214bfe Mon Sep 17 00:00:00 2001
From: Duo Zhang
Date: Sat, 12 Mar 2022 03:17:43 +0800
Subject: [PATCH] HBASE-26802 Backport the log4j2 changes to branch-2 (#4166)

Signed-off-by: Andrew Purtell

 Conflicts:
	hbase-hadoop-compat/pom.xml
	hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
	hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
	hbase-shaded/hbase-shaded-client/pom.xml
	hbase-shaded/hbase-shaded-mapreduce/pom.xml
	hbase-shaded/hbase-shaded-testing-util/pom.xml
	hbase-shaded/pom.xml
	hbase-testing-util/pom.xml
---
 bin/hbase | 16 +-
 bin/hbase.cmd | 2 +
 conf/hbase-env.cmd | 5 +-
 conf/hbase-env.sh | 4 +-
 conf/log4j-hbtop.properties | 27 -
 conf/log4j.properties | 139 ---
 conf/log4j2-hbtop.properties | 35 +
 conf/log4j2.properties | 137 +++
 hbase-archetypes/hbase-client-project/pom.xml | 18 +-
 .../src/main/resources/log4j.properties | 121 --
 .../src/main/resources/log4j2.properties | 137 +++
 .../hbase-shaded-client-project/pom.xml | 18 +-
 .../src/main/resources/log4j.properties | 121 --
 .../src/main/resources/log4j2.properties | 137 +++
 hbase-assembly/pom.xml | 16 +-
 hbase-assembly/src/main/assembly/client.xml | 16 +-
 .../src/main/assembly/hadoop-three-compat.xml | 16 +-
 .../src/main/assembly/hadoop-two-compat.xml | 12 +-
 hbase-asyncfs/pom.xml | 18 +-
 .../hbase/io/asyncfs/AsyncFSTestBase.java | 5 -
 hbase-client/pom.xml | 13 +-
 .../hbase/ipc/TestFailedServersLog.java | 67 +-
 .../security/TestHBaseSaslRpcClient.java | 14 +-
 hbase-common/pom.xml | 13 +-
 .../hadoop/hbase/logging/TestJul2Slf4j.java | 48 +-
 .../hadoop/hbase/logging/TestLog4jUtils.java | 39 +-
 .../hbase-compression-aircompressor/pom.xml | 18 +-
 .../hbase-compression-lz4/pom.xml | 18 +-
 .../hbase-compression-snappy/pom.xml | 18 +-
 .../hbase-compression-xz/pom.xml | 18 +-
 .../hbase-compression-zstd/pom.xml | 18 +-
 hbase-endpoint/pom.xml | 18 +-
 hbase-examples/pom.xml | 18 +-
 hbase-hadoop-compat/pom.xml | 255 ++--
 hbase-hadoop2-compat/pom.xml | 18 +-
 hbase-hbtop/pom.xml | 13 +-
 hbase-http/pom.xml | 18 +-
 .../hadoop/hbase/http/log/LogLevel.java | 4 +-
 .../hadoop/hbase/http/log/TestLogLevel.java | 212 ++--
 hbase-it/pom.xml | 18 +-
 hbase-logging/pom.xml | 23 +-
 .../hadoop/hbase/AsyncConsoleAppender.java | 46 -
 .../hbase/logging/InternalLog4jUtils.java | 58 +-
 .../java/org/apache/log4j/FileAppender.java | 288 +++++
 .../src/test/resources/log4j.properties | 68 --
 .../src/test/resources/log4j2.properties | 68 ++
 hbase-mapreduce/pom.xml | 18 +-
 .../hadoop/hbase/util/LoadTestTool.java | 17 +-
 hbase-metrics-api/pom.xml | 13 +-
 hbase-metrics/pom.xml | 13 +-
 hbase-procedure/pom.xml | 13 +-
 hbase-protocol-shaded/pom.xml | 5 +-
 hbase-replication/pom.xml | 13 +-
 hbase-rest/pom.xml | 18 +-
 hbase-rsgroup/pom.xml | 18 +-
 hbase-server/pom.xml | 18 +-
 .../hadoop/hbase/HBaseTestingUtility.java | 1 -
 .../TestAsyncTableBatchRetryImmediately.java | 11 +-
 .../hbase/client/TestMultiRespectsLimits.java | 13 +-
 .../hadoop/hbase/ipc/TestProtoBufRpc.java | 7 +-
 .../hbase/ipc/TestRpcServerTraceLogging.java | 24 +-
 .../hbase/regionserver/TestHRegion.java | 2 +-
 .../regionserver/TestMultiLogThreshold.java | 64 +-
 .../TestRegionServerReportForDuty.java | 34 +-
 .../PerfTestCompactionPolicies.java | 26 +-
 .../hadoop/hbase/tool/TestCanaryTool.java | 162 +--
 .../hbase-shaded-check-invariants/pom.xml | 19 +-
 .../hbase-shaded-client-byo-hadoop/pom.xml | 294 ++---
 hbase-shaded/hbase-shaded-client/pom.xml | 152 ++-
 hbase-shaded/hbase-shaded-mapreduce/pom.xml | 614 ++++------
.../hbase-shaded-testing-util-tester/pom.xml | 57 +- .../hbase-shaded-testing-util/pom.xml | 332 +++-- .../pom.xml | 18 +- hbase-shaded/pom.xml | 1073 +++++++++-------- hbase-shell/pom.xml | 18 +- hbase-testing-util/pom.xml | 557 ++++----- hbase-thrift/pom.xml | 18 +- hbase-zookeeper/pom.xml | 13 +- pom.xml | 122 +- 79 files changed, 3383 insertions(+), 2803 deletions(-) delete mode 100644 conf/log4j-hbtop.properties delete mode 100644 conf/log4j.properties create mode 100644 conf/log4j2-hbtop.properties create mode 100644 conf/log4j2.properties delete mode 100644 hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties create mode 100644 hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties delete mode 100644 hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties create mode 100644 hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties delete mode 100644 hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java create mode 100644 hbase-logging/src/test/java/org/apache/log4j/FileAppender.java delete mode 100644 hbase-logging/src/test/resources/log4j.properties create mode 100644 hbase-logging/src/test/resources/log4j2.properties diff --git a/bin/hbase b/bin/hbase index 75aa81b7c3a..4489a3f3391 100755 --- a/bin/hbase +++ b/bin/hbase @@ -305,10 +305,13 @@ else # make it easier to check for shaded/not later on. shaded_jar="" fi +# here we will add slf4j-api, commons-logging, jul-to-slf4j, jcl-over-slf4j +# to classpath, as they are all logging bridges. Only exclude log4j* so we +# will not actually log anything out. Add it later if necessary for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \ - [ "${f}" != "htrace-core.jar$" ] && \ - [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then + [[ "${f}" != "htrace-core.jar$" ]] && \ + [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then CLASSPATH="${CLASSPATH}:${f}" fi done @@ -671,7 +674,7 @@ elif [ "$COMMAND" = "mapredcp" ] ; then for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \ [ "${f}" != "htrace-core.jar$" ] && \ - [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then + [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then echo -n ":${f}" fi done @@ -720,8 +723,8 @@ elif [ "$COMMAND" = "hbtop" ] ; then done fi - if [ -f "${HBASE_HOME}/conf/log4j-hbtop.properties" ] ; then - HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j.configuration=file:${HBASE_HOME}/conf/log4j-hbtop.properties" + if [ -f "${HBASE_HOME}/conf/log4j2-hbtop.properties" ] ; then + HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j2.configurationFile=file:${HBASE_HOME}/conf/log4j2-hbtop.properties" fi HBASE_OPTS="${HBASE_OPTS} ${HBASE_HBTOP_OPTS}" else @@ -810,10 +813,9 @@ fi HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX" # by now if we're running a command it means we need logging -for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do +for f in ${HBASE_HOME}/lib/client-facing-thirdparty/log4j*.jar; do if [ -f "${f}" ]; then CLASSPATH="${CLASSPATH}:${f}" - break fi done diff --git a/bin/hbase.cmd b/bin/hbase.cmd index 3b569099090..240b63c7ec7 100644 --- a/bin/hbase.cmd +++ b/bin/hbase.cmd @@ -332,6 +332,7 @@ set HBASE_OPTS=%HBASE_OPTS% -Djava.util.logging.config.class="org.apache.hadoop. 
if not defined HBASE_ROOT_LOGGER ( set HBASE_ROOT_LOGGER=INFO,console ) + set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger="%HBASE_ROOT_LOGGER%" if defined JAVA_LIBRARY_PATH ( @@ -348,6 +349,7 @@ if not defined HBASE_SECURITY_LOGGER ( set HBASE_SECURITY_LOGGER=INFO,DRFAS ) ) + set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger="%HBASE_SECURITY_LOGGER%" set HEAP_SETTINGS=%JAVA_HEAP_MAX% %JAVA_OFFHEAP_MAX% diff --git a/conf/hbase-env.cmd b/conf/hbase-env.cmd index 4beebf646de..84519d5606d 100644 --- a/conf/hbase-env.cmd +++ b/conf/hbase-env.cmd @@ -32,7 +32,7 @@ @rem set HBASE_OFFHEAPSIZE=1000 @rem For example, to allocate 8G of offheap, to 8G: -@rem etHBASE_OFFHEAPSIZE=8G +@rem set HBASE_OFFHEAPSIZE=8G @rem Extra Java runtime options. @rem Below are what we set by default. May only work with SUN JVM. @@ -82,6 +82,9 @@ set HBASE_OPTS=%HBASE_OPTS% "-XX:+UseConcMarkSweepGC" "-Djava.net.preferIPv4Stac @rem Tell HBase whether it should manage it's own instance of ZooKeeper or not. @rem set HBASE_MANAGES_ZK=true +@rem Tell HBase the logger level and appenders +@rem set HBASE_ROOT_LOGGER=INFO,DRFA + @rem Uncomment to enable trace, you can change the options to use other exporters such as jaeger or @rem zipkin. See https://github.com/open-telemetry/opentelemetry-java-instrumentation on how to @rem configure exporters and other components through system properties. diff --git a/conf/hbase-env.sh b/conf/hbase-env.sh index ee71a0ab56d..e049fd6d853 100644 --- a/conf/hbase-env.sh +++ b/conf/hbase-env.sh @@ -126,11 +126,11 @@ # export HBASE_MANAGES_ZK=true # The default log rolling policy is RFA, where the log file is rolled as per the size defined for the -# RFA appender. Please refer to the log4j.properties file to see more details on this appender. +# RFA appender. Please refer to the log4j2.properties file to see more details on this appender. # In case one needs to do log rolling on a date change, one should set the environment property # HBASE_ROOT_LOGGER to ",DRFA". # For example: -# HBASE_ROOT_LOGGER=INFO,DRFA +# export HBASE_ROOT_LOGGER=INFO,DRFA # The reason for changing default to RFA is to avoid the boundary case of filling out disk space as # DRFA doesn't put any cap on the log size. Please refer to HBase-5655 for more context. diff --git a/conf/log4j-hbtop.properties b/conf/log4j-hbtop.properties deleted file mode 100644 index 4d68d79db70..00000000000 --- a/conf/log4j-hbtop.properties +++ /dev/null @@ -1,27 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -log4j.rootLogger=WARN,console -log4j.threshold=WARN - -# console -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# ZooKeeper will still put stuff at WARN -log4j.logger.org.apache.zookeeper=ERROR diff --git a/conf/log4j.properties b/conf/log4j.properties deleted file mode 100644 index 2282fa5d4a3..00000000000 --- a/conf/log4j.properties +++ /dev/null @@ -1,139 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log -hbase.log.level=INFO - -# Define the root logger to the system property "hbase.root.logger". -log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %.1000m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE 
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender -log4j.appender.asyncconsole.target=System.err - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=${hbase.log.level} -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=${hbase.log.level} -log4j.logger.org.apache.hadoop.hbase.META=${hbase.log.level} -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=${hbase.log.level} -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level} -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. -# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN - -# Disable request log by default, you can enable this by changing the appender -log4j.category.http.requests=INFO,NullAppender -log4j.additivity.http.requests=false -# Replace the above with this configuration if you want an http access.log -#log4j.appender.accessRFA=org.apache.log4j.RollingFileAppender -#log4j.appender.accessRFA.File=/var/log/hbase/access.log -#log4j.appender.accessRFA.layout=org.apache.log4j.PatternLayout -#log4j.appender.accessRFA.layout.ConversionPattern=%m%n -#log4j.appender.accessRFA.MaxFileSize=200MB -#log4j.appender.accessRFA.MaxBackupIndex=10 -# route http.requests to the accessRFA appender -#log4j.logger.http.requests=INFO,accessRFA -# disable http.requests.* entries going up to the root logger -#log4j.additivity.http.requests=false diff --git a/conf/log4j2-hbtop.properties b/conf/log4j2-hbtop.properties new file mode 100644 index 00000000000..de2f97641da --- /dev/null +++ b/conf/log4j2-hbtop.properties @@ -0,0 +1,35 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. 
The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ + +status = warn +dest = err +name = PropertiesConfig + +# console +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %m%n + +rootLogger = WARN,console + +# ZooKeeper will still put stuff at WARN +logger.zookeeper.name = org.apache.zookeeper +logger.zookeeper.level = ERROR + diff --git a/conf/log4j2.properties b/conf/log4j2.properties new file mode 100644 index 00000000000..5ffcfda2417 --- /dev/null +++ b/conf/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = 
${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. +# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-archetypes/hbase-client-project/pom.xml b/hbase-archetypes/hbase-client-project/pom.xml index 9e20d7fd0b1..08630ae8ee5 100644 --- a/hbase-archetypes/hbase-client-project/pom.xml +++ b/hbase-archetypes/hbase-client-project/pom.xml @@ -64,13 +64,23 @@ runtime - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api runtime - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + runtime + + + org.apache.logging.log4j + log4j-slf4j-impl + runtime + + + org.apache.logging.log4j + log4j-1.2-api runtime diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties deleted file mode 100644 index 0b01e57e6ea..00000000000 --- a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties +++ /dev/null @@ -1,121 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log - -# Define the root logger to the system property "hbase.root.logger". 
-log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=INFO -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=INFO -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. 
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO -# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties new file mode 100644 index 00000000000..5ffcfda2417 --- /dev/null +++ b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = 
${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. +# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml b/hbase-archetypes/hbase-shaded-client-project/pom.xml index 16ea70faa80..29f46050688 100644 --- a/hbase-archetypes/hbase-shaded-client-project/pom.xml +++ b/hbase-archetypes/hbase-shaded-client-project/pom.xml @@ -70,13 +70,23 @@ runtime - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api runtime - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + runtime + + + org.apache.logging.log4j + log4j-slf4j-impl + runtime + + + org.apache.logging.log4j + log4j-1.2-api runtime diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties deleted file mode 100644 index 0b01e57e6ea..00000000000 --- a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties +++ /dev/null @@ -1,121 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log - -# Define the root logger to the system property "hbase.root.logger". 
-log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=INFO -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=INFO -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. 
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO -# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties new file mode 100644 index 00000000000..5ffcfda2417 --- /dev/null +++ b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = 
${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. +# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml index 51385d0c90f..5c68d9822f5 100644 --- a/hbase-assembly/pom.xml +++ b/hbase-assembly/pom.xml @@ -352,12 +352,16 @@ jul-to-slf4j - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + + + org.apache.logging.log4j + log4j-slf4j-impl @@ -365,6 +369,10 @@ opentelemetry-javaagent all + + org.apache.logging.log4j + log4j-1.2-api + diff --git a/hbase-assembly/src/main/assembly/client.xml b/hbase-assembly/src/main/assembly/client.xml index 41aeea577eb..2a8e669ae49 100644 --- a/hbase-assembly/src/main/assembly/client.xml +++ b/hbase-assembly/src/main/assembly/client.xml @@ -54,19 +54,14 @@ jline:jline com.github.stephenc.findbugs:findbugs-annotations commons-logging:commons-logging - log4j:log4j - ch.qos.reload4j:reload4j org.apache.hbase:hbase-shaded-client org.apache.hbase:hbase-shaded-client-byo-hadoop org.apache.hbase:hbase-shaded-mapreduce org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations - org.slf4j:slf4j-api - org.slf4j:jcl-over-slf4j - org.slf4j:jul-to-slf4j - org.slf4j:slf4j-log4j12 - org.slf4j:slf4j-reload4j + org.slf4j:* + org.apache.logging.log4j:* io.opentelemetry.javaagent:* @@ -148,14 +143,11 @@ com.github.stephenc.findbugs:findbugs-annotations commons-logging:commons-logging - ch.qos.reload4j:reload4j org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations - org.slf4j:slf4j-api - org.slf4j:jcl-over-slf4j - org.slf4j:jul-to-slf4j - org.slf4j:slf4j-reload4j + org.slf4j:* + org.apache.logging.log4j:* io.opentelemetry:* diff --git a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml index d24f5822427..a1bbcb8ab86 100644 --- a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml +++ b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml @@ -49,11 +49,9 @@ org.apache.hbase:hbase-metrics 
org.apache.hbase:hbase-metrics-api org.apache.hbase:hbase-procedure - org.apache.hbase:hbase-protocol org.apache.hbase:hbase-protocol-shaded org.apache.hbase:hbase-replication org.apache.hbase:hbase-rest - org.apache.hbase:hbase-rsgroup org.apache.hbase:hbase-server org.apache.hbase:hbase-shell org.apache.hbase:hbase-testing-util @@ -106,14 +104,11 @@ org.apache.hbase:hbase-shaded-mapreduce com.github.stephenc.findbugs:findbugs-annotations commons-logging:commons-logging - log4j:log4j - ch.qos.reload4j:reload4j org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations - org.slf4j:slf4j-api - org.slf4j:slf4j-log4j12 - org.slf4j:slf4j-reload4j + org.slf4j:* + org.apache.logging.log4j:* io.opentelemetry.javaagent:* @@ -207,14 +202,11 @@ com.github.stephenc.findbugs:findbugs-annotations commons-logging:commons-logging - ch.qos.reload4j:reload4j org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations - org.slf4j:slf4j-api - org.slf4j:jcl-over-slf4j - org.slf4j:jul-to-slf4j - org.slf4j:slf4j-reload4j + org.slf4j:* + org.apache.logging.log4j:* io.opentelemetry:* diff --git a/hbase-assembly/src/main/assembly/hadoop-two-compat.xml b/hbase-assembly/src/main/assembly/hadoop-two-compat.xml index 1845e87c5ec..53edfdfa7a2 100644 --- a/hbase-assembly/src/main/assembly/hadoop-two-compat.xml +++ b/hbase-assembly/src/main/assembly/hadoop-two-compat.xml @@ -103,8 +103,6 @@ com.sun.jersey:* commons-logging:commons-logging jline:jline - log4j:log4j - ch.qos.reload4j:reload4j org.apache.hbase:hbase-shaded-client-byo-hadoop org.apache.hbase:hbase-shaded-client org.apache.hbase:hbase-shaded-mapreduce @@ -112,9 +110,8 @@ org.apache.htrace:htrace-core org.apache.yetus:audience-annotations org.jruby:jruby-complete - org.slf4j:slf4j-api - org.slf4j:slf4j-log4j12 - org.slf4j:slf4j-reload4j + org.slf4j:* + org.apache.logging.log4j:* io.opentelemetry.javaagent:* @@ -208,12 +205,11 @@ com.github.stephenc.findbugs:findbugs-annotations commons-logging:commons-logging - ch.qos.reload4j:reload4j org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations - org.slf4j:slf4j-api - org.slf4j:slf4j-reload4j + org.slf4j:* + org.apache.logging.log4j:* io.opentelemetry:* diff --git a/hbase-asyncfs/pom.xml b/hbase-asyncfs/pom.xml index 3a48250320f..99a325d26e8 100644 --- a/hbase-asyncfs/pom.xml +++ b/hbase-asyncfs/pom.xml @@ -149,13 +149,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java index fc148e8de79..e1bc83ca684 100644 --- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java +++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java @@ -96,11 +96,6 @@ public abstract class AsyncFSTestBase { createDirsAndSetProperties(); Configuration conf = UTIL.getConfiguration(); - // Error level to skip some warnings specific to the minicluster. 
See HBASE-4709 - org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class) - .setLevel(org.apache.log4j.Level.ERROR); - org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class) - .setLevel(org.apache.log4j.Level.ERROR); CLUSTER = new MiniDFSCluster.Builder(conf).numDataNodes(servers).build(); CLUSTER.waitClusterUp(); diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml index 60177380a24..93432a213b1 100644 --- a/hbase-client/pom.xml +++ b/hbase-client/pom.xml @@ -177,13 +177,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java index fa44022f8d0..dc94e91f4fd 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java @@ -17,73 +17,82 @@ */ package org.apache.hadoop.hbase.ipc; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Appender; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.spi.LoggingEvent; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; -@RunWith(MockitoJUnitRunner.class) @Category({ ClientTests.class, SmallTests.class }) public class TestFailedServersLog { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFailedServersLog.class); + HBaseClassTestRule.forClass(TestFailedServersLog.class); static final int TEST_PORT = 9999; + private Address addr; - @Mock - private Appender mockAppender; - - @Captor - private ArgumentCaptor captorLoggingEvent; + private org.apache.logging.log4j.core.Appender mockAppender; @Before public void setup() { - LogManager.getRootLogger().addAppender(mockAppender); + mockAppender = mock(org.apache.logging.log4j.core.Appender.class); + when(mockAppender.getName()).thenReturn("mockAppender"); + when(mockAppender.isStarted()).thenReturn(true); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(FailedServers.class)).addAppender(mockAppender); + } @After public void teardown() { - LogManager.getRootLogger().removeAppender(mockAppender); + 
((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(FailedServers.class)).removeAppender(mockAppender); } @Test public void testAddToFailedServersLogging() { - Throwable nullException = new NullPointerException(); + AtomicReference level = new AtomicReference<>(); + AtomicReference msg = new AtomicReference(); + doAnswer(new Answer() { + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + org.apache.logging.log4j.core.LogEvent logEvent = + invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class); + level.set(logEvent.getLevel()); + msg.set(logEvent.getMessage().getFormattedMessage()); + return null; + } + }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class)); + + Throwable nullException = new NullPointerException(); FailedServers fs = new FailedServers(new Configuration()); addr = Address.fromParts("localhost", TEST_PORT); fs.addToFailedServers(addr, nullException); - Mockito.verify(mockAppender).doAppend((LoggingEvent) captorLoggingEvent.capture()); - LoggingEvent loggingEvent = (LoggingEvent) captorLoggingEvent.getValue(); - assertThat(loggingEvent.getLevel(), is(Level.DEBUG)); - assertEquals("Added failed server with address " + addr.toString() + " to list caused by " - + nullException.toString(), - loggingEvent.getRenderedMessage()); + verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class)); + assertEquals(org.apache.logging.log4j.Level.DEBUG, level.get()); + assertEquals("Added failed server with address " + addr.toString() + " to list caused by " + + nullException.toString(), msg.get()); } - } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java index 2252c215fa6..538a9b91c3c 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java @@ -30,7 +30,6 @@ import static org.mockito.Mockito.when; import java.io.IOException; import java.net.InetAddress; import java.util.Map; - import javax.security.auth.callback.Callback; import javax.security.auth.callback.NameCallback; import javax.security.auth.callback.PasswordCallback; @@ -39,7 +38,6 @@ import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.sasl.RealmCallback; import javax.security.sasl.Sasl; import javax.security.sasl.SaslClient; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -55,16 +53,15 @@ import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.Assert; -import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.base.Strings; @@ -83,17 +80,12 @@ public class TestHBaseSaslRpcClient { static final String DEFAULT_USER_NAME = "principal"; static final String DEFAULT_USER_PASSWORD = "password"; - private 
static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class); @Rule public ExpectedException exception = ExpectedException.none(); - @BeforeClass - public static void before() { - Logger.getRootLogger().setLevel(Level.DEBUG); - } - @Test public void testSaslClientUsesGivenRpcProtection() throws Exception { Token token = createTokenMockWithCredentials(DEFAULT_USER_NAME, diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml index cb7e1a55f9a..1f395ed1d37 100644 --- a/hbase-common/pom.xml +++ b/hbase-common/pom.xml @@ -235,13 +235,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java index cf654f583b8..f67ce616e2e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java @@ -17,27 +17,26 @@ */ package org.apache.hadoop.hbase.logging; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.IOException; +import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Appender; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.spi.LoggingEvent; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.mockito.ArgumentCaptor; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; /** * This should be in the hbase-logging module but the {@link HBaseClassTestRule} is in hbase-common @@ -56,27 +55,42 @@ public class TestJul2Slf4j { private String loggerName = getClass().getName(); - private Appender mockAppender; + private org.apache.logging.log4j.core.Appender mockAppender; @Before public void setUp() { - mockAppender = mock(Appender.class); - LogManager.getRootLogger().addAppender(mockAppender); + mockAppender = mock(org.apache.logging.log4j.core.Appender.class); + when(mockAppender.getName()).thenReturn("mockAppender"); + when(mockAppender.isStarted()).thenReturn(true); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(loggerName)).addAppender(mockAppender); } @After public void tearDown() { - LogManager.getRootLogger().removeAppender(mockAppender); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(loggerName)).removeAppender(mockAppender); } @Test public void test() throws IOException { + AtomicReference level = new AtomicReference<>(); + AtomicReference msg = new AtomicReference(); + doAnswer(new Answer() { + + @Override + public Void 
answer(InvocationOnMock invocation) throws Throwable { + org.apache.logging.log4j.core.LogEvent logEvent = + invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class); + level.set(logEvent.getLevel()); + msg.set(logEvent.getMessage().getFormattedMessage()); + return null; + } + }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class)); java.util.logging.Logger logger = java.util.logging.Logger.getLogger(loggerName); logger.info(loggerName); - ArgumentCaptor captor = ArgumentCaptor.forClass(LoggingEvent.class); - verify(mockAppender, times(1)).doAppend(captor.capture()); - LoggingEvent loggingEvent = captor.getValue(); - assertThat(loggingEvent.getLevel(), is(Level.INFO)); - assertEquals(loggerName, loggingEvent.getRenderedMessage()); + verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class)); + assertEquals(org.apache.logging.log4j.Level.INFO, level.get()); + assertEquals(loggerName, msg.get()); } -} +} \ No newline at end of file diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java index 89931de7128..806107b55c6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java @@ -24,9 +24,6 @@ import java.io.IOException; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -44,23 +41,29 @@ public class TestLog4jUtils { @Test public void test() { - Logger zk = LogManager.getLogger("org.apache.zookeeper"); - Level zkLevel = zk.getEffectiveLevel(); - Logger hbaseZk = LogManager.getLogger("org.apache.hadoop.hbase.zookeeper"); - Level hbaseZkLevel = hbaseZk.getEffectiveLevel(); - Logger client = LogManager.getLogger("org.apache.hadoop.hbase.client"); - Level clientLevel = client.getEffectiveLevel(); + org.apache.logging.log4j.Logger zk = + org.apache.logging.log4j.LogManager.getLogger("org.apache.zookeeper"); + org.apache.logging.log4j.Level zkLevel = zk.getLevel(); + org.apache.logging.log4j.Logger hbaseZk = + org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.zookeeper"); + org.apache.logging.log4j.Level hbaseZkLevel = hbaseZk.getLevel(); + org.apache.logging.log4j.Logger client = + org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.client"); + org.apache.logging.log4j.Level clientLevel = client.getLevel(); Log4jUtils.disableZkAndClientLoggers(); - assertEquals(Level.OFF, zk.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(zk.getName())); - assertEquals(Level.OFF, hbaseZk.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(hbaseZk.getName())); - assertEquals(Level.OFF, client.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(client.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, zk.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + Log4jUtils.getEffectiveLevel(zk.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, hbaseZk.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + 
Log4jUtils.getEffectiveLevel(hbaseZk.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, client.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + Log4jUtils.getEffectiveLevel(client.getName())); // restore the level - zk.setLevel(zkLevel); - hbaseZk.setLevel(hbaseZkLevel); - client.setLevel(clientLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(zk.getName(), zkLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(hbaseZk.getName(), hbaseZkLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(client.getName(), clientLevel); } @Test diff --git a/hbase-compression/hbase-compression-aircompressor/pom.xml b/hbase-compression/hbase-compression-aircompressor/pom.xml index f7314977b35..c5b6280eeff 100644 --- a/hbase-compression/hbase-compression-aircompressor/pom.xml +++ b/hbase-compression/hbase-compression-aircompressor/pom.xml @@ -135,13 +135,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-compression/hbase-compression-lz4/pom.xml b/hbase-compression/hbase-compression-lz4/pom.xml index a374594b14d..c891a6a2a7c 100644 --- a/hbase-compression/hbase-compression-lz4/pom.xml +++ b/hbase-compression/hbase-compression-lz4/pom.xml @@ -124,13 +124,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-compression/hbase-compression-snappy/pom.xml b/hbase-compression/hbase-compression-snappy/pom.xml index 4c69607b37f..069896dbf16 100644 --- a/hbase-compression/hbase-compression-snappy/pom.xml +++ b/hbase-compression/hbase-compression-snappy/pom.xml @@ -124,13 +124,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-compression/hbase-compression-xz/pom.xml b/hbase-compression/hbase-compression-xz/pom.xml index 2cf293be8b5..23452af8183 100644 --- a/hbase-compression/hbase-compression-xz/pom.xml +++ b/hbase-compression/hbase-compression-xz/pom.xml @@ -108,13 +108,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-compression/hbase-compression-zstd/pom.xml b/hbase-compression/hbase-compression-zstd/pom.xml index d1c2adf4981..cb416517c56 100644 --- a/hbase-compression/hbase-compression-zstd/pom.xml +++ b/hbase-compression/hbase-compression-zstd/pom.xml @@ -124,13 +124,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml index cba36477e60..37093291036 100644 --- a/hbase-endpoint/pom.xml +++ b/hbase-endpoint/pom.xml @@ 
-228,13 +228,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index dc1ea499dac..b6b3dfc4dc1 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -225,13 +225,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml index 48f7886790f..b5a821374e6 100644 --- a/hbase-hadoop-compat/pom.xml +++ b/hbase-hadoop-compat/pom.xml @@ -1,5 +1,7 @@ - + - 4.0.0 - - hbase-build-configuration - org.apache.hbase - 2.5.0-SNAPSHOT - ../hbase-build-configuration - + 4.0.0 + + hbase-build-configuration + org.apache.hbase + 2.5.0-SNAPSHOT + ../hbase-build-configuration + - hbase-hadoop-compat - Apache HBase - Hadoop Compatibility - + hbase-hadoop-compat + Apache HBase - Hadoop Compatibility + Interfaces to be implemented in order to smooth over hadoop version differences - + - - + + maven-assembly-plugin @@ -43,117 +45,126 @@ true - - - org.apache.maven.plugins - maven-source-plugin - - - net.revelc.code - warbucks-maven-plugin - - - + + + org.apache.maven.plugins + maven-source-plugin + + + net.revelc.code + warbucks-maven-plugin + + + - - - org.apache.hbase - hbase-annotations - test-jar - test - - - org.apache.hbase - hbase-logging - test-jar - test - - - org.apache.hbase - hbase-common - test-jar - test - - - org.apache.hbase.thirdparty - hbase-shaded-miscellaneous - - - - org.slf4j - slf4j-api - - - org.apache.hbase - hbase-metrics-api - - - junit - junit - test - - - org.slf4j - jcl-over-slf4j - test - - - org.slf4j - jul-to-slf4j - test - - - org.slf4j - slf4j-reload4j - test - - - ch.qos.reload4j - reload4j - test - - - - + + + org.apache.hbase + hbase-annotations + test-jar + test + + + org.apache.hbase + hbase-logging + test-jar + test + + + org.apache.hbase + hbase-common + test-jar + test + + + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + + + + org.slf4j + slf4j-api + + + org.apache.hbase + hbase-metrics-api + + + junit + junit + test + + + org.slf4j + jcl-over-slf4j + test + + + org.slf4j + jul-to-slf4j + test + + + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api + test + + + - - skipHadoopCompatTests - - - skipHadoopCompatTests - - - - true - true - - - - eclipse-specific - - - m2e.version - - - - - + + skipHadoopCompatTests + + + skipHadoopCompatTests + + + + true + true + + + + eclipse-specific + + + m2e.version + + + + + - - org.eclipse.m2e - lifecycle-mapping - - - - - - - - - - - - + + org.eclipse.m2e + lifecycle-mapping + + + + + + + + + + + + diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml index ef28d01c0fb..db0b47fb9a5 100644 --- a/hbase-hadoop2-compat/pom.xml +++ b/hbase-hadoop2-compat/pom.xml @@ -180,13 +180,23 @@ limitations under the License. 
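
The test conversions above (TestFailedServersLog, TestJul2Slf4j) all switch to the same log4j2 idiom: mock an org.apache.logging.log4j.core.Appender, attach it to the concrete core Logger, and copy the interesting LogEvent fields inside a doAnswer callback rather than capturing the event object itself, presumably because log4j2 can hand the appender a reusable, mutable event. The following is a minimal, self-contained sketch of that idiom, not part of the patch; it assumes JUnit, Mockito and log4j-core on the test classpath, and the logger name, message and levels are illustrative.

import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.concurrent.atomic.AtomicReference;

public class Log4j2AppenderMockSketch {

  public static void main(String[] args) {
    // Illustrative logger name; the real tests attach to FailedServers or the test class name.
    String loggerName = "org.example.SomeClass";

    // 1. Mock a log4j2 core Appender. getName() and isStarted() must be stubbed,
    //    otherwise the core Logger will not deliver events to the appender.
    org.apache.logging.log4j.core.Appender mockAppender =
      mock(org.apache.logging.log4j.core.Appender.class);
    when(mockAppender.getName()).thenReturn("mockAppender");
    when(mockAppender.isStarted()).thenReturn(true);

    // 2. Copy the fields of interest out of the LogEvent inside the callback; the event
    //    instance should not be inspected after the append call returns.
    AtomicReference<org.apache.logging.log4j.Level> level = new AtomicReference<>();
    AtomicReference<String> msg = new AtomicReference<>();
    doAnswer(invocation -> {
      org.apache.logging.log4j.core.LogEvent event =
        invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
      level.set(event.getLevel());
      msg.set(event.getMessage().getFormattedMessage());
      return null;
    }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class));

    // 3. Make sure the logger level lets the event through, then attach the appender to the
    //    concrete core Logger (the same cast the converted tests use).
    org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName,
      org.apache.logging.log4j.Level.DEBUG);
    org.apache.logging.log4j.core.Logger logger =
      (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
        .getLogger(loggerName);
    logger.addAppender(mockAppender);
    try {
      logger.debug("hello {}", "world");
      verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class));
      assertEquals(org.apache.logging.log4j.Level.DEBUG, level.get());
      assertEquals("hello world", msg.get());
    } finally {
      logger.removeAppender(mockAppender);
    }
  }
}
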
test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-hbtop/pom.xml b/hbase-hbtop/pom.xml index d0d461b0af3..3559382fb91 100644 --- a/hbase-hbtop/pom.xml +++ b/hbase-hbtop/pom.xml @@ -92,13 +92,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml index 3ab2dbe6de7..3e986591be4 100644 --- a/hbase-http/pom.xml +++ b/hbase-http/pom.xml @@ -246,13 +246,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java index 819581735a8..611316d9ec6 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java @@ -45,7 +45,6 @@ import org.apache.hadoop.util.HttpExceptionUtils; import org.apache.hadoop.util.ServletUtil; import org.apache.hadoop.util.Tool; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -307,8 +306,7 @@ public final class LogLevel { /** * A servlet implementation */ - @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) - @InterfaceStability.Unstable + @InterfaceAudience.Private public static class Servlet extends HttpServlet { private static final long serialVersionUID = 1L; diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java index b52129ccdbf..d7889ea4a3b 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; + import java.io.File; import java.io.IOException; import java.net.BindException; @@ -53,9 +54,6 @@ import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.ssl.SSLFactory; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -65,11 +63,11 @@ import org.junit.experimental.categories.Category; /** * Test LogLevel. 
*/ -@Category({MiscTests.class, SmallTests.class}) +@Category({ MiscTests.class, SmallTests.class }) public class TestLogLevel { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLogLevel.class); + HBaseClassTestRule.forClass(TestLogLevel.class); private static String keystoresDir; private static String sslConfDir; @@ -79,9 +77,10 @@ public class TestLogLevel { private static final String logName = TestLogLevel.class.getName(); private static final String protectedPrefix = "protected"; private static final String protectedLogName = protectedPrefix + "." + logName; - private static final Logger log = LogManager.getLogger(logName); + private static final org.apache.logging.log4j.Logger log = + org.apache.logging.log4j.LogManager.getLogger(logName); private final static String PRINCIPAL = "loglevel.principal"; - private final static String KEYTAB = "loglevel.keytab"; + private final static String KEYTAB = "loglevel.keytab"; private static MiniKdc kdc; @@ -111,8 +110,7 @@ public class TestLogLevel { } /** - * Sets up {@link MiniKdc} for testing security. - * Copied from HBaseTestingUtility#setupMiniKdc(). + * Sets up {@link MiniKdc} for testing security. Copied from HBaseTestingUtility#setupMiniKdc(). */ static private MiniKdc setupMiniKdc() throws Exception { Properties conf = MiniKdc.createConf(); @@ -130,7 +128,7 @@ public class TestLogLevel { kdc = new MiniKdc(conf, dir); kdc.start(); } catch (BindException e) { - FileUtils.deleteDirectory(dir); // clean directory + FileUtils.deleteDirectory(dir); // clean directory numTries++; if (numTries == 3) { log.error("Failed setting up MiniKDC. Tried " + numTries + " times."); @@ -156,15 +154,15 @@ public class TestLogLevel { } /** - * Get the SSL configuration. - * This method is copied from KeyStoreTestUtil#getSslConfig() in Hadoop. + * Get the SSL configuration. This method is copied from KeyStoreTestUtil#getSslConfig() in + * Hadoop. * @return {@link Configuration} instance with ssl configs loaded. 
* @param conf to pull client/server SSL settings filename from */ - private static Configuration getSslConfig(Configuration conf){ + private static Configuration getSslConfig(Configuration conf) { Configuration sslConf = new Configuration(false); String sslServerConfFile = conf.get(SSLFactory.SSL_SERVER_CONF_KEY); - String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY); + String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY); sslConf.addResource(sslServerConfFile); sslConf.addResource(sslClientConfFile); sslConf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile); @@ -189,36 +187,29 @@ public class TestLogLevel { public void testCommandOptions() throws Exception { final String className = this.getClass().getName(); - assertFalse(validateCommand(new String[] {"-foo" })); + assertFalse(validateCommand(new String[] { "-foo" })); // fail due to insufficient number of arguments assertFalse(validateCommand(new String[] {})); - assertFalse(validateCommand(new String[] {"-getlevel" })); - assertFalse(validateCommand(new String[] {"-setlevel" })); - assertFalse(validateCommand(new String[] {"-getlevel", "foo.bar:8080" })); + assertFalse(validateCommand(new String[] { "-getlevel" })); + assertFalse(validateCommand(new String[] { "-setlevel" })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080" })); // valid command arguments - assertTrue(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className })); - assertTrue(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" })); - assertTrue(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className })); - assertTrue(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" })); + assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className })); + assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" })); + assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className })); + assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" })); // fail due to the extra argument - assertFalse(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className, "blah" })); - assertFalse(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG", "blah" })); - assertFalse(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className, "-setlevel", "foo.bar:8080", - className })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "blah" })); + assertFalse( + validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG", "blah" })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "-setlevel", + "foo.bar:8080", className })); } /** * Check to see if a command can be accepted. - * * @param args a String array of arguments * @return true if the command can be accepted, false if not. */ @@ -237,40 +228,32 @@ public class TestLogLevel { } /** - * Creates and starts a Jetty server binding at an ephemeral port to run - * LogLevel servlet. + * Creates and starts a Jetty server binding at an ephemeral port to run LogLevel servlet. 
* @param protocol "http" or "https" * @param isSpnego true if SPNEGO is enabled * @return a created HttpServer object * @throws Exception if unable to create or start a Jetty server */ - private HttpServer createServer(String protocol, boolean isSpnego) - throws Exception { - HttpServer.Builder builder = new HttpServer.Builder() - .setName("..") - .addEndpoint(new URI(protocol + "://localhost:0")) - .setFindPort(true) - .setConf(serverConf); + private HttpServer createServer(String protocol, boolean isSpnego) throws Exception { + HttpServer.Builder builder = new HttpServer.Builder().setName("..") + .addEndpoint(new URI(protocol + "://localhost:0")).setFindPort(true).setConf(serverConf); if (isSpnego) { // Set up server Kerberos credentials. // Since the server may fall back to simple authentication, // use ACL to make sure the connection is Kerberos/SPNEGO authenticated. - builder.setSecurityEnabled(true) - .setUsernameConfKey(PRINCIPAL) - .setKeytabConfKey(KEYTAB) - .setACL(new AccessControlList("client")); + builder.setSecurityEnabled(true).setUsernameConfKey(PRINCIPAL).setKeytabConfKey(KEYTAB) + .setACL(new AccessControlList("client")); } // if using HTTPS, configure keystore/truststore properties. if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) { - builder = builder. - keyPassword(sslConf.get("ssl.server.keystore.keypassword")) - .keyStore(sslConf.get("ssl.server.keystore.location"), - sslConf.get("ssl.server.keystore.password"), - sslConf.get("ssl.server.keystore.type", "jks")) - .trustStore(sslConf.get("ssl.server.truststore.location"), - sslConf.get("ssl.server.truststore.password"), - sslConf.get("ssl.server.truststore.type", "jks")); + builder = builder.keyPassword(sslConf.get("ssl.server.keystore.keypassword")) + .keyStore(sslConf.get("ssl.server.keystore.location"), + sslConf.get("ssl.server.keystore.password"), + sslConf.get("ssl.server.keystore.type", "jks")) + .trustStore(sslConf.get("ssl.server.truststore.location"), + sslConf.get("ssl.server.truststore.password"), + sslConf.get("ssl.server.truststore.type", "jks")); } HttpServer server = builder.build(); @@ -279,38 +262,38 @@ public class TestLogLevel { } private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego) - throws Exception { - testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, logName, Level.DEBUG.toString()); + final boolean isSpnego) throws Exception { + testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, + logName, + org.apache.logging.log4j.Level.DEBUG.toString()); } private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego, final String newLevel) - throws Exception { - testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, logName, newLevel); + final boolean isSpnego, final String newLevel) throws Exception { + testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, + logName, + newLevel); } /** * Run both client and server using the given protocol. - * * @param bindProtocol specify either http or https for server * @param connectProtocol specify either http or https for client * @param isSpnego true if SPNEGO is enabled * @throws Exception if client can't accesss server. 
*/ private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego, final String loggerName, final String newLevel) - throws Exception { + final boolean isSpnego, final String loggerName, final String newLevel) throws Exception { if (!LogLevel.isValidProtocol(bindProtocol)) { throw new Exception("Invalid server protocol " + bindProtocol); } if (!LogLevel.isValidProtocol(connectProtocol)) { throw new Exception("Invalid client protocol " + connectProtocol); } - Logger log = LogManager.getLogger(loggerName); - Level oldLevel = log.getLevel(); + org.apache.logging.log4j.Logger log = org.apache.logging.log4j.LogManager.getLogger(loggerName); + org.apache.logging.log4j.Level oldLevel = log.getLevel(); assertNotEquals("Get default Log Level which shouldn't be ERROR.", - Level.ERROR, oldLevel); + org.apache.logging.log4j.Level.ERROR, oldLevel); // configs needed for SPNEGO at server side if (isSpnego) { @@ -331,8 +314,8 @@ public class TestLogLevel { String keytabFilePath = keyTabFile.getAbsolutePath(); - UserGroupInformation clientUGI = UserGroupInformation. - loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath); + UserGroupInformation clientUGI = + UserGroupInformation.loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath); try { clientUGI.doAs((PrivilegedExceptionAction) () -> { // client command line @@ -346,41 +329,37 @@ public class TestLogLevel { } // restore log level - GenericTestUtils.setLogLevel(log, oldLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), oldLevel); } /** - * Run LogLevel command line to start a client to get log level of this test - * class. - * + * Run LogLevel command line to start a client to get log level of this test class. * @param protocol specify either http or https * @param authority daemon's web UI address * @throws Exception if unable to connect */ private void getLevel(String protocol, String authority, String logName) throws Exception { - String[] getLevelArgs = {"-getlevel", authority, logName, "-protocol", protocol}; + String[] getLevelArgs = { "-getlevel", authority, logName, "-protocol", protocol }; CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf); cli.run(getLevelArgs); } /** - * Run LogLevel command line to start a client to set log level of this test - * class to debug. - * + * Run LogLevel command line to start a client to set log level of this test class to debug. * @param protocol specify either http or https * @param authority daemon's web UI address * @throws Exception if unable to run or log level does not change as expected */ private void setLevel(String protocol, String authority, String logName, String newLevel) - throws Exception { - String[] setLevelArgs = {"-setlevel", authority, logName, newLevel, "-protocol", protocol}; + throws Exception { + String[] setLevelArgs = { "-setlevel", authority, logName, newLevel, "-protocol", protocol }; CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf); cli.run(setLevelArgs); - Logger log = LogManager.getLogger(logName); + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getLogger(logName); assertEquals("new level not equal to expected: ", newLevel.toUpperCase(), - log.getEffectiveLevel().toString()); + logger.getLevel().toString()); } @Test @@ -397,7 +376,6 @@ public class TestLogLevel { /** * Test setting log level to "Info". - * * @throws Exception if client can't set log level to INFO. 
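
The getLevel/setLevel helpers above show how the dynamic log level feature is driven from code: build the argument array and hand it to the CLI tool pointed at a daemon's web UI. A hedged usage sketch follows; it is not part of the patch, the host, port and logger name are placeholders, and it assumes the CLI class the test constructs is the tool nested in LogLevel (the sketch sits in the same package to avoid any visibility question).

package org.apache.hadoop.hbase.http.log;

import org.apache.hadoop.conf.Configuration;

public class LogLevelCliSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder authority: the web UI address of a running daemon.
    String authority = "regionserver.example.com:16030";
    // Illustrative logger name.
    String logger = "org.apache.hadoop.hbase.regionserver.HRegionServer";
    Configuration conf = new Configuration();

    // Ask the daemon for the current level of the logger over plain HTTP.
    new LogLevel.CLI(conf)
      .run(new String[] { "-getlevel", authority, logger, "-protocol", "http" });

    // Raise it to DEBUG. For a TLS endpoint the tests pass an ssl-enabled
    // Configuration and "https" instead.
    new LogLevel.CLI(conf)
      .run(new String[] { "-setlevel", authority, logger, "DEBUG", "-protocol", "http" });
  }
}
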
*/ @Test @@ -407,7 +385,6 @@ public class TestLogLevel { /** * Test setting log level to "Error". - * * @throws Exception if client can't set log level to ERROR. */ @Test @@ -417,18 +394,15 @@ public class TestLogLevel { /** * Server runs HTTP, no SPNEGO. - * - * @throws Exception if http client can't access http server, - * or http client can access https server. + * @throws Exception if http client can't access http server, or http client can access https + * server. */ @Test public void testLogLevelByHttp() throws Exception { testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, false); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, - false); - fail("An HTTPS Client should not have succeeded in connecting to a " + - "HTTP server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, false); + fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server"); } catch (SSLException e) { exceptionShouldContains("Unrecognized SSL message", e); } @@ -436,18 +410,15 @@ public class TestLogLevel { /** * Server runs HTTP + SPNEGO. - * - * @throws Exception if http client can't access http server, - * or http client can access https server. + * @throws Exception if http client can't access http server, or http client can access https + * server. */ @Test public void testLogLevelByHttpWithSpnego() throws Exception { testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, true); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, - true); - fail("An HTTPS Client should not have succeeded in connecting to a " + - "HTTP server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, true); + fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server"); } catch (SSLException e) { exceptionShouldContains("Unrecognized SSL message", e); } @@ -455,19 +426,15 @@ public class TestLogLevel { /** * Server runs HTTPS, no SPNEGO. - * - * @throws Exception if https client can't access https server, - * or https client can access http server. + * @throws Exception if https client can't access https server, or https client can access http + * server. */ @Test public void testLogLevelByHttps() throws Exception { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, - false); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, false); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, - false); - fail("An HTTP Client should not have succeeded in connecting to a " + - "HTTPS server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, false); + fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server"); } catch (SocketException e) { exceptionShouldContains("Unexpected end of file from server", e); } @@ -475,32 +442,27 @@ public class TestLogLevel { /** * Server runs HTTPS + SPNEGO. - * - * @throws Exception if https client can't access https server, - * or https client can access http server. + * @throws Exception if https client can't access https server, or https client can access http + * server. 
*/ @Test public void testLogLevelByHttpsWithSpnego() throws Exception { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, - true); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, true); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, - true); - fail("An HTTP Client should not have succeeded in connecting to a " + - "HTTPS server"); - } catch (SocketException e) { + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, true); + fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server"); + } catch (SocketException e) { exceptionShouldContains("Unexpected end of file from server", e); } } /** - * Assert that a throwable or one of its causes should contain the substr in its message. - * - * Ideally we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util - * method which asserts t.toString() contains the substr. As the original throwable may have been - * wrapped in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. - * After stop supporting Hadoop2, this method can be removed and assertion in tests can use - * t.getCause() directly, similar to HADOOP-15280. + * Assert that a throwable or one of its causes should contain the substr in its message. Ideally + * we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util method + * which asserts t.toString() contains the substr. As the original throwable may have been wrapped + * in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. After stop + * supporting Hadoop2, this method can be removed and assertion in tests can use t.getCause() + * directly, similar to HADOOP-15280. */ private static void exceptionShouldContains(String substr, Throwable throwable) { Throwable t = throwable; @@ -512,6 +474,6 @@ public class TestLogLevel { t = t.getCause(); } throw new AssertionError("Expected to find '" + substr + "' but got unexpected exception:" + - StringUtils.stringifyException(throwable), throwable); + StringUtils.stringifyException(throwable), throwable); } -} +} \ No newline at end of file diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml index 1ff3d65206b..f1ce681c688 100644 --- a/hbase-it/pom.xml +++ b/hbase-it/pom.xml @@ -268,13 +268,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-logging/pom.xml b/hbase-logging/pom.xml index b6eb495ad54..85f64b58045 100644 --- a/hbase-logging/pom.xml +++ b/hbase-logging/pom.xml @@ -38,7 +38,7 @@ src/test/resources - log4j.properties + log4j2.properties @@ -80,7 +80,7 @@ org.slf4j - slf4j-reload4j + jcl-over-slf4j test @@ -89,9 +89,24 @@ provided - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-api provided + + org.apache.logging.log4j + log4j-core + provided + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api + test + diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java deleted file mode 100644 index 939b453c8d4..00000000000 --- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase; - -import org.apache.yetus.audience.InterfaceAudience; - -/** - * Logger class that buffers before trying to log to the specified console. - */ -@InterfaceAudience.Private -public class AsyncConsoleAppender extends org.apache.log4j.AsyncAppender { - private final org.apache.log4j.ConsoleAppender consoleAppender; - - public AsyncConsoleAppender() { - super(); - consoleAppender = new org.apache.log4j.ConsoleAppender( - new org.apache.log4j.PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n")); - this.addAppender(consoleAppender); - } - - public void setTarget(String value) { - consoleAppender.setTarget(value); - } - - @Override - public void activateOptions() { - consoleAppender.activateOptions(); - super.activateOptions(); - } - -} diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java index 28d29bf3013..b0711d7e8f1 100644 --- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java +++ b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java @@ -19,16 +19,15 @@ package org.apache.hadoop.hbase.logging; import java.io.File; import java.io.IOException; -import java.util.Enumeration; import java.util.HashSet; import java.util.Set; import org.apache.yetus.audience.InterfaceAudience; /** - * The actual class for operating on log4j. + * The actual class for operating on log4j2. *

* This class will depend on log4j directly, so callers should not use this class directly to avoid - * introducing log4j dependencies to downstream users. Please call the methods in + * introducing log4j2 dependencies to downstream users. Please call the methods in * {@link Log4jUtils}, as they will call the methods here through reflection. */ @InterfaceAudience.Private @@ -38,32 +37,53 @@ final class InternalLog4jUtils { } static void setLogLevel(String loggerName, String levelName) { - org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName); - org.apache.log4j.Level level = org.apache.log4j.Level.toLevel(levelName.toUpperCase()); + org.apache.logging.log4j.Level level = + org.apache.logging.log4j.Level.toLevel(levelName.toUpperCase()); if (!level.toString().equalsIgnoreCase(levelName)) { throw new IllegalArgumentException("Unsupported log level " + levelName); } - logger.setLevel(level); + org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName, level); } static String getEffectiveLevel(String loggerName) { - org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName); - return logger.getEffectiveLevel().toString(); + org.apache.logging.log4j.Logger logger = + org.apache.logging.log4j.LogManager.getLogger(loggerName); + return logger.getLevel().name(); } static Set getActiveLogFiles() throws IOException { Set ret = new HashSet<>(); - org.apache.log4j.Appender a; - @SuppressWarnings("unchecked") - Enumeration e = - org.apache.log4j.Logger.getRootLogger().getAllAppenders(); - while (e.hasMoreElements()) { - a = e.nextElement(); - if (a instanceof org.apache.log4j.FileAppender) { - org.apache.log4j.FileAppender fa = (org.apache.log4j.FileAppender) a; - String filename = fa.getFile(); - ret.add(new File(filename)); - } + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); + if (!(logger instanceof org.apache.logging.log4j.core.Logger)) { + return ret; + } + org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger; + for (org.apache.logging.log4j.core.Appender appender : coreLogger.getAppenders().values()) { + if (appender instanceof org.apache.logging.log4j.core.appender.FileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.FileAppender) appender).getFileName(); + ret.add(new File(fileName)); + } else if (appender instanceof org.apache.logging.log4j.core.appender.AbstractFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.AbstractFileAppender) appender).getFileName(); + ret.add(new File(fileName)); + } else if (appender instanceof org.apache.logging.log4j.core.appender.RollingFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.RollingFileAppender) appender).getFileName(); + ret.add(new File(fileName)); + } else + if (appender instanceof org.apache.logging.log4j.core.appender.RandomAccessFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.RandomAccessFileAppender) appender) + .getFileName(); + ret.add(new File(fileName)); + } else + if (appender instanceof org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) appender) + .getFileName(); + ret.add(new File(fileName)); + } } return ret; } diff --git a/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java new file mode 100644 
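
The InternalLog4jUtils rewrite above, like the TestLog4jUtils and TestLogLevel changes earlier in the patch, replaces log4j 1.x Logger.setLevel/getEffectiveLevel with the log4j2 equivalents: read the level from the Logger, change it through Configurator. A minimal round-trip sketch of that pattern, not part of the patch, assuming log4j-core on the classpath (the logger name is illustrative):

public class Log4j2LevelSketch {
  public static void main(String[] args) {
    String name = "org.apache.zookeeper"; // illustrative logger name
    org.apache.logging.log4j.Logger logger =
      org.apache.logging.log4j.LogManager.getLogger(name);

    // With log4j2 the level is read from the Logger itself...
    org.apache.logging.log4j.Level before = logger.getLevel();

    // ...but changed through the Configurator, since the log4j-api Logger has no setLevel().
    org.apache.logging.log4j.core.config.Configurator.setLevel(name,
      org.apache.logging.log4j.Level.OFF);
    System.out.println(name + ": " + before + " -> " + logger.getLevel());

    // Restore the original level, mirroring the cleanup the tests above perform.
    org.apache.logging.log4j.core.config.Configurator.setLevel(name, before);
  }
}

Downstream HBase code is still expected to go through Log4jUtils for this, as the InternalLog4jUtils javadoc above states and as the LoadTestTool hunk later in this patch does, so that only hbase-logging carries a hard log4j2 dependency.
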
index 00000000000..7b3876ce083 --- /dev/null +++ b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java @@ -0,0 +1,288 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.log4j; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InterruptedIOException; +import java.io.Writer; + +/** + * Just a copy of the old log4j12 FileAppender. The ContainerLogAppender for YARN NodeManager needs + * this class but the log4j-1.2-api bridge does not provide it which causes the UTs in + * hbase-mapreduce module to fail if we start a separated MR cluster. + */ +public class FileAppender extends WriterAppender { + + /** + * Controls file truncatation. The default value for this variable is true, meaning + * that by default a FileAppender will append to an existing file and not truncate + * it. + *

+ * This option is meaningful only if the FileAppender opens the file. + */ + protected boolean fileAppend = true; + + /** + * The name of the log file. + */ + protected String fileName = null; + + /** + * Do we do bufferedIO? + */ + protected boolean bufferedIO = false; + + /** + * Determines the size of IO buffer be. Default is 8K. + */ + protected int bufferSize = 8 * 1024; + + /** + * The default constructor does not do anything. + */ + public FileAppender() { + } + + /** + * Instantiate a FileAppender and open the file designated by fileName. + * The opened filename will become the output destination for this appender. + *

+ * If the append parameter is true, the file will be appended to. Otherwise, the file + * designated by fileName will be truncated before being opened. + *

+ * If the bufferedIO parameter is true, then buffered IO will be used to + * write to the output file. + */ + public FileAppender(Layout layout, String fileName, boolean append, boolean bufferedIO, + int bufferSize) throws IOException { + this.layout = layout; + this.setFile(fileName, append, bufferedIO, bufferSize); + } + + /** + * Instantiate a FileAppender and open the file designated by fileName. The opened + * filename will become the output destination for this appender. + *

+ * If the append parameter is true, the file will be appended to. Otherwise, the file + * designated by fileName will be truncated before being opened. + */ + public FileAppender(Layout layout, String fileName, boolean append) throws IOException { + this.layout = layout; + this.setFile(fileName, append, false, bufferSize); + } + + /** + * Instantiate a FileAppender and open the file designated by filename. The opened + * filename will become the output destination for this appender. + *

+ * The file will be appended to. + */ + public FileAppender(Layout layout, String fileName) throws IOException { + this(layout, fileName, true); + } + + /** + * The File property takes a string value which should be the name of the file to append + * to. + *

+ * Note that the special values "System.out" or "System.err" are no + * longer honored. + *

+ * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the + * options are set. + */ + public void setFile(String file) { + // Trim spaces from both ends. The users probably does not want + // trailing spaces in file names. + String val = file.trim(); + fileName = val; + } + + /** + * Returns the value of the Append option. + */ + public boolean getAppend() { + return fileAppend; + } + + /** Returns the value of the File option. */ + public String getFile() { + return fileName; + } + + /** + * If the value of File is not null, then {@link #setFile} is called with the + * values of File and Append properties. + * @since 0.8.1 + */ + @Override + public void activateOptions() { + if (fileName != null) { + try { + setFile(fileName, fileAppend, bufferedIO, bufferSize); + } catch (java.io.IOException e) { + errorHandler.error("setFile(" + fileName + "," + fileAppend + ") call failed.", e, + org.apache.log4j.spi.ErrorCode.FILE_OPEN_FAILURE); + } + } + } + + /** + * Closes the previously opened file. + */ + protected void closeFile() { + if (this.qw != null) { + try { + this.qw.close(); + } catch (java.io.IOException e) { + if (e instanceof InterruptedIOException) { + Thread.currentThread().interrupt(); + } + // Exceptionally, it does not make sense to delegate to an + // ErrorHandler. Since a closed appender is basically dead. + } + } + } + + /** + * Get the value of the BufferedIO option. + *

+ * BufferedIO will significatnly increase performance on heavily loaded systems. + */ + public boolean getBufferedIO() { + return this.bufferedIO; + } + + /** + * Get the size of the IO buffer. + */ + public int getBufferSize() { + return this.bufferSize; + } + + /** + * The Append option takes a boolean value. It is set to true by default. If + * true, then File will be opened in append mode by {@link #setFile setFile} (see + * above). Otherwise, {@link #setFile setFile} will open File in truncate mode. + *

+ * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the + * options are set. + */ + public void setAppend(boolean flag) { + fileAppend = flag; + } + + /** + * The BufferedIO option takes a boolean value. It is set to false by default. + * If true, then File will be opened and the resulting {@link java.io.Writer} wrapped + * around a {@link BufferedWriter}. BufferedIO will significatnly increase performance on heavily + * loaded systems. + */ + public void setBufferedIO(boolean bufferedIO) { + this.bufferedIO = bufferedIO; + if (bufferedIO) { + immediateFlush = false; + } + } + + /** + * Set the size of the IO buffer. + */ + public void setBufferSize(int bufferSize) { + this.bufferSize = bufferSize; + } + + /** + *

+ * Sets and opens the file where the log output will go. The specified file must be + * writable. + *

+ * If there was already an opened file, then the previous file is closed first. + *

+ * Do not use this method directly. To configure a FileAppender or one of its subclasses, set + * its properties one by one and then call activateOptions. + * @param fileName The path to the log file. + * @param append If true will append to fileName. Otherwise will truncate fileName. + */ + public synchronized void setFile(String fileName, boolean append, boolean bufferedIO, + int bufferSize) throws IOException { + + // It does not make sense to have immediate flush and bufferedIO. + if (bufferedIO) { + setImmediateFlush(false); + } + + reset(); + FileOutputStream ostream = null; + try { + // + // attempt to create file + // + ostream = new FileOutputStream(fileName, append); + } catch (FileNotFoundException ex) { + // + // if parent directory does not exist then + // attempt to create it and try to create file + // see bug 9150 + // + String parentName = new File(fileName).getParent(); + if (parentName != null) { + File parentDir = new File(parentName); + if (!parentDir.exists() && parentDir.mkdirs()) { + ostream = new FileOutputStream(fileName, append); + } else { + throw ex; + } + } else { + throw ex; + } + } + Writer fw = createWriter(ostream); + if (bufferedIO) { + fw = new BufferedWriter(fw, bufferSize); + } + this.setQWForFiles(fw); + this.fileName = fileName; + this.fileAppend = append; + this.bufferedIO = bufferedIO; + this.bufferSize = bufferSize; + writeHeader(); + } + + /** + * Sets the quiet writer being used. This method is overriden by {@code RollingFileAppender}. + */ + protected void setQWForFiles(Writer writer) { + this.qw = new org.apache.log4j.helpers.QuietWriter(writer, errorHandler); + } + + /** + * Close any previously opened file and call the parent's reset. + */ + @Override + protected void reset() { + closeFile(); + this.fileName = null; + super.reset(); + } +} diff --git a/hbase-logging/src/test/resources/log4j.properties b/hbase-logging/src/test/resources/log4j.properties deleted file mode 100644 index c322699ced2..00000000000 --- a/hbase-logging/src/test/resources/log4j.properties +++ /dev/null @@ -1,68 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log - -# Define the root logger to the system property "hbase.root.logger". 
-log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n - -# Custom Logging levels - -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG - -log4j.logger.org.apache.hadoop=WARN -log4j.logger.org.apache.zookeeper=ERROR -log4j.logger.org.apache.hadoop.hbase=DEBUG - -#These settings are workarounds against spurious logs from the minicluster. -#See HBASE-4709 -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN -log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN -# Enable this to get detailed connection error/retry logging. -# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE diff --git a/hbase-logging/src/test/resources/log4j2.properties b/hbase-logging/src/test/resources/log4j2.properties new file mode 100644 index 00000000000..f63c8701e35 --- /dev/null +++ b/hbase-logging/src/test/resources/log4j2.properties @@ -0,0 +1,68 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ + +status = debug +dest = err +name = PropertiesConfig + +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = Console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %C{2}(%L): %m%n + +rootLogger = INFO,Console + +logger.hadoop.name = org.apache.hadoop +logger.hadoop.level = WARN + +logger.zookeeper.name = org.apache.zookeeper +logger.zookeeper.level = ERROR + +logger.hbase.name = org.apache.hadoop.hbase +logger.hbase.level = DEBUG + +# These settings are workarounds against spurious logs from the minicluster. 
See HBASE-4709 +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapter.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapter.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +logger.MBeans.name = org.apache.hadoop.metrics2.util.MBeans +logger.MBeans.level = WARN + +logger.directory.name = org.apache.directory +logger.directory.level = WARN +logger.directory.additivity = false + +logger.netty.name = org.apache.hbase.thirdparty.io.netty.channel +logger.netty.level = DEBUG + +# For testing where we want to capture the log message of these special loggers +logger.FailedServers.name = org.apache.hadoop.hbase.ipc.FailedServers +logger.FailedServers.level = DEBUG + +logger.RSRpcServices.name = org.apache.hadoop.hbase.regionserver.RSRpcServices +logger.RSRpcServices.level = DEBUG + +logger.TestJul2Slf4j.name = org.apache.hadoop.hbase.logging.TestJul2Slf4j +logger.TestJul2Slf4j.level = DEBUG diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml index 33969c78363..9d7a6fc4c08 100644 --- a/hbase-mapreduce/pom.xml +++ b/hbase-mapreduce/pom.xml @@ -306,13 +306,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java index 4d27f9b6e95..5e61a4b1268 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java @@ -24,24 +24,16 @@ import java.util.Arrays; import java.util.List; import java.util.Properties; import java.util.concurrent.atomic.AtomicReference; - import javax.crypto.spec.SecretKeySpec; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.zookeeper.ZooKeeper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Durability; @@ -51,6 +43,7 @@ import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.log.HBaseMarkers; +import org.apache.hadoop.hbase.logging.Log4jUtils; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.security.HBaseKerberosUtils; @@ -60,6 +53,10 @@ import org.apache.hadoop.hbase.security.access.Permission; import 
org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL; import org.apache.hadoop.util.ToolRunner; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.zookeeper.ZooKeeper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.org.apache.commons.cli.AlreadySelectedException; import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; @@ -582,7 +579,7 @@ public class LoadTestTool extends AbstractHBaseTool { @Override protected int doWork() throws IOException { if (!isVerbose) { - LogManager.getLogger(ZooKeeper.class.getName()).setLevel(Level.WARN); + Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN"); } if (numTables > 1) { return parallelLoadTables(); diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml index 7cd6f077a87..7865b237136 100644 --- a/hbase-metrics-api/pom.xml +++ b/hbase-metrics-api/pom.xml @@ -133,13 +133,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml index 98663550d40..ee0ec004b94 100644 --- a/hbase-metrics/pom.xml +++ b/hbase-metrics/pom.xml @@ -141,13 +141,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml index 1dfcc07c6d8..f04329ddc21 100644 --- a/hbase-procedure/pom.xml +++ b/hbase-procedure/pom.xml @@ -130,13 +130,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml index b7cf14e24f1..8b269c8644e 100644 --- a/hbase-protocol-shaded/pom.xml +++ b/hbase-protocol-shaded/pom.xml @@ -162,10 +162,9 @@ com.google.j2objc:j2objc-annotations org.codehaus.mojo:animal-sniffer-annotations junit:junit - log4j:log4j - ch.qos.reload4j:* commons-logging:commons-logging - org.slf4j:slf4j-api + org.slf4j:* + org.apache.logging.log4j:* org.apache.yetus:audience-annotations com.github.stephenc.fingbugs:* com.github.spotbugs:* diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml index 3264af51714..593a79ae738 100644 --- a/hbase-replication/pom.xml +++ b/hbase-replication/pom.xml @@ -136,13 +136,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml index dbe52ccef00..31885e867d0 100644 --- a/hbase-rest/pom.xml +++ b/hbase-rest/pom.xml @@ -372,13 +372,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml index 9b333bebe09..f30fad9ef66 100644 --- a/hbase-rsgroup/pom.xml +++ b/hbase-rsgroup/pom.xml @@ -181,13 +181,23 @@ test - org.slf4j - slf4j-reload4j + 
org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml index bcf5e5f62b2..99ab7a43b05 100644 --- a/hbase-server/pom.xml +++ b/hbase-server/pom.xml @@ -533,13 +533,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index cd0fac2eeee..7d056916326 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -696,7 +696,6 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * This is used before starting HDFS and map-reduce mini-clusters Run something like the below to * check for the likes of '/tmp' references -- i.e. references outside of the test data dir -- in * the conf. - * *

    * Configuration conf = TEST_UTIL.getConfiguration();
    * for (Iterator<Map.Entry<String, String>> i = conf.iterator(); i.hasNext();) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
index c4bf8af5c82..60d88207f50 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
@@ -28,11 +28,10 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -60,10 +59,13 @@ public class TestAsyncTableBatchRetryImmediately {
 
   private static AsyncConnection CONN;
 
+  private static String LOG_LEVEL;
+
   @BeforeClass
   public static void setUp() throws Exception {
     // disable the debug log to avoid flooding the output
-    LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
     UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, 1024);
     UTIL.startMiniCluster(1);
     Table table = UTIL.createTable(TABLE_NAME, FAMILY);
@@ -78,6 +80,9 @@ public class TestAsyncTableBatchRetryImmediately {
 
   @AfterClass
   public static void tearDown() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     CONN.close();
     UTIL.shutdownMiniCluster();
   }
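
The hunk above swaps direct log4j 1.x LogManager/Level calls for the Log4jUtils facade and restores the saved level in tearDown. A minimal sketch of that save-and-restore idiom, reusing only the Log4jUtils methods visible in this patch; the helper class and the logger passed in are illustrative, not part of the patch:

import org.apache.hadoop.hbase.logging.Log4jUtils;

// Hypothetical helper: quiet a noisy logger for one block of work, then restore it.
public class QuietLoggerSketch {
  public static void runQuietly(String loggerName, Runnable work) {
    // Remember the current effective level so it can be restored afterwards.
    String saved = Log4jUtils.getEffectiveLevel(loggerName);
    Log4jUtils.setLogLevel(loggerName, "INFO");
    try {
      work.run();
    } finally {
      // Restore the original level so later tests see the configured verbosity.
      Log4jUtils.setLogLevel(loggerName, saved);
    }
  }
}
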
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
index 20ab2f92613..515d533e678 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
@@ -64,15 +65,18 @@ public class TestMultiRespectsLimits {
       CompatibilityFactory.getInstance(MetricsAssertHelper.class);
   private final static byte[] FAMILY = Bytes.toBytes("D");
   public static final int MAX_SIZE = 100;
+  private static String LOG_LEVEL;
 
   @Rule
   public TestName name = new TestName();
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-    TEST_UTIL.getConfiguration().setLong(
-        HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
-        MAX_SIZE);
+    // disable the debug log to avoid flooding the output
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
+    TEST_UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
+      MAX_SIZE);
 
     // Only start on regionserver so that all regions are on the same server.
     TEST_UTIL.startMiniCluster(1);
@@ -80,6 +84,9 @@ public class TestMultiRespectsLimits {
 
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     TEST_UTIL.shutdownMiniCluster();
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index 941d921481d..a45804a4515 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.RPCTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.After;
@@ -82,10 +83,8 @@ public class TestProtoBufRpc {
     this.conf = HBaseConfiguration.create();
     this.conf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY,
         rpcServerImpl);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer")
-      .setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace")
-      .setLevel(org.apache.log4j.Level.TRACE);
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer", "ERROR");
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer.trace", "TRACE");
     // Create server side implementation
     // Get RPC server for server side implementation
     this.server = RpcServerFactory.createRpcServer(null, "testrpc",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
index 2d66106a3d2..122517574f7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
@@ -35,17 +35,19 @@ import org.mockito.Mockito;
 public class TestRpcServerTraceLogging {
 
   @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule
-      .forClass(TestRpcServerTraceLogging.class);
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestRpcServerTraceLogging.class);
 
-  static org.apache.log4j.Logger rpcServerLog = org.apache.log4j.Logger.getLogger(RpcServer.class);
+  private static final org.apache.logging.log4j.core.Logger rpcServerLog =
+    (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RpcServer.class);
 
   static final String TRACE_LOG_MSG =
-      "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }"
-          + " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } "
-      + "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } "
-      + "number_of_rows: 2147483647 close_scanner: false client_handles_partials: "
-      + "true client_handles_heartbeats: true track_scan_metrics: false";
+    "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }" +
+      " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } " +
+      "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } " +
+      "number_of_rows: 2147483647 close_scanner: false client_handles_partials: " +
+      "true client_handles_heartbeats: true track_scan_metrics: false";
 
   static final int TRACE_LOG_LENGTH = TRACE_LOG_MSG.length();
 
@@ -62,7 +64,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOff() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.DEBUG);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.DEBUG);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(150 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -72,7 +74,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOn() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(250 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -82,7 +84,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOnLargeMax() {
     conf.setInt("hbase.ipc.trace.log.max.length", 2000);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(TRACE_LOG_LENGTH, truncatedString.length());
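
Where a test needs the logger implementation itself rather than a level string, the pattern above casts the log4j2 API Logger returned by LogManager.getLogger(...) to org.apache.logging.log4j.core.Logger before calling setLevel. A small sketch of that cast-and-set pattern, assuming log4j-core is the bound implementation (the cast fails otherwise); the message text is illustrative:

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;

public class TraceLevelSketch {
  public static void main(String[] args) {
    // The API-level LogManager returns a log4j2 API Logger; programmatic level
    // changes need the core Logger, hence the cast.
    org.apache.logging.log4j.core.Logger log =
      (org.apache.logging.log4j.core.Logger) LogManager
        .getLogger("org.apache.hadoop.ipc.HBaseServer.trace");
    log.setLevel(Level.TRACE);
    log.trace("trace logging enabled for {}", log.getName());
  }
}
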
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 6077a23a3c3..d494dd34762 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -7055,7 +7055,7 @@ public class TestHRegion {
     // using small heart beat cells
     conf.setLong(StoreScanner.HBASE_CELLS_SCANNED_PER_HEARTBEAT_CHECK, 2);
 
-    region = HBaseTestingUtil
+    region = HBaseTestingUtility
       .createRegionAndWAL(RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build(),
         TEST_UTIL.getDataTestDir(), conf, tableDescriptor);
     assertNotNull(region);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
index e2dcac08122..5e2679e4111 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
@@ -20,14 +20,16 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.reset;
-import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -36,10 +38,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -47,8 +45,9 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
@@ -80,7 +79,7 @@ public class TestMultiLogThreshold {
   private HRegionServer rs;
   private RSRpcServices services;
 
-  private Appender appender;
+  private org.apache.logging.log4j.core.Appender appender;
 
   @Parameterized.Parameter
   public static boolean rejectLargeBatchOp;
@@ -90,6 +89,21 @@ public class TestMultiLogThreshold {
     return Arrays.asList(new Object[] { false }, new Object[] { true });
   }
 
+  private final class LevelAndMessage {
+    final org.apache.logging.log4j.Level level;
+
+    final String msg;
+
+    public LevelAndMessage(org.apache.logging.log4j.Level level, String msg) {
+      this.level = level;
+      this.msg = msg;
+    }
+
+  }
+
+  // log4j2 will reuse the LogEvent so we need to copy the level and message out.
+  private BlockingDeque<LevelAndMessage> logs = new LinkedBlockingDeque<>();
+
   @Before
   public void setupTest() throws Exception {
     util = new HBaseTestingUtility();
@@ -100,13 +114,28 @@ public class TestMultiLogThreshold {
     util.startMiniCluster();
     util.createTable(NAME, TEST_FAM);
     rs = util.getRSForFirstRegionInTable(NAME);
-    appender = mock(Appender.class);
-    LogManager.getLogger(RSRpcServices.class).addAppender(appender);
+    appender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(appender.getName()).thenReturn("mockAppender");
+    when(appender.isStarted()).thenReturn(true);
+    doAnswer(new Answer<Void>() {
+
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        org.apache.logging.log4j.core.LogEvent logEvent =
+          invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+        logs.add(
+          new LevelAndMessage(logEvent.getLevel(), logEvent.getMessage().getFormattedMessage()));
+        return null;
+      }
+    }).when(appender).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).addAppender(appender);
   }
 
   @After
   public void tearDown() throws Exception {
-    LogManager.getLogger(RSRpcServices.class).removeAppender(appender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).removeAppender(appender);
     util.shutdownMiniCluster();
   }
 
@@ -149,17 +178,16 @@ public class TestMultiLogThreshold {
   }
 
   private void assertLogBatchWarnings(boolean expected) {
-    ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
-    verify(appender, atLeastOnce()).doAppend(captor.capture());
+    assertFalse(logs.isEmpty());
     boolean actual = false;
-    for (LoggingEvent event : captor.getAllValues()) {
-      if (event.getLevel() == Level.WARN &&
-        event.getRenderedMessage().contains("Large batch operation detected")) {
+    for (LevelAndMessage event : logs) {
+      if (event.level == org.apache.logging.log4j.Level.WARN &&
+        event.msg.contains("Large batch operation detected")) {
         actual = true;
         break;
       }
     }
-    reset(appender);
+    logs.clear();
     assertEquals(expected, actual);
   }
 
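
The rewritten test replaces the log4j 1.x Appender mock with a mocked log4j2 core Appender and, as the inline comment notes, copies the level and formatted message out of each LogEvent because log4j2 may reuse the event object. A standalone sketch of that capture pattern, using only the Mockito and log4j2 calls that appear in the hunk; class and logger names are illustrative:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.Logger;

// Hypothetical capture helper: collect formatted messages logged against one logger.
public class EventCaptureSketch {
  public static Queue<String> attachCapture(String loggerName) {
    Queue<String> messages = new ConcurrentLinkedQueue<>();
    Appender appender = mock(Appender.class);
    // log4j2 ignores appenders that report themselves as not started.
    when(appender.getName()).thenReturn("capture");
    when(appender.isStarted()).thenReturn(true);
    doAnswer(invocation -> {
      LogEvent event = invocation.getArgument(0, LogEvent.class);
      // Copy the rendered message out immediately; the LogEvent may be reused.
      messages.add(event.getMessage().getFormattedMessage());
      return null;
    }).when(appender).append(any(LogEvent.class));
    ((Logger) LogManager.getLogger(loggerName)).addAppender(appender);
    return messages;
  }
}
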
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
index 4d9bb68ac0c..3524a72a64c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
@@ -42,11 +42,6 @@ import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.WriterAppender;
 import org.apache.zookeeper.KeeperException;
 import org.junit.After;
 import org.junit.Before;
@@ -56,6 +51,8 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 @Category(LargeTests.class)
 public class TestRegionServerReportForDuty {
 
@@ -91,26 +88,15 @@ public class TestRegionServerReportForDuty {
     testUtil.shutdownMiniDFSCluster();
   }
 
-  /**
-   * LogCapturer is similar to {@link org.apache.hadoop.test.GenericTestUtils.LogCapturer}
-   * except that this implementation has a default appender to the root logger.
-   * Hadoop 2.8+ supports the default appender in the LogCapture it ships and this can be replaced.
-   * TODO: This class can be removed after we upgrade Hadoop dependency.
-   */
-  static class LogCapturer {
+  private static class LogCapturer {
     private StringWriter sw = new StringWriter();
-    private WriterAppender appender;
-    private org.apache.log4j.Logger logger;
+    private org.apache.logging.log4j.core.appender.WriterAppender appender;
+    private org.apache.logging.log4j.core.Logger logger;
 
-    LogCapturer(org.apache.log4j.Logger logger) {
+    LogCapturer(org.apache.logging.log4j.core.Logger logger) {
       this.logger = logger;
-      Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout");
-      if (defaultAppender == null) {
-        defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console");
-      }
-      final Layout layout = (defaultAppender == null) ? new PatternLayout() :
-          defaultAppender.getLayout();
-      this.appender = new WriterAppender(layout, sw);
+      this.appender = org.apache.logging.log4j.core.appender.WriterAppender.newBuilder()
+        .setName("test").setTarget(sw).build();
       this.logger.addAppender(this.appender);
     }
 
@@ -146,7 +132,9 @@ public class TestRegionServerReportForDuty {
     master = cluster.addMaster();
     master.start();
 
-    LogCapturer capturer = new LogCapturer(org.apache.log4j.Logger.getLogger(HRegionServer.class));
+    LogCapturer capturer =
+      new LogCapturer((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+        .getLogger(HRegionServer.class));
     // Set sleep interval relatively low so that exponential backoff is more demanding.
     int msginterval = 100;
     cluster.getConfiguration().setInt("hbase.regionserver.msginterval", msginterval);
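
The simplified LogCapturer above builds a log4j2 WriterAppender over a StringWriter in place of the old log4j 1.x WriterAppender(layout, sw) constructor. A minimal standalone sketch of that capture idiom; the appender name and the explicit start() call are assumptions of this sketch, not lines taken from the patch:

import java.io.StringWriter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.appender.WriterAppender;

public class StringWriterCaptureSketch {
  public static void main(String[] args) {
    StringWriter sw = new StringWriter();
    // Route formatted events for one logger into an in-memory writer.
    WriterAppender appender =
      WriterAppender.newBuilder().setName("capture").setTarget(sw).build();
    appender.start(); // assumption: start the appender before attaching it
    Logger logger = (Logger) LogManager.getLogger(StringWriterCaptureSketch.class);
    logger.addAppender(appender);
    try {
      logger.error("report for duty failed");
      System.out.println(sw.toString().contains("report for duty failed"));
    } finally {
      logger.removeAppender(appender);
    }
  }
}
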
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
index 45e9c14ae1f..2186935ec28 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
@@ -26,24 +26,36 @@ import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * This is not a unit test. It is not run as part of the general unit test suite. It is for
+ * comparing compaction policies. You must run it explicitly;
+ * e.g. mvn test -Dtest=PerfTestCompactionPolicies
+ */
 @Category({RegionServerTests.class, MediumTests.class})
 @RunWith(Parameterized.class)
 public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(PerfTestCompactionPolicies.class);
+
   private final RatioBasedCompactionPolicy cp;
   private final StoreFileListGenerator generator;
   private final HStore store;
@@ -119,12 +131,9 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
     this.ratio = inRatio;
 
     // Hide lots of logging so the system out is usable as a tab delimited file.
-    org.apache.log4j.Logger.getLogger(CompactionConfiguration.class).
-        setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class).
-        setLevel(org.apache.log4j.Level.ERROR);
-
-    org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR);
+    Log4jUtils.setLogLevel(CompactionConfiguration.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(RatioBasedCompactionPolicy.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(cpClass.getName(), "ERROR");
 
 
     Configuration configuration = HBaseConfiguration.create();
@@ -196,7 +205,8 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
     HStore s = mock(HStore.class);
     when(s.getStoreFileTtl()).thenReturn(Long.MAX_VALUE);
     when(s.getBlockingFileCount()).thenReturn(7L);
+    when(s.getRegionInfo()).thenReturn(RegionInfoBuilder.FIRST_META_REGIONINFO);
     return s;
   }
 
-}
+}
\ No newline at end of file
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
index 546643542aa..9eb543858d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
@@ -27,15 +27,17 @@ import static org.mockito.ArgumentMatchers.argThat;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.ArgumentMatchers.isA;
 import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -50,9 +52,6 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Appender;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -60,19 +59,14 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
 import org.mockito.ArgumentMatcher;
-import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
-import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 
-@RunWith(MockitoJUnitRunner.class)
-@Category({LargeTests.class})
+@Category({ LargeTests.class })
 public class TestCanaryTool {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
-      HBaseClassTestRule.forClass(TestCanaryTool.class);
+    HBaseClassTestRule.forClass(TestCanaryTool.class);
 
   private HBaseTestingUtility testingUtility;
   private static final byte[] FAMILY = Bytes.toBytes("f");
@@ -81,22 +75,26 @@ public class TestCanaryTool {
   @Rule
   public TestName name = new TestName();
 
+  private org.apache.logging.log4j.core.Appender mockAppender;
+
   @Before
   public void setUp() throws Exception {
     testingUtility = new HBaseTestingUtility();
     testingUtility.startMiniCluster();
-    LogManager.getRootLogger().addAppender(mockAppender);
+    mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(mockAppender.getName()).thenReturn("mockAppender");
+    when(mockAppender.isStarted()).thenReturn(true);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
   }
 
   @After
   public void tearDown() throws Exception {
     testingUtility.shutdownMiniCluster();
-    LogManager.getRootLogger().removeAppender(mockAppender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender);
   }
 
-  @Mock
-  Appender mockAppender;
-
   @Test
   public void testBasicZookeeperCanaryWorks() throws Exception {
     final String[] args = { "-t", "10000", "-zookeeper" };
@@ -105,7 +103,8 @@ public class TestCanaryTool {
 
   @Test
   public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception {
-    final String[] args = { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" };
+    final String[] args =
+      { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" };
     testZookeeperCanaryWithArgs(args);
   }
 
@@ -114,7 +113,7 @@ public class TestCanaryTool {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -155,7 +154,7 @@ public class TestCanaryTool {
       // the test table has two column family. If readAllCF set true,
       // we expect read count is double of region count
       int expectedReadCount =
-          readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
+        readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
       assertEquals("canary region success count should equal total expected read count",
         expectedReadCount, sink.getReadSuccessCount());
       Map<String, List<CanaryTool.RegionTaskResult>> regionMap = sink.getRegionMap();
@@ -183,7 +182,7 @@ public class TestCanaryTool {
     TableName tableName = TableName.valueOf("testCanaryRegionTaskResult");
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -212,7 +211,7 @@ public class TestCanaryTool {
     assertFalse("verify region map has size > 0", regionMap.isEmpty());
 
     for (String regionName : regionMap.keySet()) {
-      for (CanaryTool.RegionTaskResult res: regionMap.get(regionName)) {
+      for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) {
         assertNotNull("verify getRegionNameAsString()", regionName);
         assertNotNull("verify getRegionInfo()", res.getRegionInfo());
         assertNotNull("verify getTableName()", res.getTableName());
@@ -235,24 +234,25 @@ public class TestCanaryTool {
 
   // Ignore this test. It fails w/ the below on some mac os x.
   // [ERROR] Failures:
-  // [ERROR]   TestCanaryTool.testReadTableTimeouts:216
+  // [ERROR] TestCanaryTool.testReadTableTimeouts:216
   // Argument(s) are different! Wanted:
   // mockAppender.doAppend(
   // 
-  //      );
-  //      -> at org.apache.hadoop.hbase.tool.TestCanaryTool
-  //          .testReadTableTimeouts(TestCanaryTool.java:216)
-  //      Actual invocations have different arguments:
-  //      mockAppender.doAppend(
-  //          org.apache.log4j.spi.LoggingEvent@2055cfc1
-  //          );
-  //      )
-  //  )
+  // );
+  // -> at org.apache.hadoop.hbase.tool.TestCanaryTool
+  // .testReadTableTimeouts(TestCanaryTool.java:216)
+  // Actual invocations have different arguments:
+  // mockAppender.doAppend(
+  // org.apache.log4j.spi.LoggingEvent@2055cfc1
+  // );
+  // )
+  // )
   //
-  @org.junit.Ignore @Test
+  @org.junit.Ignore
+  @Test
   public void testReadTableTimeouts() throws Exception {
-    final TableName [] tableNames = new TableName[] {TableName.valueOf(name.getMethodName() + "1"),
-      TableName.valueOf(name.getMethodName() + "2")};
+    final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"),
+      TableName.valueOf(name.getMethodName() + "2") };
     // Create 2 test tables.
     for (int j = 0; j < 2; j++) {
       Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY });
@@ -269,8 +269,8 @@ public class TestCanaryTool {
     CanaryTool canary = new CanaryTool(executor, sink);
     String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," +
       tableNames[1].getNameAsString() + "=0";
-    String[] args = {"-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
-      name.getMethodName() + "2"};
+    String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
+      name.getMethodName() + "2" };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class));
     for (int i = 0; i < 2; i++) {
@@ -280,18 +280,21 @@ public class TestCanaryTool {
         sink.getReadLatencyMap().get(tableNames[i].getNameAsString()));
     }
     // One table's timeout is set for 0 ms and thus, should lead to an error.
-    verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("exceeded the configured read timeout.");
-      }
-    }));
-    verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Configured read timeout");
-      }
-    }));
+    verify(mockAppender, times(1))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("exceeded the configured read timeout.");
+        }
+      }));
+    verify(mockAppender, times(2))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage().contains("Configured read timeout");
+        }
+      }));
   }
 
   @Test
@@ -299,43 +302,47 @@ public class TestCanaryTool {
     ExecutorService executor = new ScheduledThreadPoolExecutor(1);
     CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink());
     CanaryTool canary = new CanaryTool(executor, sink);
-    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE)};
+    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
     assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
-    verify(mockAppender, times(1)).doAppend(argThat(
-        new ArgumentMatcher<LoggingEvent>() {
-          @Override
-          public boolean matches(LoggingEvent argument) {
-            return argument.getRenderedMessage().contains("Configured write timeout");
-          }
-        }));
+    verify(mockAppender, times(1))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage().contains("Configured write timeout");
+        }
+      }));
   }
 
-  //no table created, so there should be no regions
+  // no table created, so there should be no regions
   @Test
   public void testRegionserverNoRegions() throws Exception {
     runRegionserverCanary();
-    verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
-      }
-    }));
+    verify(mockAppender)
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("Regionserver not serving any regions");
+        }
+      }));
   }
 
-  //by creating a table, there shouldn't be any region servers not serving any regions
+  // by creating a table, there shouldn't be any region servers not serving any regions
   @Test
   public void testRegionserverWithRegions() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     testingUtility.createTable(tableName, new byte[][] { FAMILY });
     runRegionserverCanary();
-    verify(mockAppender, never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
-      }
-    }));
+    verify(mockAppender, never())
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("Regionserver not serving any regions");
+        }
+      }));
   }
 
   @Test
@@ -343,7 +350,7 @@ public class TestCanaryTool {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -357,23 +364,20 @@ public class TestCanaryTool {
       new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration());
     conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true);
     assertEquals(0, ToolRunner.run(conf, canary, args));
-    verify(sink, atLeastOnce())
-        .publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
-        isA(ColumnFamilyDescriptor.class), anyLong());
+    verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
+      isA(ColumnFamilyDescriptor.class), anyLong());
     assertEquals("verify no read error count", 0, canary.getReadFailures().size());
   }
 
   private void runRegionserverCanary() throws Exception {
     ExecutorService executor = new ScheduledThreadPoolExecutor(1);
     CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink());
-    String[] args = { "-t", "10000", "-regionserver"};
+    String[] args = { "-t", "10000", "-regionserver" };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertEquals("verify no read error count", 0, canary.getReadFailures().size());
   }
 
   private void testZookeeperCanaryWithArgs(String[] args) throws Exception {
-    Integer port =
-      Iterables.getOnlyElement(testingUtility.getZkCluster().getClientPortList(), null);
     String hostPort = testingUtility.getZkCluster().getAddress().toString();
     testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort + "/hbase");
     ExecutorService executor = new ScheduledThreadPoolExecutor(2);
@@ -381,8 +385,8 @@ public class TestCanaryTool {
     CanaryTool canary = new CanaryTool(executor, sink);
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
 
-    String baseZnode = testingUtility.getConfiguration()
-      .get(HConstants.ZOOKEEPER_ZNODE_PARENT, HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
+    String baseZnode = testingUtility.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT,
+      HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
     verify(sink, atLeastOnce()).publishReadTiming(eq(baseZnode), eq(hostPort), anyLong());
   }
 }
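
TestCanaryTool keeps its Mockito-based verification style but now matches on org.apache.logging.log4j.core.LogEvent and the formatted message instead of LoggingEvent.getRenderedMessage(). A compact sketch of verifying a log message through a mocked log4j2 appender, assuming the default log4j2 configuration; the logger name and message text are illustrative:

import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.Logger;

public class VerifyLogMessageSketch {
  public static void main(String[] args) {
    Appender mockAppender = mock(Appender.class);
    when(mockAppender.getName()).thenReturn("mockAppender");
    when(mockAppender.isStarted()).thenReturn(true);
    Logger logger = (Logger) LogManager.getLogger("sketch.logger");
    logger.addAppender(mockAppender);
    try {
      logger.error("Configured read timeout exceeded");
      // Match on the formatted message, mirroring the ArgumentMatcher usage above.
      verify(mockAppender).append(argThat(
        (LogEvent event) -> event.getMessage().getFormattedMessage().contains("read timeout")));
    } finally {
      logger.removeAppender(mockAppender);
    }
  }
}
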
diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
index 4b506388589..ccf465e24b2 100644
--- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
@@ -46,12 +46,10 @@
     
       org.apache.hbase
       hbase-shaded-mapreduce
-      ${project.version}
     
     
       org.apache.hbase
       hbase-shaded-client-byo-hadoop
-      ${project.version}
     
     
     
@@ -60,8 +58,18 @@
       provided
     
     
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-api
+      provided
+    
+    
+      org.apache.logging.log4j
+      log4j-core
+      provided
+    
+    
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       provided
     
     
@@ -108,8 +116,7 @@
                   
                     
                     org.slf4j:*
-                    log4j:*
-                    ch.qos.reload4j:*
+                    org.apache.logging.log4j:*
                     commons-logging:*
                     
                     com.google.code.findbugs:*
diff --git a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
index 6558910a18b..1d5aefae8d1 100644
--- a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
+++ b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
@@ -1,6 +1,6 @@
 
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     
-    4.0.0
-    
-        hbase-shaded
-        org.apache.hbase
-        2.5.0-SNAPSHOT
-        ..
-    
-    hbase-shaded-client-byo-hadoop
-    Apache HBase - Shaded - Client
-    
-        
-            
-                org.apache.maven.plugins
-                maven-site-plugin
-                
-                    true
-                
-            
-            
-                
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-                org.apache.maven.plugins
-                maven-shade-plugin
-            
-        
-    
-    
-        
-            org.apache.hbase
-            hbase-client
-        
-    
+  4.0.0
+  
+    hbase-shaded
+    org.apache.hbase
+    2.5.0-SNAPSHOT
+    ..
+  
+  hbase-shaded-client-byo-hadoop
+  Apache HBase - Shaded - Client
+  
+    
+      
+        org.apache.maven.plugins
+        maven-site-plugin
+        
+          true
+        
+      
+      
+        
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+        org.apache.maven.plugins
+        maven-shade-plugin
+      
+    
+  
+  
+    
+      org.apache.hadoop
+      hadoop-auth
+      provided
+    
+  
 
-    
+  
       
-      
-        hadoop-2.0
-        
-          
+    
+      hadoop-2.0
+      
+        
               
-              !hadoop.profile
-          
-        
-        
-          
-            org.apache.hadoop
-            hadoop-auth
-            provided
-          
-          
-            org.apache.hadoop
-            hadoop-common
-            provided
-          
-          
-            org.codehaus.jackson
-            jackson-jaxrs
-            1.9.13
-            provided
-            
-              
-                org.codehaus.jackson
-                jackson-mapper-asl
-              
-              
-                org.codehaus.jackson
-                jackson-core-asl
-              
-            
-          
-          
-            org.codehaus.jackson
-            jackson-xc
-            1.9.13
-            provided
-            
-              
-                org.codehaus.jackson
-                jackson-mapper-asl
-              
-              
-                org.codehaus.jackson
-                jackson-core-asl
-              
-            
-          
-        
-      
+              
+          !hadoop.profile
+        
+      
+      
+        
+          org.apache.hadoop
+          hadoop-auth
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-common
+          provided
+        
+        
+          org.codehaus.jackson
+          jackson-jaxrs
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-xc
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+      
+    
 
       
-      
-        hadoop-3.0
-        
-          
-            hadoop.profile
-            3.0
-          
-        
-        
-          
-            org.apache.hadoop
-            hadoop-auth
-            provided
-          
-          
-            org.apache.hadoop
-            hadoop-common
-            provided
-          
-          
-            org.codehaus.jackson
-            jackson-jaxrs
-            1.9.13
-            provided
-            
-              
-                org.codehaus.jackson
-                jackson-mapper-asl
-              
-              
-                org.codehaus.jackson
-                jackson-core-asl
-              
-            
-          
-          
-            org.codehaus.jackson
-            jackson-xc
-            1.9.13
-            provided
-            
-              
-                org.codehaus.jackson
-                jackson-mapper-asl
-              
-              
-                org.codehaus.jackson
-                jackson-core-asl
-              
-            
-          
-        
-      
-    
+    
+      hadoop-3.0
+      
+        
+          hadoop.profile
+          3.0
+        
+      
+      
+        
+          org.apache.hadoop
+          hadoop-auth
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-common
+          provided
+        
+        
+          org.codehaus.jackson
+          jackson-jaxrs
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-xc
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+      
+    
+  
 
diff --git a/hbase-shaded/hbase-shaded-client/pom.xml b/hbase-shaded/hbase-shaded-client/pom.xml
index e2ead27063c..6e26e57b041 100644
--- a/hbase-shaded/hbase-shaded-client/pom.xml
+++ b/hbase-shaded/hbase-shaded-client/pom.xml
@@ -1,6 +1,6 @@
 
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     
-    4.0.0
-    
-        hbase-shaded
-        org.apache.hbase
-        2.5.0-SNAPSHOT
-        ..
-    
-    hbase-shaded-client
-    Apache HBase - Shaded - Client (with Hadoop bundled)
-    
-        
-            
-                org.apache.maven.plugins
-                maven-site-plugin
-                
-                    true
-                
-            
-            
+  4.0.0
+  
+    hbase-shaded
+    org.apache.hbase
+    2.5.0-SNAPSHOT
+    ..
+  
+  hbase-shaded-client
+  Apache HBase - Shaded - Client (with Hadoop bundled)
+  
+    
+      
+        org.apache.maven.plugins
+        maven-site-plugin
+        
+          true
+        
+      
+      
                 
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-                org.apache.maven.plugins
-                maven-shade-plugin
-                
-                    
-                        aggregate-into-a-jar-with-relocated-third-parties
-                        
-                            
-                                
-                                    
-                                    javax.annotation:javax.annotation-api
-                                    javax.activation:javax.activation-api
-                                    jakarta.activation:jakarta.activation-api 
-                                    jakarta.ws.rs:jakarta.ws.rs-api
-                                    jakarta.annotation:jakarta.annotation-api
-                                    jakarta.validation:jakarta.validation-api
-                                    org.glassfish.hk2.external:jakarta.inject
-                                    
-                                    
-                                    org.apache.hbase:hbase-resource-bundle
-                                    org.slf4j:*
-                                    com.google.code.findbugs:*
-                                    com.github.stephenc.findbugs:*
-                                    com.github.spotbugs:*
-                                    org.apache.htrace:*
-                                    org.apache.yetus:*
-                                    log4j:*
-                                    ch.qos.reload4j:*
-                                    commons-logging:*
-                                    org.javassist:*
-                                    io.opentelemetry:*
-                                
-                            
-                        
-                    
-                
-            
-        
-    
-    
-        
-            org.apache.hbase
-            hbase-client
-        
-    
-
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+        org.apache.maven.plugins
+        maven-shade-plugin
+        
+          
+            aggregate-into-a-jar-with-relocated-third-parties
+            
+              
+                
+                  
+                  javax.annotation:javax.annotation-api
+                  javax.activation:javax.activation-api
+                  jakarta.activation:jakarta.activation-api 
+                  jakarta.ws.rs:jakarta.ws.rs-api
+                  jakarta.annotation:jakarta.annotation-api
+                  jakarta.validation:jakarta.validation-api
+                  org.glassfish.hk2.external:jakarta.inject
+                  
+                  
+                  org.apache.hbase:hbase-resource-bundle
+                  org.slf4j:*
+                  com.google.code.findbugs:*
+                  com.github.stephenc.findbugs:*
+                  com.github.spotbugs:*
+                  org.apache.htrace:*
+                  org.apache.yetus:*
+                  org.apache.logging.log4j:*
+                  commons-logging:*
+                  org.javassist:*
+                  io.opentelemetry:*
+                
+              
+            
+          
+        
+      
+    
+  
+  
+    
+      org.apache.hbase
+      hbase-client
+    
+  
 
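Note on the hunk above: aside from re-indentation, the substantive change to hbase-shaded-client/pom.xml is in the maven-shade-plugin artifact-set excludes, where the old log4j 1.x and reload4j entries are replaced by org.apache.logging.log4j:*. The XML element markup was stripped in this copy of the patch; a minimal sketch of the resulting exclude list, assuming the standard maven-shade-plugin <artifactSet>/<excludes> elements, would look like:

    <artifactSet>
      <excludes>
        <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
        <exclude>org.slf4j:*</exclude>
        <exclude>org.apache.logging.log4j:*</exclude>
        <exclude>commons-logging:*</exclude>
        <!-- findbugs/spotbugs annotations, htrace, yetus, javassist and
             opentelemetry stay excluded as listed above -->
      </excludes>
    </artifactSet>

Excluding the whole org.apache.logging.log4j group keeps the shaded client free of any logging backend, matching the classpath handling in bin/hbase.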
diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
index c2454383701..e5c77e27c6c 100644
--- a/hbase-shaded/hbase-shaded-mapreduce/pom.xml
+++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
@@ -1,6 +1,6 @@
 
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     
-    4.0.0
-    
-        hbase-shaded
-        org.apache.hbase
-        2.5.0-SNAPSHOT
-        ..
-    
-    hbase-shaded-mapreduce
-    Apache HBase - Shaded - MapReduce
-    
-        
-            
-                org.apache.maven.plugins
-                maven-site-plugin
-                
-                    true
-                
-            
-            
+  4.0.0
+  
+    hbase-shaded
+    org.apache.hbase
+    2.5.0-SNAPSHOT
+    ..
+  
+  hbase-shaded-mapreduce
+  Apache HBase - Shaded - MapReduce
+  
+    
+      
+        org.apache.maven.plugins
+        maven-site-plugin
+        
+          true
+        
+      
+      
                 
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-                org.apache.maven.plugins
-                maven-jar-plugin
-                
-                    
-                        
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+        org.apache.maven.plugins
+        maven-jar-plugin
+        
+          
+            
                             
-                            org/apache/hadoop/hbase/mapreduce/Driver
-                        
-                    
-                
-            
-            
-                org.apache.maven.plugins
-                maven-shade-plugin
-            
-        
-    
-    
+              org/apache/hadoop/hbase/mapreduce/Driver
+            
+          
+        
+      
+      
+        org.apache.maven.plugins
+        maven-shade-plugin
+      
+    
+  
+  
         
-        
-            org.apache.hbase
-            hbase-mapreduce
-            
-              
-              
-                javax.xml.bind
-                jaxb-api
-              
-              
-                javax.ws.rs
-                jsr311-api
-              
-              
-              
-                javax.ws.rs
-                javax.ws.rs-api
-              
-              
-                com.sun.jersey
-                jersey-server
-              
-              
-                com.sun.jersey
-                jersey-client
-              
-              
-                com.sun.jersey
-                jersey-core
-              
-              
-                com.sun.jersey
-                jersey-json
-              
-              
-                com.sun.jersey.contribs
-                jersey-guice
-              
-              
-              
-                javax.servlet
-                javax.servlet-api
-              
-              
-                org.eclipse.jetty
-                jetty-http
-              
-              
-                org.eclipse.jetty
-                jetty-security
-              
-              
-                org.eclipse.jetty
-                jetty-server
-              
-              
-                org.eclipse.jetty
-                jetty-servlet
-              
-              
-                org.eclipse.jetty
-                jetty-util
-              
-              
-                org.eclipse.jetty
-                jetty-util-ajax
-              
-              
-                org.glassfish
-                javax.el
-              
-              
-                org.eclipse.jetty
-                jetty-webapp
-              
-              
-                org.glassfish.jersey.core
-                jersey-server
-              
-              
-                org.glassfish.jersey.containers
-                jersey-container-servlet-core
-              
-              
-              
-                org.glassfish.web
-                javax.servlet.jsp
-              
-              
-                javax.servlet.jsp
-                javax.servlet.jsp-api
-              
-            
-        
-    
+    
+      org.apache.hadoop
+      hadoop-common
+      provided
+    
+  
 
-    
+  
         
         
-        
-          hadoop-2.0
-          
-            
+    
+      hadoop-2.0
+      
+        
                 
-                !hadoop.profile
-            
-          
-          
-            
-              org.apache.hadoop
-              hadoop-common
-              provided
-              
-                
-                  net.java.dev.jets3t
-                  jets3t
-                
-                
-                  javax.servlet.jsp
-                  jsp-api
-                
-                
-                  org.mortbay.jetty
-                  jetty
-                
-                
-                  com.sun.jersey
-                  jersey-server
-                
-                
-                  com.sun.jersey
-                  jersey-core
-                
-                
-                  com.sun.jersey
-                  jersey-json
-                
-                
-                  javax.servlet
-                  servlet-api
-                
-                
-                  tomcat
-                  jasper-compiler
-                
-                
-                  tomcat
-                  jasper-runtime
-                
-                
-                  com.google.code.findbugs
-                  jsr305
-                
-              
-            
-            
-              org.apache.hadoop
-              hadoop-hdfs
-              provided
-              
-                
-                  javax.servlet.jsp
-                  jsp-api
-                
-                
-                  javax.servlet
-                  servlet-api
-                
-                
-                  io.netty
-                  netty
-                
-                
-                  stax
-                  stax-api
-                
-                
-                  xerces
-                  xercesImpl
-                
-              
-              ${hadoop-two.version}
-            
-            
-              org.apache.hadoop
-              hadoop-mapreduce-client-core
-              provided
-              
-                
-                  com.google.guava
-                  guava
-                
-              
-            
-            
+                
+          !hadoop.profile
+        
+      
+      
+        
+          org.apache.hadoop
+          hadoop-common
+          provided
+          
+            
+              net.java.dev.jets3t
+              jets3t
+            
+            
+              javax.servlet.jsp
+              jsp-api
+            
+            
+              org.mortbay.jetty
+              jetty
+            
+            
+              com.sun.jersey
+              jersey-server
+            
+            
+              com.sun.jersey
+              jersey-core
+            
+            
+              com.sun.jersey
+              jersey-json
+            
+            
+              javax.servlet
+              servlet-api
+            
+            
+              tomcat
+              jasper-compiler
+            
+            
+              tomcat
+              jasper-runtime
+            
+            
+              com.google.code.findbugs
+              jsr305
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-hdfs
+          provided
+          
+            
+              javax.servlet.jsp
+              jsp-api
+            
+            
+              javax.servlet
+              servlet-api
+            
+            
+              io.netty
+              netty
+            
+            
+              stax
+              stax-api
+            
+            
+              xerces
+              xercesImpl
+            
+          
+          ${hadoop-two.version}
+        
+        
+          org.apache.hadoop
+          hadoop-mapreduce-client-core
+          provided
+          
+            
+              com.google.guava
+              guava
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-jaxrs
+          1.9.13
+          provided
+          
+            
               org.codehaus.jackson
-              jackson-jaxrs
-              1.9.13
-              provided
-              
-                
-                  org.codehaus.jackson
-                  jackson-mapper-asl
-                
-                
-                  org.codehaus.jackson
-                  jackson-core-asl
-                
-              
-            
-            
+              jackson-mapper-asl
+            
+            
               org.codehaus.jackson
-              jackson-xc
-              1.9.13
-              provided
-              
-                
-                  org.codehaus.jackson
-                  jackson-mapper-asl
-                
-                
-                  org.codehaus.jackson
-                  jackson-core-asl
-                
-              
-            
-            
-              org.apache.hadoop
-              hadoop-auth
-              provided
-            
-          
-        
+              jackson-core-asl
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-xc
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-auth
+          provided
+        
+      
+    
 
         
-        
-          hadoop-3.0
-          
-            
-              hadoop.profile
-              3.0
-            
-          
-          
-            ${hadoop-three.version}
-          
-          
-            
-              org.apache.hadoop
-              hadoop-common
-              provided
-            
-            
-              org.apache.hadoop
-              hadoop-hdfs
-              provided
-            
-            
-              org.apache.hadoop
-              hadoop-auth
-              provided
-            
-            
-              org.apache.hadoop
-              hadoop-mapreduce-client-core
-              provided
-              
-                
-                  com.google.guava
-                  guava
-                
-                
-                  javax.xml.bind
-                  jaxb-api
-                
-                
-                  javax.ws.rs
-                  jsr311-api
-                
-              
-            
-            
+    
+      hadoop-3.0
+      
+        
+          hadoop.profile
+          3.0
+        
+      
+      
+        ${hadoop-three.version}
+      
+      
+        
+          org.apache.hadoop
+          hadoop-common
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-hdfs
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-auth
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-mapreduce-client-core
+          provided
+          
+            
+              com.google.guava
+              guava
+            
+            
+              javax.xml.bind
+              jaxb-api
+            
+            
+              javax.ws.rs
+              jsr311-api
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-jaxrs
+          1.9.13
+          provided
+          
+            
               org.codehaus.jackson
-              jackson-jaxrs
-              1.9.13
-              provided
-              
-                
-                  org.codehaus.jackson
-                  jackson-mapper-asl
-                
-                
-                  org.codehaus.jackson
-                  jackson-core-asl
-                
-              
-            
-            
+              jackson-mapper-asl
+            
+            
               org.codehaus.jackson
-              jackson-xc
-              1.9.13
-              provided
-              
-                
-                  org.codehaus.jackson
-                  jackson-mapper-asl
-                
-                
-                  org.codehaus.jackson
-                  jackson-core-asl
-                
-              
-            
-          
-        
-    
+              jackson-core-asl
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-xc
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+      
+    
+  
 
diff --git a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
index e8f88eb472b..0bf2d92077f 100644
--- a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
+++ b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
@@ -1,25 +1,25 @@
 
-    
+
   4.0.0
 
   
@@ -56,8 +56,23 @@
       test
     
     
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-core
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-slf4j-impl
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-1.2-api
       test
     
     
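In hbase-shaded-testing-util-tester/pom.xml the single test-scoped org.slf4j:slf4j-reload4j dependency is replaced by four log4j2 artifacts (log4j-api, log4j-core, log4j-slf4j-impl and log4j-1.2-api), all in test scope. Since the XML markup is missing from this copy of the patch, a hedged sketch of one of the added dependency blocks, assuming standard Maven <dependency> elements, is:

    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
      <scope>test</scope>
    </dependency>
    <!-- log4j-api, log4j-core and log4j-1.2-api are declared the same way,
         also with <scope>test</scope> -->

log4j-slf4j-impl routes SLF4J calls to log4j2 and log4j-1.2-api keeps legacy log4j 1.x API callers working, which is why both are added alongside log4j-api and log4j-core.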
diff --git a/hbase-shaded/hbase-shaded-testing-util/pom.xml b/hbase-shaded/hbase-shaded-testing-util/pom.xml
index f46d0f6fdb3..c0198344044 100644
--- a/hbase-shaded/hbase-shaded-testing-util/pom.xml
+++ b/hbase-shaded/hbase-shaded-testing-util/pom.xml
@@ -1,183 +1,179 @@
 
-    
-    4.0.0
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  4.0.0
+  
+    hbase-shaded
+    org.apache.hbase
+    2.5.0-SNAPSHOT
+    ..
+  
 
-    
-        hbase-shaded
-        org.apache.hbase
-        2.5.0-SNAPSHOT
-        ..
-    
+  hbase-shaded-testing-util
+  Apache HBase - Shaded - Testing Util
 
-    hbase-shaded-testing-util
-    Apache HBase - Shaded - Testing Util
-
-    
+  
         
-        
-            org.apache.hadoop
-            hadoop-common
-            test-jar
-            compile
-        
-        
-            org.apache.hadoop
-            hadoop-hdfs
-            test-jar
-            compile
-        
-        
-            org.apache.hadoop
-            hadoop-mapreduce-client-app
-            test-jar
-            compile
-        
-        
-            org.apache.hadoop
-            hadoop-mapreduce-client-jobclient
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-common
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-server
-            test-jar
-            compile
-            
-              
-                 javax.xml.bind
-                 jaxb-api
-              
-            
-        
-        
-            org.apache.hbase
-            hbase-asyncfs
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-zookeeper
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-hadoop-compat
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-hadoop2-compat
-            test-jar
-            compile
-        
-        
-            org.codehaus.jackson
-            jackson-jaxrs
-            1.9.13
-            compile
-        
-        
-            org.apache.hbase
-            hbase-testing-util
-            ${project.version}
-            compile
-            
-              
-                javax.xml.bind
-                jaxb-api
-              
-            
-        
-    
+    
+      org.apache.hadoop
+      hadoop-common
+      test-jar
+      compile
+    
+    
+      org.apache.hadoop
+      hadoop-hdfs
+      test-jar
+      compile
+    
+    
+      org.apache.hadoop
+      hadoop-mapreduce-client-app
+      test-jar
+      compile
+    
+    
+      org.apache.hadoop
+      hadoop-mapreduce-client-jobclient
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-common
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-server
+      test-jar
+      compile
+      
+        
+          javax.xml.bind
+          jaxb-api
+        
+      
+    
+    
+      org.apache.hbase
+      hbase-asyncfs
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-zookeeper
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-hadoop-compat
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-hadoop2-compat
+      test-jar
+      compile
+    
+    
+      org.codehaus.jackson
+      jackson-jaxrs
+      1.9.13
+      compile
+    
+    
+      org.apache.hbase
+      hbase-testing-util
+      compile
+      
+        
+          javax.xml.bind
+          jaxb-api
+        
+      
+    
+  
 
-    
-        
-            
-                org.apache.maven.plugins
-                maven-site-plugin
-                
-                    true
-                
-            
-            
+  
+    
+      
+        org.apache.maven.plugins
+        maven-site-plugin
+        
+          true
+        
+      
+      
                 
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-                org.apache.maven.plugins
-                maven-shade-plugin
-                
-                    
-                        aggregate-into-a-jar-with-relocated-third-parties
-                        
-                            
-                                
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+        org.apache.maven.plugins
+        maven-shade-plugin
+        
+          
+            aggregate-into-a-jar-with-relocated-third-parties
+            
+              
+                
                                     
-                                    javax.annotation:javax.annotation-api
-                                    javax.activation:javax.activation-api
-                                    jakarta.ws.rs:jakarta.ws.rs-api
-                                    jakarta.annotation:jakarta.annotation-api
-                                    jakarta.validation:jakarta.validation-api
-                                    org.glassfish.hk2.external:jakarta.inject
+                  javax.annotation:javax.annotation-api
+                  javax.activation:javax.activation-api
+                  jakarta.ws.rs:jakarta.ws.rs-api
+                  jakarta.annotation:jakarta.annotation-api
+                  jakarta.validation:jakarta.validation-api
+                  org.glassfish.hk2.external:jakarta.inject
                                     
                                     
-                                    org.apache.hbase:hbase-resource-bundle
-                                    org.slf4j:*
-                                    com.google.code.findbugs:*
-                                    com.github.stephenc.findbugs:*
-                                    com.github.spotbugs:*
-                                    org.apache.htrace:*
-                                    org.apache.yetus:*
-                                    log4j:*
-                                    ch.qos.reload4j:*
-                                    commons-logging:*
-                                    org.javassist:*
-                                    io.opentelemetry:*
-                                
-                            
-                        
-                    
-                
-            
-        
-    
-
+                  org.apache.hbase:hbase-resource-bundle
+                  org.slf4j:*
+                  com.google.code.findbugs:*
+                  com.github.stephenc.findbugs:*
+                  com.github.spotbugs:*
+                  org.apache.htrace:*
+                  org.apache.yetus:*
+                  org.apache.logging.log4j:*
+                  commons-logging:*
+                  org.javassist:*
+                  io.opentelemetry:*
+                
+              
+            
+          
+        
+      
+    
+  
 
diff --git a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
index 027236a9343..850d1614689 100644
--- a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
@@ -40,7 +40,6 @@
     
       org.apache.hbase
       hbase-shaded-client
-      ${project.version}
     
     
     
@@ -49,8 +48,18 @@
       provided
     
     
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-api
+      provided
+    
+    
+      org.apache.logging.log4j
+      log4j-core
+      provided
+    
+    
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       provided
     
     
@@ -97,8 +106,7 @@
                   
                     
                     org.slf4j:*
-                    log4j:*
-                    ch.qos.reload4j:*
+                    org.apache.logging.log4j:*
                     commons-logging:*
                     
                     com.google.code.findbugs:*
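The hbase-shaded-with-hadoop-check-invariants module now declares the log4j2 artifacts (log4j-api, log4j-core, log4j-slf4j-impl) in provided scope in place of ch.qos.reload4j:reload4j, and the artifact exclusion list later in the file drops log4j:* and ch.qos.reload4j:* in favor of org.apache.logging.log4j:*. With the markup stripped here, a minimal sketch of one of the new provided dependencies, assuming standard Maven <dependency> elements, is:

    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
      <scope>provided</scope>
    </dependency>
    <!-- log4j-api and log4j-slf4j-impl are added with the same provided scope -->

Provided scope makes the log4j2 jars available to the build without adding them to the module's transitive runtime dependencies.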
diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml
index 180965b8a7e..a0dba53b54d 100644
--- a/hbase-shaded/pom.xml
+++ b/hbase-shaded/pom.xml
@@ -1,5 +1,7 @@
 
-
+
     
-    4.0.0
-    
-        hbase-build-configuration
-        org.apache.hbase
-        2.5.0-SNAPSHOT
-        ../hbase-build-configuration
-    
-    hbase-shaded
-    Apache HBase - Shaded
-    Module of HBase with most deps shaded.
-    pom
-    
+  4.0.0
+  
+    hbase-build-configuration
+    org.apache.hbase
+    2.5.0-SNAPSHOT
+    ../hbase-build-configuration
+  
+  hbase-shaded
+  Apache HBase - Shaded
+  Module of HBase with most deps shaded.
+  pom
+  
       
-      true
+    true
       
-      true
-      true
-      org.apache.hadoop.hbase.shaded
-    
-    
-        hbase-shaded-client-byo-hadoop
-        hbase-shaded-client
-        hbase-shaded-mapreduce
-        hbase-shaded-testing-util
-        hbase-shaded-testing-util-tester
-        hbase-shaded-check-invariants
-        hbase-shaded-with-hadoop-check-invariants
-    
-    
-      
-         org.apache.hbase
-         hbase-resource-bundle
-         true
-      
-      
-      
-        ch.qos.reload4j
-        reload4j
-        true
-      
-      
-        org.slf4j
-        slf4j-reload4j
-        true
-      
-    
-    
-        
-            
-                
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-            
-              org.apache.maven.plugins
-              maven-remote-resources-plugin
-              
-                
-                  aggregate-licenses
-                  
-                    process
-                  
-                  
-                    
-                      ${build.year}
-                      ${license.debug.print.included}
-                      ${license.bundles.dependencies}
-                      ${license.bundles.jquery}
-                      ${license.bundles.logo}
-                      ${license.bundles.bootstrap}
-                    
-                    
-                      ${project.groupId}:hbase-resource-bundle:${project.version}
-                    
-                    
-                      ${project.groupId}:hbase-resource-bundle:${project.version}
-                    
-                    
-                      supplemental-models.xml
-                    
-                  
-                
-              
-            
-        
-        
-            
-                
-                    
-                    maven-assembly-plugin
-                    
-                        true
-                    
-                
-                
-                    org.apache.maven.plugins
-                    maven-shade-plugin
-                    3.2.4
-                    
-                        
-                            aggregate-into-a-jar-with-relocated-third-parties
-                            package
-                            
-                                shade
-                            
-                            
-                                false
-                                false
-                                true
-                                false
-                                
-                                    
-                                        
-                                        javax.annotation:javax.annotation-api
-                                        javax.activation:javax.activation-api
-                                        jakarta.activation:jakarta.activation-api 
-                                        jakarta.ws.rs:jakarta.ws.rs-api
-                                        jakarta.annotation:jakarta.annotation-api
-                                        jakarta.validation:jakarta.validation-api
-                                        org.glassfish.hk2.external:jakarta.inject
-                                        
-                                        org.apache.hadoop:*
-                                        
-                                        org.apache.hbase:hbase-resource-bundle
-                                        org.slf4j:*
-                                        com.google.code.findbugs:*
-                                        com.github.stephenc.findbugs:*
-                                        com.github.spotbugs:*
-                                        org.apache.htrace:*
-                                        org.apache.yetus:*
-                                        log4j:*
-                                        ch.qos.reload4j:*
-                                        commons-logging:*
-                                        org.javassist:*
-                                        io.opentelemetry:*
-                                    
-                                
-                                
-                                    
-                                    
-                                        com.cedarsoftware
-                                        ${shaded.prefix}.com.cedarsoftware
-                                    
-                                    
-                                        com.codahale
-                                        ${shaded.prefix}.com.codahale
-                                    
-                                    
-                                        com.ctc
-                                        ${shaded.prefix}.com.ctc
-                                    
-                                    
-                                        com.dropwizard
-                                        ${shaded.prefix}.com.dropwizard
-                                    
-                                    
-                                        com.fasterxml
-                                        ${shaded.prefix}.com.fasterxml
-                                    
-                                    
-                                        com.github.benmanes.caffeine
-                                        ${shaded.prefix}.com.github.benmanes.caffeine
-                                    
-                                    
-                                        com.google
-                                        ${shaded.prefix}.com.google
-                                    
-                                    
-                                        com.jamesmurty
-                                        ${shaded.prefix}.com.jamesmurty
-                                    
-                                    
-                                        com.jcraft
-                                        ${shaded.prefix}.com.jcraft
-                                    
-                                    
-                                        com.lmax
-                                        ${shaded.prefix}.com.lmax
-                                    
-                                    
-                                        com.microsoft
-                                        ${shaded.prefix}.com.microsoft
-                                    
-                                    
-                                        com.nimbusds
-                                        ${shaded.prefix}.com.nimbusds
-                                    
-                                    
-                                        com.squareup
-                                        ${shaded.prefix}.com.squareup
-                                    
-                                    
-                                        com.thoughtworks
-                                        ${shaded.prefix}.com.thoughtworks
-                                    
-                                    
-                                        com.zaxxer
-                                        ${shaded.prefix}.com.zaxxer
-                                    
+    true
+    true
+    org.apache.hadoop.hbase.shaded
+  
+  
+    hbase-shaded-client-byo-hadoop
+    hbase-shaded-client
+    hbase-shaded-mapreduce
+    hbase-shaded-testing-util
+    hbase-shaded-testing-util-tester
+    hbase-shaded-check-invariants
+    hbase-shaded-with-hadoop-check-invariants
+  
+  
+    
+      org.apache.hbase
+      hbase-resource-bundle
+      true
+    
+    
+    
+      org.apache.logging.log4j
+      log4j-api
+      true
+    
+    
+      org.apache.logging.log4j
+      log4j-core
+      true
+    
+    
+      org.apache.logging.log4j
+      log4j-slf4j-impl
+      true
+    
+  
+  
+    
+      
+        
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+      
+        org.apache.maven.plugins
+        maven-remote-resources-plugin
+        
+          
+            aggregate-licenses
+            
+              process
+            
+            
+              
+                ${build.year}
+                ${license.debug.print.included}
+                ${license.bundles.dependencies}
+                ${license.bundles.jquery}
+                ${license.bundles.logo}
+                ${license.bundles.bootstrap}
+              
+              
+                ${project.groupId}:hbase-resource-bundle:${project.version}
+              
+              
+                ${project.groupId}:hbase-resource-bundle:${project.version}
+              
+              
+                supplemental-models.xml
+              
+            
+          
+        
+      
+    
+    
+      
+        
+          
+          maven-assembly-plugin
+          
+            true
+          
+        
+        
+          org.apache.maven.plugins
+          maven-shade-plugin
+          3.2.4
+          
+            
+              aggregate-into-a-jar-with-relocated-third-parties
+              package
+              
+                shade
+              
+              
+                false
+                false
+                true
+                false
+                
+                  
+                    
+                    javax.annotation:javax.annotation-api
+                    javax.activation:javax.activation-api
+                    jakarta.activation:jakarta.activation-api 
+                    jakarta.ws.rs:jakarta.ws.rs-api
+                    jakarta.annotation:jakarta.annotation-api
+                    jakarta.validation:jakarta.validation-api
+                    org.glassfish.hk2.external:jakarta.inject
+                    
+                    org.apache.hadoop:*
+                    
+                    org.apache.hbase:hbase-resource-bundle
+                    org.slf4j:*
+                    com.google.code.findbugs:*
+                    com.github.stephenc.findbugs:*
+                    com.github.spotbugs:*
+                    org.apache.htrace:*
+                    org.apache.yetus:*
+                    org.apache.logging.log4j:*
+                    commons-logging:*
+                    org.javassist:*
+                    io.opentelemetry:*
+                  
+                
+                
+                  
+                  
+                    com.cedarsoftware
+                    ${shaded.prefix}.com.cedarsoftware
+                  
+                  
+                    com.codahale
+                    ${shaded.prefix}.com.codahale
+                  
+                  
+                    com.ctc
+                    ${shaded.prefix}.com.ctc
+                  
+                  
+                    com.dropwizard
+                    ${shaded.prefix}.com.dropwizard
+                  
+                  
+                    com.fasterxml
+                    ${shaded.prefix}.com.fasterxml
+                  
+                  
+                    com.github.benmanes.caffeine
+                    ${shaded.prefix}.com.github.benmanes.caffeine
+                  
+                  
+                    com.google
+                    ${shaded.prefix}.com.google
+                  
+                  
+                    com.jamesmurty
+                    ${shaded.prefix}.com.jamesmurty
+                  
+                  
+                    com.jcraft
+                    ${shaded.prefix}.com.jcraft
+                  
+                  
+                    com.lmax
+                    ${shaded.prefix}.com.lmax
+                  
+                  
+                    com.microsoft
+                    ${shaded.prefix}.com.microsoft
+                  
+                  
+                    com.nimbusds
+                    ${shaded.prefix}.com.nimbusds
+                  
+                  
+                    com.squareup
+                    ${shaded.prefix}.com.squareup
+                  
+                  
+                    com.thoughtworks
+                    ${shaded.prefix}.com.thoughtworks
+                  
+                  
+                    com.zaxxer
+                    ${shaded.prefix}.com.zaxxer
+                  
 
-                                    
-                                    
-                                        org.xbill
-                                        ${shaded.prefix}.org.xbill
-                                    
+                  
+                  
+                    org.xbill
+                    ${shaded.prefix}.org.xbill
+                  
 
-                                    
-                                    
-                                        org.jboss.netty
-                                        ${shaded.prefix}.org.jboss.netty
-                                    
-                                    
-                                        io.netty
-                                        ${shaded.prefix}.io.netty
-                                    
+                  
+                  
+                    org.jboss.netty
+                    ${shaded.prefix}.org.jboss.netty
+                  
+                  
+                    io.netty
+                    ${shaded.prefix}.io.netty
+                  
 
-                                    
-                                    
-                                        okio
-                                        ${shaded.prefix}.okio
-                                    
+                  
+                  
+                    okio
+                    ${shaded.prefix}.okio
+                  
 
-                                    
-                                    
-                                      org.checkerframework
-                                      ${shaded.prefix}.org.checkerframework
-                                    
-                                    
-                                      org.codehaus
-                                      ${shaded.prefix}.org.codehaus
-                                    
-                                    
-                                        org.eclipse
-                                        ${shaded.prefix}.org.eclipse
-                                    
-                                    
-                                        org.ehcache
-                                        ${shaded.prefix}.org.ehcache
-                                    
-                                    
-                                        org.jcodings
-                                        ${shaded.prefix}.org.jcodings
-                                    
-                                    
-                                        org.joni
-                                        ${shaded.prefix}.org.joni
-                                    
-                                    
-                                        org.mortbay
-                                        ${shaded.prefix}.org.mortbay
-                                    
-                                    
-                                        org.nustaq
-                                        ${shaded.prefix}.org.nustaq
-                                    
-                                    
-                                        org.terracotta
-                                        ${shaded.prefix}.org.terracotta
-                                    
-                                    
-                                        org.tukaani
-                                        ${shaded.prefix}.org.tukaani
-                                    
-                                    
-                                        org.xerial
-                                        ${shaded.prefix}.org.xerial
-                                    
-                                    
-                                        org.znerd
-                                        ${shaded.prefix}.org.znerd
-                                    
-                                    
-                                        org.aopalliance
-                                        ${shaded.prefix}.org.aopalliance
-                                    
-                                    
-                                        org.fusesource
-                                        ${shaded.prefix}.org.fusesource
-                                    
-                                    
-                                        org.iq80
-                                        ${shaded.prefix}.org.iq80
-                                    
-                                    
-                                        org.jamon
-                                        ${shaded.prefix}.org.jamon
-                                    
-                                    
-                                        org.jets3t
-                                        ${shaded.prefix}.org.jets3t
-                                    
-                                    
-                                    
-                                        contribs.mx
-                                        ${shaded.prefix}.contribs.mx
-                                    
-                                    
-                                        org.objectweb
-                                        ${shaded.prefix}.org.objectweb
-                                    
+                  
+                  
+                    org.checkerframework
+                    ${shaded.prefix}.org.checkerframework
+                  
+                  
+                    org.codehaus
+                    ${shaded.prefix}.org.codehaus
+                  
+                  
+                    org.eclipse
+                    ${shaded.prefix}.org.eclipse
+                  
+                  
+                    org.ehcache
+                    ${shaded.prefix}.org.ehcache
+                  
+                  
+                    org.jcodings
+                    ${shaded.prefix}.org.jcodings
+                  
+                  
+                    org.joni
+                    ${shaded.prefix}.org.joni
+                  
+                  
+                    org.mortbay
+                    ${shaded.prefix}.org.mortbay
+                  
+                  
+                    org.nustaq
+                    ${shaded.prefix}.org.nustaq
+                  
+                  
+                    org.terracotta
+                    ${shaded.prefix}.org.terracotta
+                  
+                  
+                    org.tukaani
+                    ${shaded.prefix}.org.tukaani
+                  
+                  
+                    org.xerial
+                    ${shaded.prefix}.org.xerial
+                  
+                  
+                    org.znerd
+                    ${shaded.prefix}.org.znerd
+                  
+                  
+                    org.aopalliance
+                    ${shaded.prefix}.org.aopalliance
+                  
+                  
+                    org.fusesource
+                    ${shaded.prefix}.org.fusesource
+                  
+                  
+                    org.iq80
+                    ${shaded.prefix}.org.iq80
+                  
+                  
+                    org.jamon
+                    ${shaded.prefix}.org.jamon
+                  
+                  
+                    org.jets3t
+                    ${shaded.prefix}.org.jets3t
+                  
+                  
+                  
+                    contribs.mx
+                    ${shaded.prefix}.contribs.mx
+                  
+                  
+                    org.objectweb
+                    ${shaded.prefix}.org.objectweb
+                  
 
 
-                                    
-                                    
-                                        org.apache.avro
-                                        ${shaded.prefix}.org.apache.avro
-                                    
-                                    
-                                        org.apache.curator
-                                        ${shaded.prefix}.org.apache.curator
-                                    
-                                    
-                                        org.apache.directory
-                                        ${shaded.prefix}.org.apache.directory
-                                    
-                                    
-                                        org.apache.http
-                                        ${shaded.prefix}.org.apache.http
-                                    
-                                    
-                                        org.apache.jasper
-                                        ${shaded.prefix}.org.apache.jasper
-                                    
-                                    
-                                        org.apache.jute
-                                        ${shaded.prefix}.org.apache.jute
-                                    
-                                    
-                                        org.apache.kerby
-                                        ${shaded.prefix}.org.apache.kerby
-                                    
-                                    
-                                        org.apache.taglibs
-                                        ${shaded.prefix}.org.apache.taglibs
-                                    
-                                    
-                                        org.apache.zookeeper
-                                        ${shaded.prefix}.org.apache.zookeeper
-                                    
+                  
+                  
+                    org.apache.avro
+                    ${shaded.prefix}.org.apache.avro
+                  
+                  
+                    org.apache.curator
+                    ${shaded.prefix}.org.apache.curator
+                  
+                  
+                    org.apache.directory
+                    ${shaded.prefix}.org.apache.directory
+                  
+                  
+                    org.apache.http
+                    ${shaded.prefix}.org.apache.http
+                  
+                  
+                    org.apache.jasper
+                    ${shaded.prefix}.org.apache.jasper
+                  
+                  
+                    org.apache.jute
+                    ${shaded.prefix}.org.apache.jute
+                  
+                  
+                    org.apache.kerby
+                    ${shaded.prefix}.org.apache.kerby
+                  
+                  
+                    org.apache.taglibs
+                    ${shaded.prefix}.org.apache.taglibs
+                  
+                  
+                    org.apache.zookeeper
+                    ${shaded.prefix}.org.apache.zookeeper
+                  
 
-                                    
-                                    
-                                        org.apache.commons.beanutils
-                                        ${shaded.prefix}.org.apache.commons.beanutils
-                                    
-                                    
-                                        org.apache.commons.cli
-                                        ${shaded.prefix}.org.apache.commons.cli
-                                    
-                                    
-                                        org.apache.commons.collections
-                                        ${shaded.prefix}.org.apache.commons.collections
-                                    
-                                    
-                                        org.apache.commons.configuration
-                                        ${shaded.prefix}.org.apache.commons.configuration
-                                    
-                                    
-                                        org.apache.commons.crypto
-                                        ${shaded.prefix}.org.apache.commons.crypto
-                                    
-                                    
-                                        org.apache.commons.csv
-                                        ${shaded.prefix}.org.apache.commons.csv
-                                    
-                                    
-                                        org.apache.commons.daemon
-                                        ${shaded.prefix}.org.apache.commons.daemon
-                                    
-                                    
-                                        org.apache.commons.io
-                                        ${shaded.prefix}.org.apache.commons.io
-                                    
-                                    
-                                        org.apache.commons.math
-                                        ${shaded.prefix}.org.apache.commons.math
-                                    
-                                    
-                                        org.apache.commons.math3
-                                        ${shaded.prefix}.org.apache.commons.math3
-                                    
-                                    
-                                        org.apache.commons.net
-                                        ${shaded.prefix}.org.apache.commons.net
-                                    
-                                    
-                                        org.apache.commons.lang
-                                        ${shaded.prefix}.org.apache.commons.lang
-                                    
-                                    
-                                        org.apache.commons.lang3
-                                        ${shaded.prefix}.org.apache.commons.lang3
-                                    
-                                    
-                                        org.apache.commons.el
-                                        ${shaded.prefix}.org.apache.commons.el
-                                    
-                                    
-                                        org.apache.commons.httpclient
-                                        ${shaded.prefix}.org.apache.commons.httpclient
-                                    
-                                    
-                                        org.apache.commons.compress
-                                        ${shaded.prefix}.org.apache.commons.compress
-                                    
-                                    
-                                        org.apache.commons.digester
-                                        ${shaded.prefix}.org.apache.commons.digester
-                                    
-                                    
-                                        org.apache.commons.codec
-                                        ${shaded.prefix}.org.apache.commons.codec
-                                    
-                                    
-                                        org.apache.commons.text
-                                        ${shaded.prefix}.org.apache.commons.text
-                                    
+                  
+                  
+                    org.apache.commons.beanutils
+                    ${shaded.prefix}.org.apache.commons.beanutils
+                  
+                  
+                    org.apache.commons.cli
+                    ${shaded.prefix}.org.apache.commons.cli
+                  
+                  
+                    org.apache.commons.collections
+                    ${shaded.prefix}.org.apache.commons.collections
+                  
+                  
+                    org.apache.commons.configuration
+                    ${shaded.prefix}.org.apache.commons.configuration
+                  
+                  
+                    org.apache.commons.crypto
+                    ${shaded.prefix}.org.apache.commons.crypto
+                  
+                  
+                    org.apache.commons.csv
+                    ${shaded.prefix}.org.apache.commons.csv
+                  
+                  
+                    org.apache.commons.daemon
+                    ${shaded.prefix}.org.apache.commons.daemon
+                  
+                  
+                    org.apache.commons.io
+                    ${shaded.prefix}.org.apache.commons.io
+                  
+                  
+                    org.apache.commons.math
+                    ${shaded.prefix}.org.apache.commons.math
+                  
+                  
+                    org.apache.commons.math3
+                    ${shaded.prefix}.org.apache.commons.math3
+                  
+                  
+                    org.apache.commons.net
+                    ${shaded.prefix}.org.apache.commons.net
+                  
+                  
+                    org.apache.commons.lang
+                    ${shaded.prefix}.org.apache.commons.lang
+                  
+                  
+                    org.apache.commons.lang3
+                    ${shaded.prefix}.org.apache.commons.lang3
+                  
+                  
+                    org.apache.commons.el
+                    ${shaded.prefix}.org.apache.commons.el
+                  
+                  
+                    org.apache.commons.httpclient
+                    ${shaded.prefix}.org.apache.commons.httpclient
+                  
+                  
+                    org.apache.commons.compress
+                    ${shaded.prefix}.org.apache.commons.compress
+                  
+                  
+                    org.apache.commons.digester
+                    ${shaded.prefix}.org.apache.commons.digester
+                  
+                  
+                    org.apache.commons.codec
+                    ${shaded.prefix}.org.apache.commons.codec
+                  
+                  
+                    org.apache.commons.text
+                    ${shaded.prefix}.org.apache.commons.text
+                  
 
-                                    
-                                    
-                                        net/
-                                        ${shaded.prefix}.net.
-                                    
-                                    
-                                        org.agrona
-                                        ${shaded.prefix}.org.agrona
-                                    
-                                
-                                
-                                  
-                                  
-                                    
-                                      LICENSE.txt
-                                      ASL2.0
+                  
+                  
+                    net/
+                    ${shaded.prefix}.net.
+                  
+                  
+                    org.agrona
+                    ${shaded.prefix}.org.agrona
+                  
+                
+                
+                  
+                  
+                    
+                      LICENSE.txt
+                      ASL2.0
                                       
-                                      overview.html
-                                    
-                                  
-                                  
-                                  
-                                    false
-                                    ${project.name}
-                                  
-                                  
-                                  
-                                
-                                
-                                    
-                                    
-                                        dnsjava:dnsjava
-                                        
-                                            dig*
-                                            jnamed*
-                                            lookup*
-                                            update*
-                                        
-                                    
-                                  
-                                    
-                                    org.eclipse.jetty.orbit:javax.servlet.jsp.jstl
-                                    
-                                      META-INF/ECLIPSEF.SF
-                                      META-INF/ECLIPSEF.RSA
-                                    
-                                  
-                                  
-                                    
-                                    commons-beanutils:commons-beanutils-core
-                                    
-                                      org/apache/commons/collections/*.class
-                                    
-                                  
-                                  
-                                    
-                                    org.apache.hadoop:hadoop-yarn-common
-                                    
-                                      webapps/*
-                                      webapps/**/*
-                                    
-                                  
-                                  
-                                    *:*
-                                    
-                                      
-                                      *.proto
-                                      **/*.proto
-                                      
-                                      LICENSE
-                                      NOTICE
-                                    
-                                  
-                                  
-                                    
-                                    org.apache.commons:commons-math3
-                                    
-                                      assets/org/apache/commons/math3/**/*
-                                    
-                                  
-                                  
-                                    
-                                    org.apache.hadoop:*
-                                    
-                                      mapred-default.xml.orig
-                                    
-                                  
-                                  
-                                  
-                                    org.eclipse.jetty:*
-                                    
-                                      about.html
-                                      jetty-dir.css
-                                    
-                                  
-                                  
-                                    org.apache.kerby:*
-                                    
-                                      krb5-template.conf
-                                      krb5_udp-template.conf
-                                      ccache.txt
-                                      keytab.txt
-                                    
-                                  
-                                
-                            
-                        
-                    
-                
-            
-        
-    
+                      overview.html
+                    
+                  
+                  
+                  
+                    false
+                    ${project.name}
+                  
+                  
+                  
+                
+                
+                  
+                  
+                    dnsjava:dnsjava
+                    
+                      dig*
+                      jnamed*
+                      lookup*
+                      update*
+                    
+                  
+                  
+                    
+                    org.eclipse.jetty.orbit:javax.servlet.jsp.jstl
+                    
+                      META-INF/ECLIPSEF.SF
+                      META-INF/ECLIPSEF.RSA
+                    
+                  
+                  
+                    
+                    commons-beanutils:commons-beanutils-core
+                    
+                      org/apache/commons/collections/*.class
+                    
+                  
+                  
+                    
+                    org.apache.hadoop:hadoop-yarn-common
+                    
+                      webapps/*
+                      webapps/**/*
+                    
+                  
+                  
+                    *:*
+                    
+                      
+                      *.proto
+                      **/*.proto
+                      
+                      LICENSE
+                      NOTICE
+                    
+                  
+                  
+                    
+                    org.apache.commons:commons-math3
+                    
+                      assets/org/apache/commons/math3/**/*
+                    
+                  
+                  
+                    
+                    org.apache.hadoop:*
+                    
+                      mapred-default.xml.orig
+                    
+                  
+                  
+                  
+                    org.eclipse.jetty:*
+                    
+                      about.html
+                      jetty-dir.css
+                    
+                  
+                  
+                    org.apache.kerby:*
+                    
+                      krb5-template.conf
+                      krb5_udp-template.conf
+                      ccache.txt
+                      keytab.txt
+                    
+                  
+                
+              
+            
+          
+        
+      
+    
+  
 
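The relocation rules above rewrite selected third-party packages under ${shaded.prefix} inside the shaded jars. As a rough illustration (assuming the prefix resolves to the conventional org.apache.hadoop.hbase.shaded used by these modules, and using Commons Codec purely as an example), a relocated class can be looked up under its new name:

    // Hypothetical smoke test for a shaded client jar; the relocated package
    // name assumes shaded.prefix = org.apache.hadoop.hbase.shaded.
    public class ShadedClasspathCheck {
      public static void main(String[] args) throws ClassNotFoundException {
        Class<?> relocated = Class.forName(
            "org.apache.hadoop.hbase.shaded.org.apache.commons.codec.binary.Base64");
        System.out.println("Found relocated class: " + relocated.getName());
      }
    }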
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index d75e5204024..936434a72f0 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -147,13 +147,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
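The four log4j2 artifacts above are test-scoped on purpose: production code in this module keeps logging through the SLF4J API only, and log4j-slf4j-impl binds those calls to log4j2 when tests run. A minimal sketch of the pattern (class name is illustrative, not part of the patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // SLF4J call site; log4j-slf4j-impl routes it to log4j2 at runtime.
    public class ShellLoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(ShellLoggingExample.class);

      public static void main(String[] args) {
        LOG.info("Emitted through SLF4J, rendered by log4j2");
      }
    }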
diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml
index 9da3ff4f26c..5af1bbceca7 100644
--- a/hbase-testing-util/pom.xml
+++ b/hbase-testing-util/pom.xml
@@ -1,5 +1,7 @@
 
-
+
     
-    4.0.0
-    
-        hbase-build-configuration
-        org.apache.hbase
-        2.5.0-SNAPSHOT
-        ../hbase-build-configuration
-    
-    hbase-testing-util
-    Apache HBase - Testing Util
-    HBase Testing Utilities.
-    
+  4.0.0
+  
+    hbase-build-configuration
+    org.apache.hbase
+    2.5.0-SNAPSHOT
+    ../hbase-build-configuration
+  
+  hbase-testing-util
+  Apache HBase - Testing Util
+  HBase Testing Utilities.
+  
         
         
-        
-            org.apache.hbase
-            hbase-logging
-            test-jar
-            test
-        
-        
-            org.apache.hbase
-            hbase-common
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-common
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-annotations
-            test-jar
-            compile
-            
-                
-                    jdk.tools
-                    jdk.tools
-                
-            
-        
-        
-            org.apache.hbase
-            hbase-protocol
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-client
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-zookeeper
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-zookeeper
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-server
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-server
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-asyncfs
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-hadoop-compat
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-hadoop-compat
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            ${compat.module}
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            ${compat.module}
-            test-jar
-            compile
-        
-        
-            org.slf4j
-            jcl-over-slf4j
-            test
-        
-        
-            org.slf4j
-            jul-to-slf4j
-            test
-        
-        
-            org.slf4j
-            slf4j-reload4j
-            test
-        
-        
-            ch.qos.reload4j
-            reload4j
-            test
-        
-    
+    
+      org.apache.hbase
+      hbase-logging
+      test-jar
+      test
+    
+    
+      org.apache.hbase
+      hbase-common
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-common
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-annotations
+      test-jar
+      compile
+      
+        
+          jdk.tools
+          jdk.tools
+        
+      
+    
+    
+      org.apache.hbase
+      hbase-protocol
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-client
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-zookeeper
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-zookeeper
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-server
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-server
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-asyncfs
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-hadoop-compat
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-hadoop-compat
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      ${compat.module}
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      ${compat.module}
+      test-jar
+      compile
+    
+    
+      org.slf4j
+      jcl-over-slf4j
+      test
+    
+    
+      org.slf4j
+      jul-to-slf4j
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-api
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-core
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-slf4j-impl
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-1.2-api
+      test
+    
+  
 
-    
+  
         
         
 
         
-        
-            hadoop-2.0
-            
-                
+    
+      hadoop-2.0
+      
+        
                     
-                    !hadoop.profile
-                
-            
-            
-                
-                    org.apache.hadoop
-                    hadoop-common
-                    compile
-                    
-                      
-                        javax.xml.bind
-                        jaxb-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-auth
-                    compile
-                
-                
-                    org.apache.hadoop
-                    hadoop-client
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                      
-                        javax.xml.bind
-                        jaxb-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-mapreduce-client-core
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                      
-                        javax.xml.bind
-                        jaxb-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-mapreduce-client-jobclient
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-hdfs
-                    compile
-                
-                
-                    org.apache.hadoop
-                    hadoop-hdfs
-                    test-jar
-                    compile
-                
-                
-                    org.apache.hadoop
-                    hadoop-minicluster
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                      
-                        org.apache.zookeeper
-                        zookeeper
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-minikdc
-                
-            
-        
+                    
+          !hadoop.profile
+        
+      
+      
+        
+          org.apache.hadoop
+          hadoop-common
+          compile
+          
+            
+              javax.xml.bind
+              jaxb-api
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-auth
+          compile
+        
+        
+          org.apache.hadoop
+          hadoop-client
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+            
+              javax.xml.bind
+              jaxb-api
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-mapreduce-client-core
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+            
+              javax.xml.bind
+              jaxb-api
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-mapreduce-client-jobclient
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-hdfs
+          compile
+        
+        
+          org.apache.hadoop
+          hadoop-hdfs
+          test-jar
+          compile
+        
+        
+          org.apache.hadoop
+          hadoop-minicluster
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+            
+              org.apache.zookeeper
+              zookeeper
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-minikdc
+        
+      
+    
         
-        
-            hadoop-3.0
-            
-                
-                    hadoop.profile
-                    3.0
-                
-            
-            
-                
-                    org.apache.hadoop
-                    hadoop-common
-                    
-                      
-                         javax.xml.bind
-                         jaxb-api
-                      
-                      
-                       javax.ws.rs
-                       jsr311-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-minicluster
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                      
-                       javax.ws.rs
-                       jsr311-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-minikdc
-                
-            
-        
-    
+    
+      hadoop-3.0
+      
+        
+          hadoop.profile
+          3.0
+        
+      
+      
+        
+          org.apache.hadoop
+          hadoop-common
+          
+            
+              javax.xml.bind
+              jaxb-api
+            
+            
+              javax.ws.rs
+              jsr311-api
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-minicluster
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+            
+              javax.ws.rs
+              jsr311-api
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-minikdc
+        
+      
+    
+  
 
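Besides the log4j2 binding, the testing utilities keep jul-to-slf4j and jcl-over-slf4j at test scope because mini-cluster dependencies still log through java.util.logging and commons-logging; both bridges funnel that output into SLF4J and then into log4j2. A sketch of installing the JUL bridge in a test setup (class name is illustrative):

    import org.slf4j.bridge.SLF4JBridgeHandler;

    // Route java.util.logging output into SLF4J (and on to log4j2).
    public class JulBridgeSetup {
      public static void main(String[] args) {
        SLF4JBridgeHandler.removeHandlersForRootLogger();
        SLF4JBridgeHandler.install();
        java.util.logging.Logger.getLogger("example").info("now handled by SLF4J/log4j2");
      }
    }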
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index 2891f95865e..8856ad385ab 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -255,13 +255,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
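log4j-1.2-api is the compatibility bridge: code and third-party libraries that still call the old org.apache.log4j 1.x API keep compiling and running, with the calls forwarded to log4j2. A minimal sketch (illustrative class, not part of the patch):

    import org.apache.log4j.Logger;

    // The log4j 1.x API here is supplied by log4j-1.2-api, so the
    // message is actually handled by log4j2.
    public class LegacyApiExample {
      private static final Logger LOG = Logger.getLogger(LegacyApiExample.class);

      public static void main(String[] args) {
        LOG.info("log4j 1.x call bridged to log4j2");
      }
    }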
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index e23e5268c1a..4bbf0d044f1 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -174,13 +174,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/pom.xml b/pom.xml
index 34925d59e51..e6a513213d0 100755
--- a/pom.xml
+++ b/pom.xml
@@ -688,7 +688,7 @@
               
               
                 
-                  <exclude>log4j.properties</exclude>
+                  <exclude>log4j2.xml</exclude>
                 
               
             
@@ -1052,7 +1052,29 @@
             
           
           
-            <id>banned-log4j</id>
+            <id>banned-other-logging-framework</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <excludes>
+                    <exclude>log4j:*</exclude>
+                    <exclude>org.slf4j:slf4j-log4j12</exclude>
+                    <exclude>ch.qos.reload4j:*</exclude>
+                    <exclude>org.slf4j:slf4j-reload4j</exclude>
+                    <exclude>ch.qos.logback:*</exclude>
+                  </excludes>
+                  <message>
+                    We do not allow other logging frameworks as now we use log4j2
+                  </message>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+          <execution>
+            <id>banned-slf4j-log4j12</id>
            <goals>
              <goal>enforce</goal>
            </goals>
@@ -1060,11 +1082,10 @@
              <rules>
                <bannedDependencies>
                  <excludes>
-                    <exclude>log4j:**</exclude>
                    <exclude>org.slf4j:slf4j-log4j12</exclude>
                  </excludes>
                  <message>
-                    Use reload4j instead
+                    We do not allow slf4j-log4j12 dependency as now we use log4j-slf4j-impl
                  </message>
                </bannedDependencies>
              </rules>
@@ -1119,16 +1140,18 @@
                   Use SLF4j for logging
                   
                     org.apache.commons.logging.**
+                    <bannedImport>org.apache.log4j.**</bannedImport>
+                    <bannedImport>org.apache.logging.log4j.**</bannedImport>
                   
                 
                 
                   false
                   512
                   
-                    Do not use log4j directly in code, see Log4jUtils in hbase-logging for more details.
+                    Do not use log4j2 directly in code, see Log4jUtils in hbase-logging for more details.
                   
                   
-                    <bannedImport>org.apache.log4j.**</bannedImport>
+                    <bannedImport>org.apache.logging.log4j.**</bannedImport>
                   
                 
                 
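The import-control rule above bans org.apache.log4j.** and org.apache.logging.log4j.** from regular code, pointing at Log4jUtils in hbase-logging instead. A hedged sketch of that indirection, assuming Log4jUtils exposes a setLogLevel(String, String) style helper as its log-level API:

    import org.apache.hadoop.hbase.logging.Log4jUtils;

    // Adjust a logger without importing log4j2 classes directly.
    // Assumes a setLogLevel(loggerName, levelName) helper on Log4jUtils.
    public class QuietOrVerboseLogger {
      public static void main(String[] args) {
        Log4jUtils.setLogLevel("org.apache.hadoop.hbase.ipc", "DEBUG");
      }
    }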
@@ -1533,7 +1556,7 @@
     1.3
     1.0.1
     1.0.1
-    <reload4j.version>1.2.19</reload4j.version>
+    <log4j2.version>2.17.2</log4j2.version>
     2.28.2
     
     com.google.protobuf
@@ -1993,8 +2016,8 @@
       
       
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
         <version>${slf4j.version}</version>
       </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-reload4j</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>jcl-over-slf4j</artifactId>
@@ -2027,9 +2046,24 @@
         <version>${slf4j.version}</version>
       </dependency>
       <dependency>
-        <groupId>ch.qos.reload4j</groupId>
-        <artifactId>reload4j</artifactId>
-        <version>${reload4j.version}</version>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-api</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-core</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-slf4j-impl</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-1.2-api</artifactId>
+        <version>${log4j2.version}</version>
       </dependency>
       
       
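With the log4j2 artifacts managed here, a quick way to confirm which SLF4J binding won on a module's classpath is to ask SLF4J for its logger factory; with log4j-slf4j-impl present it is expected to report the log4j2 implementation (illustrative check, not part of the patch):

    import org.slf4j.LoggerFactory;

    // Prints the active SLF4J binding; with log4j-slf4j-impl on the classpath
    // this should be org.apache.logging.slf4j.Log4jLoggerFactory.
    public class BindingCheck {
      public static void main(String[] args) {
        System.out.println(LoggerFactory.getILoggerFactory().getClass().getName());
      }
    }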
@@ -2037,8 +2071,6 @@
         avro
         ${avro.version}
       
-      
       
         com.github.ben-manes.caffeine
         caffeine
@@ -3388,6 +3420,46 @@
               
            
          
+         <dependency>
+           <groupId>org.apache.hadoop</groupId>
+           <artifactId>hadoop-mapreduce-client-app</artifactId>
+           <version>${hadoop-three.version}</version>
+           <type>test-jar</type>
+           <exclusions>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-mapper-asl</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-core-asl</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-jaxrs</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-xc</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>javax.xml.bind</groupId>
+               <artifactId>jaxb-api</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>javax.ws.rs</groupId>
+               <artifactId>jsr311-api</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.slf4j</groupId>
+               <artifactId>slf4j-log4j12</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>log4j</groupId>
+               <artifactId>log4j</artifactId>
+             </exclusion>
+           </exclusions>
+         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
@@ -3414,10 +3486,6 @@
               <groupId>org.slf4j</groupId>
               <artifactId>slf4j-log4j12</artifactId>
             </exclusion>
-             <exclusion>
-               <groupId>log4j</groupId>
-               <artifactId>log4j</artifactId>
-             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
@@ -3447,10 +3515,6 @@
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
              </exclusion>
-              <exclusion>
-                <groupId>log4j</groupId>
-                <artifactId>log4j</artifactId>
-              </exclusion>
            </exclusions>
          </dependency>
          <dependency>
@@ -3863,10 +3927,6 @@
                org.slf4j
                slf4j-log4j12
              
-             
-               log4j
-               log4j
-