HBASE-26802 Backport the log4j2 changes to branch-2 (#4166)

Signed-off-by: Andrew Purtell <apurtell@apache.org>

Conflicts:
	hbase-hadoop-compat/pom.xml
	hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
	hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
	hbase-shaded/hbase-shaded-client/pom.xml
	hbase-shaded/hbase-shaded-mapreduce/pom.xml
	hbase-shaded/hbase-shaded-testing-util/pom.xml
	hbase-shaded/pom.xml
	hbase-testing-util/pom.xml
Duo Zhang authored 2022-03-12 03:17:43 +08:00, committed by Andrew Purtell
parent bf9233f511 commit 5844b53dea
79 changed files with 3383 additions and 2803 deletions


@@ -305,10 +305,13 @@ else
   # make it easier to check for shaded/not later on.
   shaded_jar=""
 fi
+# here we will add slf4j-api, commons-logging, jul-to-slf4j, jcl-over-slf4j
+# to classpath, as they are all logging bridges. Only exclude log4j* so we
+# will not actually log anything out. Add it later if necessary
 for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
   if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
-    [ "${f}" != "htrace-core.jar$" ] && \
-    [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
+    [[ "${f}" != "htrace-core.jar$" ]] && \
+    [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
     CLASSPATH="${CLASSPATH}:${f}"
   fi
 done
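
The new comment above is the heart of the client-side change: the SLF4J API and the JUL/JCL bridges stay on the default classpath, while every log4j* jar is excluded, so a bare client emits nothing unless a binding is added later. A hedged sketch, not part of the commit, of what those bridge jars do (the demo class name is invented; the bridge classes are the real ones shipped in jul-to-slf4j and jcl-over-slf4j):

  import java.util.logging.Logger;

  public class BridgeClasspathDemo { // hypothetical demo class
    public static void main(String[] args) {
      // Reroute java.util.logging into SLF4J (from the jul-to-slf4j jar).
      org.slf4j.bridge.SLF4JBridgeHandler.removeHandlersForRootLogger();
      org.slf4j.bridge.SLF4JBridgeHandler.install();
      Logger.getLogger("demo").info("JUL call, now routed through SLF4J");
      // commons-logging calls are rerouted by jcl-over-slf4j.
      org.apache.commons.logging.LogFactory.getLog("demo")
          .info("commons-logging call, routed by jcl-over-slf4j");
      // With no binding such as log4j-slf4j-impl present, SLF4J no-ops,
      // which is the "will not actually log anything out" behavior above.
    }
  }
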
@@ -671,7 +674,7 @@ elif [ "$COMMAND" = "mapredcp" ] ; then
 for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
   if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
     [ "${f}" != "htrace-core.jar$" ] && \
-    [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
+    [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then
     echo -n ":${f}"
   fi
 done
@@ -720,8 +723,8 @@ elif [ "$COMMAND" = "hbtop" ] ; then
   done
 fi
-if [ -f "${HBASE_HOME}/conf/log4j-hbtop.properties" ] ; then
-  HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j.configuration=file:${HBASE_HOME}/conf/log4j-hbtop.properties"
+if [ -f "${HBASE_HOME}/conf/log4j2-hbtop.properties" ] ; then
+  HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j2.configurationFile=file:${HBASE_HOME}/conf/log4j2-hbtop.properties"
 fi
 HBASE_OPTS="${HBASE_OPTS} ${HBASE_HBTOP_OPTS}"
 else
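
Note the property rename in this hunk: Log4j 1.x read its configuration location from -Dlog4j.configuration, while Log4j2 reads -Dlog4j2.configurationFile (the name the launcher now passes). A hedged Java sketch of the same effect done programmatically; the class and path are illustrative, and the property must be set before the first LogManager call, which is why the launcher puts it on the command line:

  import org.apache.logging.log4j.LogManager;
  import org.apache.logging.log4j.Logger;

  public class HbtopConfigDemo { // hypothetical demo class
    public static void main(String[] args) {
      // Must happen before Log4j2 initializes, i.e. before any LogManager call.
      System.setProperty("log4j2.configurationFile",
          "file:/opt/hbase/conf/log4j2-hbtop.properties"); // illustrative path
      Logger log = LogManager.getLogger(HbtopConfigDemo.class);
      log.info("configured from log4j2-hbtop.properties");
    }
  }
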
@@ -810,10 +813,9 @@ fi
 HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
 # by now if we're running a command it means we need logging
-for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do
+for f in ${HBASE_HOME}/lib/client-facing-thirdparty/log4j*.jar; do
   if [ -f "${f}" ]; then
     CLASSPATH="${CLASSPATH}:${f}"
-    break
   fi
 done


@@ -332,6 +332,7 @@ set HBASE_OPTS=%HBASE_OPTS% -Djava.util.logging.config.class="org.apache.hadoop.
 if not defined HBASE_ROOT_LOGGER (
   set HBASE_ROOT_LOGGER=INFO,console
 )
 set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger="%HBASE_ROOT_LOGGER%"
 if defined JAVA_LIBRARY_PATH (
@@ -348,6 +349,7 @@ if not defined HBASE_SECURITY_LOGGER (
   set HBASE_SECURITY_LOGGER=INFO,DRFAS
   )
 )
 set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger="%HBASE_SECURITY_LOGGER%"
 set HEAP_SETTINGS=%JAVA_HEAP_MAX% %JAVA_OFFHEAP_MAX%


@@ -32,7 +32,7 @@
 @rem set HBASE_OFFHEAPSIZE=1000
 @rem For example, to allocate 8G of offheap, to 8G:
-@rem etHBASE_OFFHEAPSIZE=8G
+@rem set HBASE_OFFHEAPSIZE=8G
 @rem Extra Java runtime options.
 @rem Below are what we set by default. May only work with SUN JVM.
@@ -82,6 +82,9 @@ set HBASE_OPTS=%HBASE_OPTS% "-XX:+UseConcMarkSweepGC" "-Djava.net.preferIPv4Stac
 @rem Tell HBase whether it should manage it's own instance of ZooKeeper or not.
 @rem set HBASE_MANAGES_ZK=true
+@rem Tell HBase the logger level and appenders
+@rem set HBASE_ROOT_LOGGER=INFO,DRFA
 @rem Uncomment to enable trace, you can change the options to use other exporters such as jaeger or
 @rem zipkin. See https://github.com/open-telemetry/opentelemetry-java-instrumentation on how to
 @rem configure exporters and other components through system properties.


@@ -126,11 +126,11 @@
 # export HBASE_MANAGES_ZK=true
 # The default log rolling policy is RFA, where the log file is rolled as per the size defined for the
-# RFA appender. Please refer to the log4j.properties file to see more details on this appender.
+# RFA appender. Please refer to the log4j2.properties file to see more details on this appender.
 # In case one needs to do log rolling on a date change, one should set the environment property
 # HBASE_ROOT_LOGGER to "<DESIRED_LOG LEVEL>,DRFA".
 # For example:
-# HBASE_ROOT_LOGGER=INFO,DRFA
+# export HBASE_ROOT_LOGGER=INFO,DRFA
 # The reason for changing default to RFA is to avoid the boundary case of filling out disk space as
 # DRFA doesn't put any cap on the log size. Please refer to HBase-5655 for more context.


@@ -1,27 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=WARN,console
log4j.threshold=WARN
# console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# ZooKeeper will still put stuff at WARN
log4j.logger.org.apache.zookeeper=ERROR


@@ -1,139 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
hbase.log.level=INFO
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollver at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
# Rolling File Appender properties
hbase.log.maxfilesize=256MB
hbase.log.maxbackupindex=20
# Rolling File Appender
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
hbase.security.log.maxfilesize=256MB
hbase.security.log.maxbackupindex=20
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %.1000m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender
log4j.appender.asyncconsole.target=System.err
# Custom Logging levels
log4j.logger.org.apache.zookeeper=${hbase.log.level}
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=${hbase.log.level}
log4j.logger.org.apache.hadoop.hbase.META=${hbase.log.level}
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=${hbase.log.level}
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level}
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only otherwise its OTT
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
# Uncomment the below if you want to remove logging of client region caching'
# and scan of hbase:meta messages
# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO
# EventCounter
# Add "EventCounter" to rootlogger if you want to use this
# Uncomment the line below to add EventCounter information
# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
# Prevent metrics subsystem start/stop messages (HBASE-17722)
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
# Disable request log by default, you can enable this by changing the appender
log4j.category.http.requests=INFO,NullAppender
log4j.additivity.http.requests=false
# Replace the above with this configuration if you want an http access.log
#log4j.appender.accessRFA=org.apache.log4j.RollingFileAppender
#log4j.appender.accessRFA.File=/var/log/hbase/access.log
#log4j.appender.accessRFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.accessRFA.layout.ConversionPattern=%m%n
#log4j.appender.accessRFA.MaxFileSize=200MB
#log4j.appender.accessRFA.MaxBackupIndex=10
# route http.requests to the accessRFA appender
#log4j.logger.http.requests=INFO,accessRFA
# disable http.requests.* entries going up to the root logger
#log4j.additivity.http.requests=false


@@ -0,0 +1,35 @@
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
status = warn
dest = err
name = PropertiesConfig
# console
appender.console.type = Console
appender.console.target = SYSTEM_ERR
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %m%n
rootLogger = WARN,console
# ZooKeeper will still put stuff at WARN
logger.zookeeper.name = org.apache.zookeeper
logger.zookeeper.level = ERROR

conf/log4j2.properties (new file, 137 lines)

@@ -0,0 +1,137 @@
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
status = warn
dest = err
name = PropertiesConfig
# Console appender
appender.console.type = Console
appender.console.target = SYSTEM_ERR
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
# Daily Rolling File Appender
appender.DRFA.type = RollingFile
appender.DRFA.name = DRFA
appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}
appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}
appender.DRFA.createOnDemand = true
appender.DRFA.layout.type = PatternLayout
appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.DRFA.policies.type = Policies
appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
appender.DRFA.policies.time.interval = 1
appender.DRFA.policies.time.modulate = true
appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB}
appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20}
# Rolling File Appender
appender.RFA.type = RollingFile
appender.RFA.name = RFA
appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}
appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i
appender.RFA.createOnDemand = true
appender.RFA.layout.type = PatternLayout
appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.RFA.policies.type = Policies
appender.RFA.policies.size.type = SizeBasedTriggeringPolicy
appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB}
appender.RFA.strategy.type = DefaultRolloverStrategy
appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20}
# Security Audit Appender
appender.RFAS.type = RollingFile
appender.RFAS.name = RFAS
appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}
appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i
appender.RFAS.createOnDemand = true
appender.RFAS.layout.type = PatternLayout
appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.RFAS.policies.type = Policies
appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy
appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB}
appender.RFAS.strategy.type = DefaultRolloverStrategy
appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20}
# Http Access Log RFA, uncomment this if you want an http access.log
# appender.AccessRFA.type = RollingFile
# appender.AccessRFA.name = AccessRFA
# appender.AccessRFA.fileName = /var/log/hbase/access.log
# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i
# appender.AccessRFA.createOnDemand = true
# appender.AccessRFA.layout.type = PatternLayout
# appender.AccessRFA.layout.pattern = %m%n
# appender.AccessRFA.policies.type = Policies
# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy
# appender.AccessRFA.policies.size.size = 200MB
# appender.AccessRFA.strategy.type = DefaultRolloverStrategy
# appender.AccessRFA.strategy.max = 10
# Null Appender
appender.NullAppender.type = Null
appender.NullAppender.name = NullAppender
rootLogger = ${sys:hbase.root.logger:-INFO,console}
logger.SecurityLogger.name = SecurityLogger
logger.SecurityLogger = ${sys:hbase.security.logger:-INFO,console}
logger.SecurityLogger.additivity = false
# Custom Logging levels
# logger.zookeeper.name = org.apache.zookeeper
# logger.zookeeper.level = ERROR
# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem
# logger.FSNamesystem.level = DEBUG
# logger.hbase.name = org.apache.hadoop.hbase
# logger.hbase.level = DEBUG
# logger.META.name = org.apache.hadoop.hbase.META
# logger.META.level = DEBUG
# Make these two classes below DEBUG to see more zk debug.
# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil
# logger.ZKUtil.level = DEBUG
# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher
# logger.ZKWatcher.level = DEBUG
# logger.dfs.name = org.apache.hadoop.dfs
# logger.dfs.level = DEBUG
# Prevent metrics subsystem start/stop messages (HBASE-17722)
logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig
logger.MetricsConfig.level = WARN
logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
logger.MetricsSinkAdapte.level = WARN
logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl
logger.MetricsSystemImpl.level = WARN
# Disable request log by default, you can enable this by changing the appender
logger.http.name = http.requests
logger.http.additivity = false
logger.http = INFO,NullAppender
# Replace the above with this configuration if you want an http access.log
# logger.http = INFO,AccessRFA
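
Every path, size, and level in this file goes through Log4j2's ${sys:name:-default} lookup, so the override knobs from the old log4j.properties (hbase.log.dir, hbase.root.logger, and friends) keep working as plain -D system properties; the same pattern recurs in the two test-resource copies of this file later in the diff. A hedged sketch of the mechanism, with illustrative values and a hypothetical class name:

  import org.apache.logging.log4j.LogManager;
  import org.apache.logging.log4j.Logger;

  public class LogDirDemo { // hypothetical demo class
    public static void main(String[] args) {
      // Overrides ${sys:hbase.log.dir:-.}; must run before Log4j2 initializes.
      System.setProperty("hbase.log.dir", "/tmp/hbase-logs");
      // Overrides ${sys:hbase.root.logger:-INFO,console}.
      System.setProperty("hbase.root.logger", "DEBUG,console");
      Logger log = LogManager.getLogger(LogDirDemo.class);
      log.debug("now visible on the console at DEBUG");
    }
  }
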


@@ -64,13 +64,23 @@
       <scope>runtime</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>runtime</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>runtime</scope>
     </dependency>
     <dependency>
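
The dependency swap is the same in every module touched below: slf4j-reload4j and reload4j go away, replaced by four Log4j2 artifacts. log4j-api and log4j-core are the implementation, log4j-slf4j-impl binds SLF4J onto it, and log4j-1.2-api keeps legacy org.apache.log4j calls working. A hedged sketch of the resulting routing (the demo class is hypothetical; the logger APIs are the real ones):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class BridgeDemo { // hypothetical demo class
    private static final Logger SLF4J_LOG = LoggerFactory.getLogger(BridgeDemo.class);
    // Legacy Log4j 1.x API still compiles and runs, backed by log4j-1.2-api.
    private static final org.apache.log4j.Logger LEGACY_LOG =
        org.apache.log4j.Logger.getLogger(BridgeDemo.class);

    public static void main(String[] args) {
      SLF4J_LOG.info("slf4j-api -> log4j-slf4j-impl -> log4j-core");
      LEGACY_LOG.info("log4j-1.2-api -> log4j-core");
    }
  }
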


@@ -1,121 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollver at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Rolling File Appender properties
hbase.log.maxfilesize=256MB
hbase.log.maxbackupindex=20
# Rolling File Appender
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
hbase.security.log.maxfilesize=256MB
hbase.security.log.maxbackupindex=20
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Custom Logging levels
log4j.logger.org.apache.zookeeper=INFO
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=INFO
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only otherwise its OTT
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
# Uncomment the below if you want to remove logging of client region caching'
# and scan of hbase:meta messages
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
# EventCounter
# Add "EventCounter" to rootlogger if you want to use this
# Uncomment the line below to add EventCounter information
# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
# Prevent metrics subsystem start/stop messages (HBASE-17722)
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN


@@ -0,0 +1,137 @@
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
status = warn
dest = err
name = PropertiesConfig
# Console appender
appender.console.type = Console
appender.console.target = SYSTEM_ERR
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
# Daily Rolling File Appender
appender.DRFA.type = RollingFile
appender.DRFA.name = DRFA
appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}
appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}
appender.DRFA.createOnDemand = true
appender.DRFA.layout.type = PatternLayout
appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.DRFA.policies.type = Policies
appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
appender.DRFA.policies.time.interval = 1
appender.DRFA.policies.time.modulate = true
appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB}
appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20}
# Rolling File Appender
appender.RFA.type = RollingFile
appender.RFA.name = RFA
appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}
appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i
appender.RFA.createOnDemand = true
appender.RFA.layout.type = PatternLayout
appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.RFA.policies.type = Policies
appender.RFA.policies.size.type = SizeBasedTriggeringPolicy
appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB}
appender.RFA.strategy.type = DefaultRolloverStrategy
appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20}
# Security Audit Appender
appender.RFAS.type = RollingFile
appender.RFAS.name = RFAS
appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}
appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i
appender.RFAS.createOnDemand = true
appender.RFAS.layout.type = PatternLayout
appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.RFAS.policies.type = Policies
appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy
appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB}
appender.RFAS.strategy.type = DefaultRolloverStrategy
appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20}
# Http Access Log RFA, uncomment this if you want an http access.log
# appender.AccessRFA.type = RollingFile
# appender.AccessRFA.name = AccessRFA
# appender.AccessRFA.fileName = /var/log/hbase/access.log
# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i
# appender.AccessRFA.createOnDemand = true
# appender.AccessRFA.layout.type = PatternLayout
# appender.AccessRFA.layout.pattern = %m%n
# appender.AccessRFA.policies.type = Policies
# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy
# appender.AccessRFA.policies.size.size = 200MB
# appender.AccessRFA.strategy.type = DefaultRolloverStrategy
# appender.AccessRFA.strategy.max = 10
# Null Appender
appender.NullAppender.type = Null
appender.NullAppender.name = NullAppender
rootLogger = ${sys:hbase.root.logger:-INFO,console}
logger.SecurityLogger.name = SecurityLogger
logger.SecurityLogger = ${sys:hbase.security.logger:-INFO,console}
logger.SecurityLogger.additivity = false
# Custom Logging levels
# logger.zookeeper.name = org.apache.zookeeper
# logger.zookeeper.level = ERROR
# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem
# logger.FSNamesystem.level = DEBUG
# logger.hbase.name = org.apache.hadoop.hbase
# logger.hbase.level = DEBUG
# logger.META.name = org.apache.hadoop.hbase.META
# logger.META.level = DEBUG
# Make these two classes below DEBUG to see more zk debug.
# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil
# logger.ZKUtil.level = DEBUG
# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher
# logger.ZKWatcher.level = DEBUG
# logger.dfs.name = org.apache.hadoop.dfs
# logger.dfs.level = DEBUG
# Prevent metrics subsystem start/stop messages (HBASE-17722)
logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig
logger.MetricsConfig.level = WARN
logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
logger.MetricsSinkAdapte.level = WARN
logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl
logger.MetricsSystemImpl.level = WARN
# Disable request log by default, you can enable this by changing the appender
logger.http.name = http.requests
logger.http.additivity = false
logger.http = INFO,NullAppender
# Replace the above with this configuration if you want an http access.log
# logger.http = INFO,AccessRFA


@@ -70,13 +70,23 @@
       <scope>runtime</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>runtime</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>runtime</scope>
     </dependency>
     <dependency>


@@ -1,121 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollver at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Rolling File Appender properties
hbase.log.maxfilesize=256MB
hbase.log.maxbackupindex=20
# Rolling File Appender
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
hbase.security.log.maxfilesize=256MB
hbase.security.log.maxbackupindex=20
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
# Custom Logging levels
log4j.logger.org.apache.zookeeper=INFO
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=INFO
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only otherwise its OTT
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
# Uncomment the below if you want to remove logging of client region caching'
# and scan of hbase:meta messages
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
# EventCounter
# Add "EventCounter" to rootlogger if you want to use this
# Uncomment the line below to add EventCounter information
# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
# Prevent metrics subsystem start/stop messages (HBASE-17722)
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN


@@ -0,0 +1,137 @@
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
status = warn
dest = err
name = PropertiesConfig
# Console appender
appender.console.type = Console
appender.console.target = SYSTEM_ERR
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
# Daily Rolling File Appender
appender.DRFA.type = RollingFile
appender.DRFA.name = DRFA
appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}
appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd}
appender.DRFA.createOnDemand = true
appender.DRFA.layout.type = PatternLayout
appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.DRFA.policies.type = Policies
appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
appender.DRFA.policies.time.interval = 1
appender.DRFA.policies.time.modulate = true
appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB}
appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20}
# Rolling File Appender
appender.RFA.type = RollingFile
appender.RFA.name = RFA
appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}
appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i
appender.RFA.createOnDemand = true
appender.RFA.layout.type = PatternLayout
appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.RFA.policies.type = Policies
appender.RFA.policies.size.type = SizeBasedTriggeringPolicy
appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB}
appender.RFA.strategy.type = DefaultRolloverStrategy
appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20}
# Security Audit Appender
appender.RFAS.type = RollingFile
appender.RFAS.name = RFAS
appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}
appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i
appender.RFAS.createOnDemand = true
appender.RFAS.layout.type = PatternLayout
appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
appender.RFAS.policies.type = Policies
appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy
appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB}
appender.RFAS.strategy.type = DefaultRolloverStrategy
appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20}
# Http Access Log RFA, uncomment this if you want an http access.log
# appender.AccessRFA.type = RollingFile
# appender.AccessRFA.name = AccessRFA
# appender.AccessRFA.fileName = /var/log/hbase/access.log
# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i
# appender.AccessRFA.createOnDemand = true
# appender.AccessRFA.layout.type = PatternLayout
# appender.AccessRFA.layout.pattern = %m%n
# appender.AccessRFA.policies.type = Policies
# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy
# appender.AccessRFA.policies.size.size = 200MB
# appender.AccessRFA.strategy.type = DefaultRolloverStrategy
# appender.AccessRFA.strategy.max = 10
# Null Appender
appender.NullAppender.type = Null
appender.NullAppender.name = NullAppender
rootLogger = ${sys:hbase.root.logger:-INFO,console}
logger.SecurityLogger.name = SecurityLogger
logger.SecurityLogger = ${sys:hbase.security.logger:-INFO,console}
logger.SecurityLogger.additivity = false
# Custom Logging levels
# logger.zookeeper.name = org.apache.zookeeper
# logger.zookeeper.level = ERROR
# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem
# logger.FSNamesystem.level = DEBUG
# logger.hbase.name = org.apache.hadoop.hbase
# logger.hbase.level = DEBUG
# logger.META.name = org.apache.hadoop.hbase.META
# logger.META.level = DEBUG
# Make these two classes below DEBUG to see more zk debug.
# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil
# logger.ZKUtil.level = DEBUG
# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher
# logger.ZKWatcher.level = DEBUG
# logger.dfs.name = org.apache.hadoop.dfs
# logger.dfs.level = DEBUG
# Prevent metrics subsystem start/stop messages (HBASE-17722)
logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig
logger.MetricsConfig.level = WARN
logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
logger.MetricsSinkAdapte.level = WARN
logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl
logger.MetricsSystemImpl.level = WARN
# Disable request log by default, you can enable this by changing the appender
logger.http.name = http.requests
logger.http.additivity = false
logger.http = INFO,NullAppender
# Replace the above with this configuration if you want an http access.log
# logger.http = INFO,AccessRFA


@@ -352,12 +352,16 @@
       <artifactId>jul-to-slf4j</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
     </dependency>
     <!-- Include OpenTelemetry agent -->
     <dependency>
@@ -365,6 +369,10 @@
       <artifactId>opentelemetry-javaagent</artifactId>
       <classifier>all</classifier>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+    </dependency>
   </dependencies>
   <profiles>
     <profile>


@@ -54,19 +54,14 @@
         <exclude>jline:jline</exclude>
         <exclude>com.github.stephenc.findbugs:findbugs-annotations</exclude>
         <exclude>commons-logging:commons-logging</exclude>
-        <exclude>log4j:log4j</exclude>
-        <exclude>ch.qos.reload4j:reload4j</exclude>
         <exclude>org.apache.hbase:hbase-shaded-client</exclude>
         <exclude>org.apache.hbase:hbase-shaded-client-byo-hadoop</exclude>
         <exclude>org.apache.hbase:hbase-shaded-mapreduce</exclude>
         <exclude>org.apache.htrace:htrace-core4</exclude>
         <exclude>org.apache.htrace:htrace-core</exclude>
         <exclude>org.apache.yetus:audience-annotations</exclude>
-        <exclude>org.slf4j:slf4j-api</exclude>
-        <exclude>org.slf4j:jcl-over-slf4j</exclude>
-        <exclude>org.slf4j:jul-to-slf4j</exclude>
-        <exclude>org.slf4j:slf4j-log4j12</exclude>
-        <exclude>org.slf4j:slf4j-reload4j</exclude>
+        <exclude>org.slf4j:*</exclude>
+        <exclude>org.apache.logging.log4j:*</exclude>
         <exclude>io.opentelemetry.javaagent:*</exclude>
       </excludes>
     </dependencySet>
@@ -148,14 +143,11 @@
       <includes>
         <include>com.github.stephenc.findbugs:findbugs-annotations</include>
         <include>commons-logging:commons-logging</include>
-        <include>ch.qos.reload4j:reload4j</include>
         <include>org.apache.htrace:htrace-core4</include>
         <include>org.apache.htrace:htrace-core</include>
         <include>org.apache.yetus:audience-annotations</include>
-        <include>org.slf4j:slf4j-api</include>
-        <include>org.slf4j:jcl-over-slf4j</include>
-        <include>org.slf4j:jul-to-slf4j</include>
-        <include>org.slf4j:slf4j-reload4j</include>
+        <include>org.slf4j:*</include>
+        <include>org.apache.logging.log4j:*</include>
         <include>io.opentelemetry:*</include>
       </includes>
     </dependencySet>


@@ -49,11 +49,9 @@
         <include>org.apache.hbase:hbase-metrics</include>
         <include>org.apache.hbase:hbase-metrics-api</include>
         <include>org.apache.hbase:hbase-procedure</include>
-        <include>org.apache.hbase:hbase-protocol</include>
         <include>org.apache.hbase:hbase-protocol-shaded</include>
         <include>org.apache.hbase:hbase-replication</include>
         <include>org.apache.hbase:hbase-rest</include>
-        <include>org.apache.hbase:hbase-rsgroup</include>
         <include>org.apache.hbase:hbase-server</include>
         <include>org.apache.hbase:hbase-shell</include>
         <include>org.apache.hbase:hbase-testing-util</include>
@@ -106,14 +104,11 @@
         <exclude>org.apache.hbase:hbase-shaded-mapreduce</exclude>
         <exclude>com.github.stephenc.findbugs:findbugs-annotations</exclude>
         <exclude>commons-logging:commons-logging</exclude>
-        <exclude>log4j:log4j</exclude>
-        <exclude>ch.qos.reload4j:reload4j</exclude>
         <exclude>org.apache.htrace:htrace-core4</exclude>
         <exclude>org.apache.htrace:htrace-core</exclude>
         <exclude>org.apache.yetus:audience-annotations</exclude>
-        <exclude>org.slf4j:slf4j-api</exclude>
-        <exclude>org.slf4j:slf4j-log4j12</exclude>
-        <exclude>org.slf4j:slf4j-reload4j</exclude>
+        <exclude>org.slf4j:*</exclude>
+        <exclude>org.apache.logging.log4j:*</exclude>
         <exclude>io.opentelemetry.javaagent:*</exclude>
       </excludes>
     </dependencySet>
@@ -207,14 +202,11 @@
       <includes>
         <include>com.github.stephenc.findbugs:findbugs-annotations</include>
         <include>commons-logging:commons-logging</include>
-        <include>ch.qos.reload4j:reload4j</include>
         <include>org.apache.htrace:htrace-core4</include>
         <include>org.apache.htrace:htrace-core</include>
         <include>org.apache.yetus:audience-annotations</include>
-        <include>org.slf4j:slf4j-api</include>
-        <include>org.slf4j:jcl-over-slf4j</include>
-        <include>org.slf4j:jul-to-slf4j</include>
-        <include>org.slf4j:slf4j-reload4j</include>
+        <include>org.slf4j:*</include>
+        <include>org.apache.logging.log4j:*</include>
         <include>io.opentelemetry:*</include>
       </includes>
     </dependencySet>


@@ -103,8 +103,6 @@
         <exclude>com.sun.jersey:*</exclude>
         <exclude>commons-logging:commons-logging</exclude>
         <exclude>jline:jline</exclude>
-        <exclude>log4j:log4j</exclude>
-        <exclude>ch.qos.reload4j:reload4j</exclude>
         <exclude>org.apache.hbase:hbase-shaded-client-byo-hadoop</exclude>
         <exclude>org.apache.hbase:hbase-shaded-client</exclude>
         <exclude>org.apache.hbase:hbase-shaded-mapreduce</exclude>
@@ -112,9 +110,8 @@
         <exclude>org.apache.htrace:htrace-core</exclude>
         <exclude>org.apache.yetus:audience-annotations</exclude>
         <exclude>org.jruby:jruby-complete</exclude>
-        <exclude>org.slf4j:slf4j-api</exclude>
-        <exclude>org.slf4j:slf4j-log4j12</exclude>
-        <exclude>org.slf4j:slf4j-reload4j</exclude>
+        <exclude>org.slf4j:*</exclude>
+        <exclude>org.apache.logging.log4j:*</exclude>
         <exclude>io.opentelemetry.javaagent:*</exclude>
       </excludes>
     </dependencySet>
@@ -208,12 +205,11 @@
       <includes>
         <include>com.github.stephenc.findbugs:findbugs-annotations</include>
         <include>commons-logging:commons-logging</include>
-        <include>ch.qos.reload4j:reload4j</include>
         <include>org.apache.htrace:htrace-core4</include>
         <include>org.apache.htrace:htrace-core</include>
         <include>org.apache.yetus:audience-annotations</include>
-        <include>org.slf4j:slf4j-api</include>
-        <include>org.slf4j:slf4j-reload4j</include>
+        <include>org.slf4j:*</include>
+        <include>org.apache.logging.log4j:*</include>
         <include>io.opentelemetry:*</include>
       </includes>
     </dependencySet>


@@ -149,13 +149,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>


@@ -96,11 +96,6 @@ public abstract class AsyncFSTestBase {
     createDirsAndSetProperties();
     Configuration conf = UTIL.getConfiguration();
-    // Error level to skip some warnings specific to the minicluster. See HBASE-4709
-    org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class)
-      .setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class)
-      .setLevel(org.apache.log4j.Level.ERROR);
     CLUSTER = new MiniDFSCluster.Builder(conf).numDataNodes(servers).build();
     CLUSTER.waitClusterUp();
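
The removed Log4j 1.x setLevel calls are simply dropped here, since programmatic level changes through the log4j-1.2-api bridge may not take effect. If the per-class quieting were still wanted, the Log4j2 equivalent would be Configurator.setLevel, roughly as sketched below (a sketch under that assumption, not part of this commit):

  import org.apache.logging.log4j.Level;
  import org.apache.logging.log4j.core.config.Configurator;

  // Hypothetical helper, shown only to illustrate the Log4j2 replacement API.
  final class MiniClusterLogLevels {
    static void quietMetricsWarnings() {
      Configurator.setLevel("org.apache.hadoop.metrics2.util.MBeans", Level.ERROR);
      Configurator.setLevel("org.apache.hadoop.metrics2.impl.MetricsSystemImpl", Level.ERROR);
    }
  }
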


@@ -177,13 +177,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>


@@ -17,73 +17,82 @@
  */
 package org.apache.hadoop.hbase.ipc;

-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;

+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.net.Address;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Captor;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;

-@RunWith(MockitoJUnitRunner.class)
 @Category({ ClientTests.class, SmallTests.class })
 public class TestFailedServersLog {

   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestFailedServersLog.class);

   static final int TEST_PORT = 9999;
   private Address addr;

-  @Mock
-  private Appender mockAppender;
-
-  @Captor
-  private ArgumentCaptor captorLoggingEvent;
+  private org.apache.logging.log4j.core.Appender mockAppender;

   @Before
   public void setup() {
-    LogManager.getRootLogger().addAppender(mockAppender);
+    mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(mockAppender.getName()).thenReturn("mockAppender");
+    when(mockAppender.isStarted()).thenReturn(true);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(FailedServers.class)).addAppender(mockAppender);
   }

   @After
   public void teardown() {
-    LogManager.getRootLogger().removeAppender(mockAppender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(FailedServers.class)).removeAppender(mockAppender);
   }

   @Test
   public void testAddToFailedServersLogging() {
-    Throwable nullException = new NullPointerException();
+    AtomicReference<org.apache.logging.log4j.Level> level = new AtomicReference<>();
+    AtomicReference<String> msg = new AtomicReference<String>();
+    doAnswer(new Answer<Void>() {
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        org.apache.logging.log4j.core.LogEvent logEvent =
+          invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+        level.set(logEvent.getLevel());
+        msg.set(logEvent.getMessage().getFormattedMessage());
+        return null;
+      }
+    }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    Throwable nullException = new NullPointerException();

     FailedServers fs = new FailedServers(new Configuration());
     addr = Address.fromParts("localhost", TEST_PORT);

     fs.addToFailedServers(addr, nullException);

-    Mockito.verify(mockAppender).doAppend((LoggingEvent) captorLoggingEvent.capture());
-    LoggingEvent loggingEvent = (LoggingEvent) captorLoggingEvent.getValue();
-    assertThat(loggingEvent.getLevel(), is(Level.DEBUG));
-    assertEquals("Added failed server with address " + addr.toString() + " to list caused by "
-        + nullException.toString(),
-      loggingEvent.getRenderedMessage());
+    verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    assertEquals(org.apache.logging.log4j.Level.DEBUG, level.get());
+    assertEquals("Added failed server with address " + addr.toString() + " to list caused by " +
+      nullException.toString(), msg.get());
   }
 }
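This capture idiom recurs throughout the patch: log4j2's `Appender` cannot be driven through log4j 1.x's `doAppend`/`ArgumentCaptor` route, so the mock is stubbed with `doAnswer` and attached to a core `Logger`. A condensed, self-contained sketch of the idiom (class and logger names here are illustrative, not from the patch):

```java
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.concurrent.atomic.AtomicReference;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.Logger;

public class LogCaptureExample {
  public static void main(String[] args) {
    // The mock must report a name and a started state, or log4j2 ignores it.
    Appender appender = mock(Appender.class);
    when(appender.getName()).thenReturn("capture");
    when(appender.isStarted()).thenReturn(true);

    AtomicReference<String> lastMessage = new AtomicReference<>();
    doAnswer(invocation -> {
      LogEvent event = invocation.getArgument(0, LogEvent.class);
      // Formatted message, i.e. with {} placeholders already substituted.
      lastMessage.set(event.getMessage().getFormattedMessage());
      return null;
    }).when(appender).append(any(LogEvent.class));

    // Casting to the core Logger exposes addAppender/removeAppender.
    Logger logger = (Logger) LogManager.getLogger("demo");
    logger.addAppender(appender);
    logger.error("boom: {}", 42);
    logger.removeAppender(appender);

    System.out.println(lastMessage.get()); // -> boom: 42
  }
}
```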


@@ -30,7 +30,6 @@ import static org.mockito.Mockito.when;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.Map;
-
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.NameCallback;
 import javax.security.auth.callback.PasswordCallback;
@@ -39,7 +38,6 @@ import javax.security.auth.callback.UnsupportedCallbackException;
 import javax.security.sasl.RealmCallback;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslClient;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -55,16 +53,15 @@ import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 import org.junit.Assert;
-import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.ExpectedException;
 import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import org.apache.hbase.thirdparty.com.google.common.base.Strings;
@@ -83,17 +80,12 @@ public class TestHBaseSaslRpcClient {
   static final String DEFAULT_USER_NAME = "principal";
   static final String DEFAULT_USER_PASSWORD = "password";

-  private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class);

   @Rule
   public ExpectedException exception = ExpectedException.none();

-  @BeforeClass
-  public static void before() {
-    Logger.getRootLogger().setLevel(Level.DEBUG);
-  }
-
   @Test
   public void testSaslClientUsesGivenRpcProtection() throws Exception {
     Token<? extends TokenIdentifier> token = createTokenMockWithCredentials(DEFAULT_USER_NAME,


@@ -235,13 +235,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>


@@ -17,27 +17,26 @@
  */
 package org.apache.hadoop.hbase.logging;

-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;

 import java.io.IOException;
+import java.util.concurrent.atomic.AtomicReference;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.mockito.ArgumentCaptor;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;

 /**
  * This should be in the hbase-logging module but the {@link HBaseClassTestRule} is in hbase-common
@@ -56,27 +55,42 @@ public class TestJul2Slf4j {
   private String loggerName = getClass().getName();

-  private Appender mockAppender;
+  private org.apache.logging.log4j.core.Appender mockAppender;

   @Before
   public void setUp() {
-    mockAppender = mock(Appender.class);
-    LogManager.getRootLogger().addAppender(mockAppender);
+    mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(mockAppender.getName()).thenReturn("mockAppender");
+    when(mockAppender.isStarted()).thenReturn(true);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(loggerName)).addAppender(mockAppender);
   }

   @After
   public void tearDown() {
-    LogManager.getRootLogger().removeAppender(mockAppender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(loggerName)).removeAppender(mockAppender);
   }

   @Test
   public void test() throws IOException {
+    AtomicReference<org.apache.logging.log4j.Level> level = new AtomicReference<>();
+    AtomicReference<String> msg = new AtomicReference<String>();
+    doAnswer(new Answer<Void>() {
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        org.apache.logging.log4j.core.LogEvent logEvent =
+          invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+        level.set(logEvent.getLevel());
+        msg.set(logEvent.getMessage().getFormattedMessage());
+        return null;
+      }
+    }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class));
     java.util.logging.Logger logger = java.util.logging.Logger.getLogger(loggerName);
     logger.info(loggerName);
-    ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
-    verify(mockAppender, times(1)).doAppend(captor.capture());
-    LoggingEvent loggingEvent = captor.getValue();
-    assertThat(loggingEvent.getLevel(), is(Level.INFO));
-    assertEquals(loggerName, loggingEvent.getRenderedMessage());
+    verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    assertEquals(org.apache.logging.log4j.Level.INFO, level.get());
+    assertEquals(loggerName, msg.get());
   }
 }
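TestJul2Slf4j exercises the java.util.logging → SLF4J → log4j2 route that the jul-to-slf4j bridge provides. For orientation, installing that bridge programmatically looks roughly like this (a sketch, assuming jul-to-slf4j and a log4j2 SLF4J binding are on the classpath; HBase itself wires this up through its scripts rather than in code like the below):

```java
import org.slf4j.bridge.SLF4JBridgeHandler;

public class JulBridgeExample {
  public static void main(String[] args) {
    // Remove java.util.logging's default console handler, then route
    // every j.u.l record through SLF4J (and from there to log4j2).
    SLF4JBridgeHandler.removeHandlersForRootLogger();
    SLF4JBridgeHandler.install();

    // This record now ends up in whatever log4j2 appenders are configured.
    java.util.logging.Logger.getLogger("demo").info("routed via SLF4J");
  }
}
```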


@@ -24,9 +24,6 @@ import java.io.IOException;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -44,23 +41,29 @@ public class TestLog4jUtils {

   @Test
   public void test() {
-    Logger zk = LogManager.getLogger("org.apache.zookeeper");
-    Level zkLevel = zk.getEffectiveLevel();
-    Logger hbaseZk = LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
-    Level hbaseZkLevel = hbaseZk.getEffectiveLevel();
-    Logger client = LogManager.getLogger("org.apache.hadoop.hbase.client");
-    Level clientLevel = client.getEffectiveLevel();
+    org.apache.logging.log4j.Logger zk =
+      org.apache.logging.log4j.LogManager.getLogger("org.apache.zookeeper");
+    org.apache.logging.log4j.Level zkLevel = zk.getLevel();
+    org.apache.logging.log4j.Logger hbaseZk =
+      org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
+    org.apache.logging.log4j.Level hbaseZkLevel = hbaseZk.getLevel();
+    org.apache.logging.log4j.Logger client =
+      org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.client");
+    org.apache.logging.log4j.Level clientLevel = client.getLevel();
     Log4jUtils.disableZkAndClientLoggers();
-    assertEquals(Level.OFF, zk.getLevel());
-    assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(zk.getName()));
-    assertEquals(Level.OFF, hbaseZk.getLevel());
-    assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(hbaseZk.getName()));
-    assertEquals(Level.OFF, client.getLevel());
-    assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(client.getName()));
+    assertEquals(org.apache.logging.log4j.Level.OFF, zk.getLevel());
+    assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+      Log4jUtils.getEffectiveLevel(zk.getName()));
+    assertEquals(org.apache.logging.log4j.Level.OFF, hbaseZk.getLevel());
+    assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+      Log4jUtils.getEffectiveLevel(hbaseZk.getName()));
+    assertEquals(org.apache.logging.log4j.Level.OFF, client.getLevel());
+    assertEquals(org.apache.logging.log4j.Level.OFF.toString(),
+      Log4jUtils.getEffectiveLevel(client.getName()));
     // restore the level
-    zk.setLevel(zkLevel);
-    hbaseZk.setLevel(hbaseZkLevel);
-    client.setLevel(clientLevel);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(zk.getName(), zkLevel);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(hbaseZk.getName(), hbaseZkLevel);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(client.getName(), clientLevel);
   }

   @Test
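This test drives Log4jUtils, the facade that keeps direct log4j2 calls out of common code by reaching InternalLog4jUtils through reflection (the InternalLog4jUtils hunk appears near the end of this patch). A stripped-down sketch of that indirection pattern, with simplified names rather than the actual HBase implementation:

```java
import java.lang.reflect.Method;

public final class LoggingFacade {

  // The concrete helper lives in a module that is allowed to see log4j2;
  // the class name below mirrors the patch but is resolved only at runtime.
  private static final String INTERNAL_CLASS =
    "org.apache.hadoop.hbase.logging.InternalLog4jUtils";

  private LoggingFacade() {
  }

  // Locate and invoke the helper reflectively, so compiling against this
  // facade never pulls a log4j2 dependency onto the caller's classpath.
  public static void setLogLevel(String loggerName, String levelName) {
    try {
      Class<?> clazz = Class.forName(INTERNAL_CLASS);
      Method m = clazz.getDeclaredMethod("setLogLevel", String.class, String.class);
      m.setAccessible(true);
      m.invoke(null, loggerName, levelName);
    } catch (ReflectiveOperationException e) {
      throw new AssertionError("log4j2 bridge not available", e);
    }
  }
}
```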


@@ -135,13 +135,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>


@@ -124,13 +124,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>


@@ -124,13 +124,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>


@@ -108,13 +108,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>


@@ -124,13 +124,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>


@@ -228,13 +228,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>


@@ -225,13 +225,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>


@@ -1,5 +1,7 @@
 <?xml version="1.0"?>
-<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="https://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
 <!--
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -19,23 +21,23 @@
  * limitations under the License.
  */
 -->
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <artifactId>hbase-build-configuration</artifactId>
     <groupId>org.apache.hbase</groupId>
    <version>2.5.0-SNAPSHOT</version>
     <relativePath>../hbase-build-configuration</relativePath>
   </parent>
   <artifactId>hbase-hadoop-compat</artifactId>
   <name>Apache HBase - Hadoop Compatibility</name>
   <description>
     Interfaces to be implemented in order to smooth
     over hadoop version differences
   </description>
   <build>
     <plugins>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>
@@ -43,117 +45,126 @@
           <skipAssembly>true</skipAssembly>
         </configuration>
       </plugin>
       <!-- Make a jar and put the sources in the jar -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
       </plugin>
       <plugin>
         <groupId>net.revelc.code</groupId>
         <artifactId>warbucks-maven-plugin</artifactId>
       </plugin>
     </plugins>
   </build>
   <dependencies>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-annotations</artifactId>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-logging</artifactId>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase.thirdparty</groupId>
       <artifactId>hbase-shaded-miscellaneous</artifactId>
     </dependency>
     <!-- General dependencies -->
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-metrics-api</artifactId>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>jcl-over-slf4j</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>jul-to-slf4j</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
-
   <profiles>
     <!-- Skip the tests in this module -->
     <profile>
       <id>skipHadoopCompatTests</id>
       <activation>
         <property>
           <name>skipHadoopCompatTests</name>
         </property>
       </activation>
       <properties>
         <surefire.skipFirstPart>true</surefire.skipFirstPart>
         <surefire.skipSecondPart>true</surefire.skipSecondPart>
       </properties>
     </profile>
     <profile>
       <id>eclipse-specific</id>
       <activation>
         <property>
           <name>m2e.version</name>
         </property>
       </activation>
       <build>
         <pluginManagement>
           <plugins>
             <!--This plugin's configuration is used to store Eclipse m2e settings
               only. It has no influence on the Maven build itself.-->
             <plugin>
               <groupId>org.eclipse.m2e</groupId>
               <artifactId>lifecycle-mapping</artifactId>
               <configuration>
                 <lifecycleMappingMetadata>
                   <pluginExecutions>
                   </pluginExecutions>
                 </lifecycleMappingMetadata>
               </configuration>
             </plugin>
           </plugins>
         </pluginManagement>
       </build>
     </profile>
   </profiles>
 </project>


@@ -180,13 +180,23 @@ limitations under the License.
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>


@@ -92,13 +92,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>


@@ -246,13 +246,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>


@@ -45,7 +45,6 @@ import org.apache.hadoop.util.HttpExceptionUtils;
 import org.apache.hadoop.util.ServletUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -307,8 +306,7 @@ public final class LogLevel {
   /**
    * A servlet implementation
    */
-  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-  @InterfaceStability.Unstable
+  @InterfaceAudience.Private
   public static class Servlet extends HttpServlet {
     private static final long serialVersionUID = 1L;


@@ -22,6 +22,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+
 import java.io.File;
 import java.io.IOException;
 import java.net.BindException;
@@ -53,9 +54,6 @@ import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -65,11 +63,11 @@ import org.junit.experimental.categories.Category;
 /**
  * Test LogLevel.
  */
-@Category({MiscTests.class, SmallTests.class})
+@Category({ MiscTests.class, SmallTests.class })
 public class TestLogLevel {

   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestLogLevel.class);

   private static String keystoresDir;
   private static String sslConfDir;
@@ -79,9 +77,10 @@ public class TestLogLevel {
   private static final String logName = TestLogLevel.class.getName();
   private static final String protectedPrefix = "protected";
   private static final String protectedLogName = protectedPrefix + "." + logName;
-  private static final Logger log = LogManager.getLogger(logName);
+  private static final org.apache.logging.log4j.Logger log =
+    org.apache.logging.log4j.LogManager.getLogger(logName);
   private final static String PRINCIPAL = "loglevel.principal";
   private final static String KEYTAB = "loglevel.keytab";
   private static MiniKdc kdc;
@@ -111,8 +110,7 @@ public class TestLogLevel {
   }

   /**
-   * Sets up {@link MiniKdc} for testing security.
-   * Copied from HBaseTestingUtility#setupMiniKdc().
+   * Sets up {@link MiniKdc} for testing security. Copied from HBaseTestingUtility#setupMiniKdc().
    */
   static private MiniKdc setupMiniKdc() throws Exception {
     Properties conf = MiniKdc.createConf();
@@ -130,7 +128,7 @@ public class TestLogLevel {
         kdc = new MiniKdc(conf, dir);
         kdc.start();
       } catch (BindException e) {
         FileUtils.deleteDirectory(dir); // clean directory
         numTries++;
         if (numTries == 3) {
           log.error("Failed setting up MiniKDC. Tried " + numTries + " times.");
@@ -156,15 +154,15 @@ public class TestLogLevel {
   }

   /**
-   * Get the SSL configuration.
-   * This method is copied from KeyStoreTestUtil#getSslConfig() in Hadoop.
+   * Get the SSL configuration. This method is copied from KeyStoreTestUtil#getSslConfig() in
+   * Hadoop.
    * @return {@link Configuration} instance with ssl configs loaded.
    * @param conf to pull client/server SSL settings filename from
    */
-  private static Configuration getSslConfig(Configuration conf){
+  private static Configuration getSslConfig(Configuration conf) {
     Configuration sslConf = new Configuration(false);
     String sslServerConfFile = conf.get(SSLFactory.SSL_SERVER_CONF_KEY);
     String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY);
     sslConf.addResource(sslServerConfFile);
     sslConf.addResource(sslClientConfFile);
     sslConf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile);
@@ -189,36 +187,29 @@ public class TestLogLevel {
   public void testCommandOptions() throws Exception {
     final String className = this.getClass().getName();

-    assertFalse(validateCommand(new String[] {"-foo" }));
+    assertFalse(validateCommand(new String[] { "-foo" }));
     // fail due to insufficient number of arguments
     assertFalse(validateCommand(new String[] {}));
-    assertFalse(validateCommand(new String[] {"-getlevel" }));
-    assertFalse(validateCommand(new String[] {"-setlevel" }));
-    assertFalse(validateCommand(new String[] {"-getlevel", "foo.bar:8080" }));
+    assertFalse(validateCommand(new String[] { "-getlevel" }));
+    assertFalse(validateCommand(new String[] { "-setlevel" }));
+    assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080" }));

     // valid command arguments
-    assertTrue(validateCommand(
-        new String[] {"-getlevel", "foo.bar:8080", className }));
-    assertTrue(validateCommand(
-        new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
-    assertTrue(validateCommand(
-        new String[] {"-getlevel", "foo.bar:8080", className }));
-    assertTrue(validateCommand(
-        new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
+    assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className }));
+    assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" }));
+    assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className }));
+    assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" }));

     // fail due to the extra argument
-    assertFalse(validateCommand(
-        new String[] {"-getlevel", "foo.bar:8080", className, "blah" }));
-    assertFalse(validateCommand(
-        new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
-    assertFalse(validateCommand(
-        new String[] {"-getlevel", "foo.bar:8080", className, "-setlevel", "foo.bar:8080",
-            className }));
+    assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "blah" }));
+    assertFalse(
+      validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
+    assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "-setlevel",
+      "foo.bar:8080", className }));
   }

   /**
    * Check to see if a command can be accepted.
-   *
    * @param args a String array of arguments
    * @return true if the command can be accepted, false if not.
    */
@@ -237,40 +228,32 @@ public class TestLogLevel {
   }

   /**
-   * Creates and starts a Jetty server binding at an ephemeral port to run
-   * LogLevel servlet.
+   * Creates and starts a Jetty server binding at an ephemeral port to run LogLevel servlet.
    * @param protocol "http" or "https"
    * @param isSpnego true if SPNEGO is enabled
    * @return a created HttpServer object
    * @throws Exception if unable to create or start a Jetty server
    */
-  private HttpServer createServer(String protocol, boolean isSpnego)
-      throws Exception {
-    HttpServer.Builder builder = new HttpServer.Builder()
-        .setName("..")
-        .addEndpoint(new URI(protocol + "://localhost:0"))
-        .setFindPort(true)
-        .setConf(serverConf);
+  private HttpServer createServer(String protocol, boolean isSpnego) throws Exception {
+    HttpServer.Builder builder = new HttpServer.Builder().setName("..")
+      .addEndpoint(new URI(protocol + "://localhost:0")).setFindPort(true).setConf(serverConf);
     if (isSpnego) {
       // Set up server Kerberos credentials.
       // Since the server may fall back to simple authentication,
       // use ACL to make sure the connection is Kerberos/SPNEGO authenticated.
-      builder.setSecurityEnabled(true)
-          .setUsernameConfKey(PRINCIPAL)
-          .setKeytabConfKey(KEYTAB)
-          .setACL(new AccessControlList("client"));
+      builder.setSecurityEnabled(true).setUsernameConfKey(PRINCIPAL).setKeytabConfKey(KEYTAB)
+        .setACL(new AccessControlList("client"));
     }

     // if using HTTPS, configure keystore/truststore properties.
     if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) {
-      builder = builder.
-          keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
-          .keyStore(sslConf.get("ssl.server.keystore.location"),
-              sslConf.get("ssl.server.keystore.password"),
-              sslConf.get("ssl.server.keystore.type", "jks"))
-          .trustStore(sslConf.get("ssl.server.truststore.location"),
-              sslConf.get("ssl.server.truststore.password"),
-              sslConf.get("ssl.server.truststore.type", "jks"));
+      builder = builder.keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
+        .keyStore(sslConf.get("ssl.server.keystore.location"),
+          sslConf.get("ssl.server.keystore.password"),
+          sslConf.get("ssl.server.keystore.type", "jks"))
+        .trustStore(sslConf.get("ssl.server.truststore.location"),
+          sslConf.get("ssl.server.truststore.password"),
+          sslConf.get("ssl.server.truststore.type", "jks"));
     }

     HttpServer server = builder.build();
@@ -279,38 +262,38 @@ public class TestLogLevel {
   }

   private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
-      final boolean isSpnego)
-      throws Exception {
-    testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, logName, Level.DEBUG.toString());
+      final boolean isSpnego) throws Exception {
+    testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego,
+      logName,
+      org.apache.logging.log4j.Level.DEBUG.toString());
   }

   private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
-      final boolean isSpnego, final String newLevel)
-      throws Exception {
-    testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, logName, newLevel);
+      final boolean isSpnego, final String newLevel) throws Exception {
+    testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego,
+      logName,
+      newLevel);
   }

   /**
    * Run both client and server using the given protocol.
-   *
    * @param bindProtocol specify either http or https for server
    * @param connectProtocol specify either http or https for client
    * @param isSpnego true if SPNEGO is enabled
    * @throws Exception if client can't accesss server.
    */
   private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol,
-      final boolean isSpnego, final String loggerName, final String newLevel)
-      throws Exception {
+      final boolean isSpnego, final String loggerName, final String newLevel) throws Exception {
     if (!LogLevel.isValidProtocol(bindProtocol)) {
       throw new Exception("Invalid server protocol " + bindProtocol);
     }
     if (!LogLevel.isValidProtocol(connectProtocol)) {
       throw new Exception("Invalid client protocol " + connectProtocol);
     }
-    Logger log = LogManager.getLogger(loggerName);
-    Level oldLevel = log.getLevel();
+    org.apache.logging.log4j.Logger log = org.apache.logging.log4j.LogManager.getLogger(loggerName);
+    org.apache.logging.log4j.Level oldLevel = log.getLevel();
     assertNotEquals("Get default Log Level which shouldn't be ERROR.",
-        Level.ERROR, oldLevel);
+      org.apache.logging.log4j.Level.ERROR, oldLevel);

     // configs needed for SPNEGO at server side
     if (isSpnego) {
@@ -331,8 +314,8 @@ public class TestLogLevel {
       String keytabFilePath = keyTabFile.getAbsolutePath();

-      UserGroupInformation clientUGI = UserGroupInformation.
-          loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath);
+      UserGroupInformation clientUGI =
+        UserGroupInformation.loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath);
       try {
         clientUGI.doAs((PrivilegedExceptionAction<Void>) () -> {
           // client command line
@@ -346,41 +329,37 @@ public class TestLogLevel {
     }

     // restore log level
-    GenericTestUtils.setLogLevel(log, oldLevel);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), oldLevel);
   }

   /**
-   * Run LogLevel command line to start a client to get log level of this test
-   * class.
-   *
+   * Run LogLevel command line to start a client to get log level of this test class.
    * @param protocol specify either http or https
    * @param authority daemon's web UI address
    * @throws Exception if unable to connect
    */
   private void getLevel(String protocol, String authority, String logName) throws Exception {
-    String[] getLevelArgs = {"-getlevel", authority, logName, "-protocol", protocol};
+    String[] getLevelArgs = { "-getlevel", authority, logName, "-protocol", protocol };
     CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf);
     cli.run(getLevelArgs);
   }

   /**
-   * Run LogLevel command line to start a client to set log level of this test
-   * class to debug.
-   *
+   * Run LogLevel command line to start a client to set log level of this test class to debug.
    * @param protocol specify either http or https
    * @param authority daemon's web UI address
    * @throws Exception if unable to run or log level does not change as expected
    */
   private void setLevel(String protocol, String authority, String logName, String newLevel)
     throws Exception {
-    String[] setLevelArgs = {"-setlevel", authority, logName, newLevel, "-protocol", protocol};
+    String[] setLevelArgs = { "-setlevel", authority, logName, newLevel, "-protocol", protocol };
     CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf);
     cli.run(setLevelArgs);
-    Logger log = LogManager.getLogger(logName);
+    org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getLogger(logName);
     assertEquals("new level not equal to expected: ", newLevel.toUpperCase(),
-        log.getEffectiveLevel().toString());
+      logger.getLevel().toString());
   }

   @Test
@@ -397,7 +376,6 @@ public class TestLogLevel {
   /**
    * Test setting log level to "Info".
-   *
    * @throws Exception if client can't set log level to INFO.
    */
   @Test
@@ -407,7 +385,6 @@ public class TestLogLevel {
   /**
    * Test setting log level to "Error".
-   *
    * @throws Exception if client can't set log level to ERROR.
    */
   @Test
@@ -417,18 +394,15 @@ public class TestLogLevel {
   /**
    * Server runs HTTP, no SPNEGO.
-   *
-   * @throws Exception if http client can't access http server,
-   * or http client can access https server.
+   * @throws Exception if http client can't access http server, or http client can access https
+   *           server.
    */
   @Test
   public void testLogLevelByHttp() throws Exception {
     testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, false);
     try {
-      testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS,
-          false);
-      fail("An HTTPS Client should not have succeeded in connecting to a " +
-          "HTTP server");
+      testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, false);
+      fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server");
     } catch (SSLException e) {
       exceptionShouldContains("Unrecognized SSL message", e);
     }
@@ -436,18 +410,15 @@ public class TestLogLevel {
   /**
    * Server runs HTTP + SPNEGO.
-   *
-   * @throws Exception if http client can't access http server,
-   * or http client can access https server.
+   * @throws Exception if http client can't access http server, or http client can access https
+   *           server.
    */
   @Test
   public void testLogLevelByHttpWithSpnego() throws Exception {
     testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, true);
     try {
-      testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS,
-          true);
-      fail("An HTTPS Client should not have succeeded in connecting to a " +
-          "HTTP server");
+      testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, true);
+      fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server");
     } catch (SSLException e) {
       exceptionShouldContains("Unrecognized SSL message", e);
     }
@@ -455,19 +426,15 @@ public class TestLogLevel {
   /**
    * Server runs HTTPS, no SPNEGO.
-   *
-   * @throws Exception if https client can't access https server,
-   * or https client can access http server.
+   * @throws Exception if https client can't access https server, or https client can access http
+   *           server.
    */
   @Test
   public void testLogLevelByHttps() throws Exception {
-    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
-        false);
+    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, false);
     try {
-      testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP,
-          false);
-      fail("An HTTP Client should not have succeeded in connecting to a " +
-          "HTTPS server");
+      testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, false);
+      fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server");
     } catch (SocketException e) {
       exceptionShouldContains("Unexpected end of file from server", e);
     }
@@ -475,32 +442,27 @@ public class TestLogLevel {
   /**
    * Server runs HTTPS + SPNEGO.
-   *
-   * @throws Exception if https client can't access https server,
-   * or https client can access http server.
+   * @throws Exception if https client can't access https server, or https client can access http
+   *           server.
    */
   @Test
   public void testLogLevelByHttpsWithSpnego() throws Exception {
-    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
-        true);
+    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, true);
     try {
-      testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP,
-          true);
-      fail("An HTTP Client should not have succeeded in connecting to a " +
-          "HTTPS server");
+      testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, true);
+      fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server");
     } catch (SocketException e) {
       exceptionShouldContains("Unexpected end of file from server", e);
     }
   }

   /**
-   * Assert that a throwable or one of its causes should contain the substr in its message.
-   *
-   * Ideally we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util
-   * method which asserts t.toString() contains the substr. As the original throwable may have been
-   * wrapped in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes.
-   * After stop supporting Hadoop2, this method can be removed and assertion in tests can use
-   * t.getCause() directly, similar to HADOOP-15280.
+   * Assert that a throwable or one of its causes should contain the substr in its message. Ideally
+   * we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util method
+   * which asserts t.toString() contains the substr. As the original throwable may have been wrapped
+   * in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. After stop
+   * supporting Hadoop2, this method can be removed and assertion in tests can use t.getCause()
+   * directly, similar to HADOOP-15280.
    */
   private static void exceptionShouldContains(String substr, Throwable throwable) {
     Throwable t = throwable;
@@ -512,6 +474,6 @@ public class TestLogLevel {
         t = t.getCause();
       }
       throw new AssertionError("Expected to find '" + substr + "' but got unexpected exception:" +
         StringUtils.stringifyException(throwable), throwable);
   }
 }
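The test above drives HBase's LogLevel tool end to end. For orientation, the client side boils down to something like the following sketch, based on the `CLI` calls visible in the diff; the package name (org.apache.hadoop.hbase.http.log), the port, and the logger name are illustrative assumptions, not taken from the patch:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.log.LogLevel;

public class LogLevelCliExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Ask the daemon's web UI for the current level of a logger ...
    new LogLevel.CLI(conf).run(new String[] {
      "-getlevel", "localhost:16010", "org.apache.hadoop.hbase", "-protocol", "http" });
    // ... or change it; after this patch the servlet applies the change
    // through the log4j2 APIs rather than log4j 1.x.
    new LogLevel.CLI(conf).run(new String[] {
      "-setlevel", "localhost:16010", "org.apache.hadoop.hbase", "DEBUG", "-protocol", "http" });
  }
}
```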


@@ -268,13 +268,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>


@@ -38,7 +38,7 @@
     <testResource>
       <directory>src/test/resources</directory>
       <includes>
-        <include>log4j.properties</include>
+        <include>log4j2.properties</include>
       </includes>
     </testResource>
   </testResources>
@@ -80,7 +80,7 @@
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <artifactId>jcl-over-slf4j</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -89,9 +89,24 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>provided</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>


@ -1,46 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.yetus.audience.InterfaceAudience;
/**
* Logger class that buffers before trying to log to the specified console.
*/
@InterfaceAudience.Private
public class AsyncConsoleAppender extends org.apache.log4j.AsyncAppender {
private final org.apache.log4j.ConsoleAppender consoleAppender;
public AsyncConsoleAppender() {
super();
consoleAppender = new org.apache.log4j.ConsoleAppender(
new org.apache.log4j.PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n"));
this.addAppender(consoleAppender);
}
public void setTarget(String value) {
consoleAppender.setTarget(value);
}
@Override
public void activateOptions() {
consoleAppender.activateOptions();
super.activateOptions();
}
}


@ -19,16 +19,15 @@ package org.apache.hadoop.hbase.logging;
 import java.io.File;
 import java.io.IOException;
-import java.util.Enumeration;
 import java.util.HashSet;
 import java.util.Set;
 import org.apache.yetus.audience.InterfaceAudience;
 /**
- * The actual class for operating on log4j.
+ * The actual class for operating on log4j2.
  * <p/>
  * This class will depend on log4j directly, so callers should not use this class directly to avoid
- * introducing log4j dependencies to downstream users. Please call the methods in
+ * introducing log4j2 dependencies to downstream users. Please call the methods in
  * {@link Log4jUtils}, as they will call the methods here through reflection.
  */
 @InterfaceAudience.Private
@ -38,32 +37,53 @@ final class InternalLog4jUtils {
   }
   static void setLogLevel(String loggerName, String levelName) {
-    org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
-    org.apache.log4j.Level level = org.apache.log4j.Level.toLevel(levelName.toUpperCase());
+    org.apache.logging.log4j.Level level =
+      org.apache.logging.log4j.Level.toLevel(levelName.toUpperCase());
     if (!level.toString().equalsIgnoreCase(levelName)) {
       throw new IllegalArgumentException("Unsupported log level " + levelName);
     }
-    logger.setLevel(level);
+    org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName, level);
   }
   static String getEffectiveLevel(String loggerName) {
-    org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
-    return logger.getEffectiveLevel().toString();
+    org.apache.logging.log4j.Logger logger =
+      org.apache.logging.log4j.LogManager.getLogger(loggerName);
+    return logger.getLevel().name();
   }
   static Set<File> getActiveLogFiles() throws IOException {
     Set<File> ret = new HashSet<>();
-    org.apache.log4j.Appender a;
-    @SuppressWarnings("unchecked")
-    Enumeration<org.apache.log4j.Appender> e =
-      org.apache.log4j.Logger.getRootLogger().getAllAppenders();
-    while (e.hasMoreElements()) {
-      a = e.nextElement();
-      if (a instanceof org.apache.log4j.FileAppender) {
-        org.apache.log4j.FileAppender fa = (org.apache.log4j.FileAppender) a;
-        String filename = fa.getFile();
-        ret.add(new File(filename));
-      }
+    org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
+    if (!(logger instanceof org.apache.logging.log4j.core.Logger)) {
+      return ret;
+    }
+    org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
+    for (org.apache.logging.log4j.core.Appender appender : coreLogger.getAppenders().values()) {
+      if (appender instanceof org.apache.logging.log4j.core.appender.FileAppender) {
+        String fileName =
+          ((org.apache.logging.log4j.core.appender.FileAppender) appender).getFileName();
+        ret.add(new File(fileName));
+      } else if (appender instanceof org.apache.logging.log4j.core.appender.AbstractFileAppender) {
+        String fileName =
+          ((org.apache.logging.log4j.core.appender.AbstractFileAppender<?>) appender).getFileName();
+        ret.add(new File(fileName));
+      } else if (appender instanceof org.apache.logging.log4j.core.appender.RollingFileAppender) {
+        String fileName =
+          ((org.apache.logging.log4j.core.appender.RollingFileAppender) appender).getFileName();
+        ret.add(new File(fileName));
+      } else if (appender instanceof org.apache.logging.log4j.core.appender.RandomAccessFileAppender) {
+        String fileName =
+          ((org.apache.logging.log4j.core.appender.RandomAccessFileAppender) appender).getFileName();
+        ret.add(new File(fileName));
+      } else if (appender instanceof org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) {
+        String fileName =
+          ((org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) appender).getFileName();
+        ret.add(new File(fileName));
+      }
     }
     return ret;
   }
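The whole reflection dance bottoms out in two ordinary log4j2 calls. A minimal sketch, assuming log4j-api and log4j-core are on the classpath (the logger name is only an example):

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.config.Configurator;

public class LogLevelDemo {
  public static void main(String[] args) {
    // Same call the patched setLogLevel relies on: change a named logger at runtime.
    Configurator.setLevel("org.apache.hadoop.hbase", Level.DEBUG);
    // log4j2 has no getEffectiveLevel(); Logger.getLevel() is already resolved.
    System.out.println(LogManager.getLogger("org.apache.hadoop.hbase").getLevel().name());
  }
}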


@ -0,0 +1,288 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.log4j;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.Writer;
/**
* Just a copy of the old log4j12 FileAppender. The ContainerLogAppender for YARN NodeManager needs
* this class but the log4j-1.2-api bridge does not provide it, which causes the UTs in the
* hbase-mapreduce module to fail if we start a separate MR cluster.
*/
public class FileAppender extends WriterAppender {
/**
* Controls file truncation. The default value for this variable is <code>true</code>, meaning
* that by default a <code>FileAppender</code> will append to an existing file and not truncate
* it.
* <p>
* This option is meaningful only if the FileAppender opens the file.
*/
protected boolean fileAppend = true;
/**
* The name of the log file.
*/
protected String fileName = null;
/**
* Do we do bufferedIO?
*/
protected boolean bufferedIO = false;
/**
* Determines the size of the IO buffer. Default is 8K.
*/
protected int bufferSize = 8 * 1024;
/**
* The default constructor does not do anything.
*/
public FileAppender() {
}
/**
* Instantiate a <code>FileAppender</code> and open the file designated by <code>fileName</code>.
* The opened filename will become the output destination for this appender.
* <p>
* If the <code>append</code> parameter is true, the file will be appended to. Otherwise, the file
* designated by <code>fileName</code> will be truncated before being opened.
* <p>
* If the <code>bufferedIO</code> parameter is <code>true</code>, then buffered IO will be used to
* write to the output file.
*/
public FileAppender(Layout layout, String fileName, boolean append, boolean bufferedIO,
int bufferSize) throws IOException {
this.layout = layout;
this.setFile(fileName, append, bufferedIO, bufferSize);
}
/**
* Instantiate a FileAppender and open the file designated by <code>fileName</code>. The opened
* filename will become the output destination for this appender.
* <p>
* If the <code>append</code> parameter is true, the file will be appended to. Otherwise, the file
* designated by <code>fileName</code> will be truncated before being opened.
*/
public FileAppender(Layout layout, String fileName, boolean append) throws IOException {
this.layout = layout;
this.setFile(fileName, append, false, bufferSize);
}
/**
* Instantiate a FileAppender and open the file designated by <code>filename</code>. The opened
* filename will become the output destination for this appender.
* <p>
* The file will be appended to.
*/
public FileAppender(Layout layout, String fileName) throws IOException {
this(layout, fileName, true);
}
/**
* The <b>File</b> property takes a string value which should be the name of the file to append
* to.
* <p>
* <font color="#DD0044"><b>Note that the special values "System.out" or "System.err" are no
* longer honored.</b></font>
* <p>
* Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
* options are set.
*/
public void setFile(String file) {
// Trim spaces from both ends. The user probably does not want
// trailing spaces in file names.
String val = file.trim();
fileName = val;
}
/**
* Returns the value of the <b>Append</b> option.
*/
public boolean getAppend() {
return fileAppend;
}
/** Returns the value of the <b>File</b> option. */
public String getFile() {
return fileName;
}
/**
* If the value of <b>File</b> is not <code>null</code>, then {@link #setFile} is called with the
* values of <b>File</b> and <b>Append</b> properties.
* @since 0.8.1
*/
@Override
public void activateOptions() {
if (fileName != null) {
try {
setFile(fileName, fileAppend, bufferedIO, bufferSize);
} catch (java.io.IOException e) {
errorHandler.error("setFile(" + fileName + "," + fileAppend + ") call failed.", e,
org.apache.log4j.spi.ErrorCode.FILE_OPEN_FAILURE);
}
}
}
/**
* Closes the previously opened file.
*/
protected void closeFile() {
if (this.qw != null) {
try {
this.qw.close();
} catch (java.io.IOException e) {
if (e instanceof InterruptedIOException) {
Thread.currentThread().interrupt();
}
// Exceptionally, it does not make sense to delegate to an
// ErrorHandler. Since a closed appender is basically dead.
}
}
}
/**
* Get the value of the <b>BufferedIO</b> option.
* <p>
* BufferedIO will significantly increase performance on heavily loaded systems.
*/
public boolean getBufferedIO() {
return this.bufferedIO;
}
/**
* Get the size of the IO buffer.
*/
public int getBufferSize() {
return this.bufferSize;
}
/**
* The <b>Append</b> option takes a boolean value. It is set to <code>true</code> by default. If
* true, then <code>File</code> will be opened in append mode by {@link #setFile setFile} (see
* above). Otherwise, {@link #setFile setFile} will open <code>File</code> in truncate mode.
* <p>
* Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
* options are set.
*/
public void setAppend(boolean flag) {
fileAppend = flag;
}
/**
* The <b>BufferedIO</b> option takes a boolean value. It is set to <code>false</code> by default.
* If true, then <code>File</code> will be opened and the resulting {@link java.io.Writer} wrapped
* around a {@link BufferedWriter}. BufferedIO will significantly increase performance on heavily
* loaded systems.
*/
public void setBufferedIO(boolean bufferedIO) {
this.bufferedIO = bufferedIO;
if (bufferedIO) {
immediateFlush = false;
}
}
/**
* Set the size of the IO buffer.
*/
public void setBufferSize(int bufferSize) {
this.bufferSize = bufferSize;
}
/**
* <p>
* Sets and <i>opens</i> the file where the log output will go. The specified file must be
* writable.
* <p>
* If there was already an opened file, then the previous file is closed first.
* <p>
* <b>Do not use this method directly. To configure a FileAppender or one of its subclasses, set
* its properties one by one and then call activateOptions.</b>
* @param fileName The path to the log file.
* @param append If true will append to fileName. Otherwise will truncate fileName.
*/
public synchronized void setFile(String fileName, boolean append, boolean bufferedIO,
int bufferSize) throws IOException {
// It does not make sense to have immediate flush and bufferedIO.
if (bufferedIO) {
setImmediateFlush(false);
}
reset();
FileOutputStream ostream = null;
try {
//
// attempt to create file
//
ostream = new FileOutputStream(fileName, append);
} catch (FileNotFoundException ex) {
//
// if parent directory does not exist then
// attempt to create it and try to create file
// see bug 9150
//
String parentName = new File(fileName).getParent();
if (parentName != null) {
File parentDir = new File(parentName);
if (!parentDir.exists() && parentDir.mkdirs()) {
ostream = new FileOutputStream(fileName, append);
} else {
throw ex;
}
} else {
throw ex;
}
}
Writer fw = createWriter(ostream);
if (bufferedIO) {
fw = new BufferedWriter(fw, bufferSize);
}
this.setQWForFiles(fw);
this.fileName = fileName;
this.fileAppend = append;
this.bufferedIO = bufferedIO;
this.bufferSize = bufferSize;
writeHeader();
}
/**
* Sets the quiet writer being used. This method is overridden by {@code RollingFileAppender}.
*/
protected void setQWForFiles(Writer writer) {
this.qw = new org.apache.log4j.helpers.QuietWriter(writer, errorHandler);
}
/**
* Close any previously opened file and call the parent's <code>reset</code>.
*/
@Override
protected void reset() {
closeFile();
this.fileName = null;
super.reset();
}
}
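A hypothetical usage sketch of the copied appender, assuming the log4j-1.2-api bridge also supplies the PatternLayout and WriterAppender this class expects (the file path and pattern are examples only):

import org.apache.log4j.FileAppender;
import org.apache.log4j.PatternLayout;

public class FileAppenderDemo {
  public static void main(String[] args) throws java.io.IOException {
    // Open /tmp/demo.log in append mode; this constructor opens the file immediately.
    FileAppender appender =
      new FileAppender(new PatternLayout("%d{ISO8601} %-5p %c{2}: %m%n"), "/tmp/demo.log", true);
    appender.setBufferedIO(true); // also turns off immediate flush, per setBufferedIO above
    appender.activateOptions();   // re-opens the file with a BufferedWriter
    appender.close();
  }
}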


@ -1,68 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log
# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}
# Logging Threshold
log4j.threshold=ALL
#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
# Debugging Pattern format
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
# Custom Logging levels
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop=WARN
log4j.logger.org.apache.zookeeper=ERROR
log4j.logger.org.apache.hadoop.hbase=DEBUG
#These settings are workarounds against spurious logs from the minicluster.
#See HBASE-4709
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE


@ -0,0 +1,68 @@
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
status = debug
dest = err
name = PropertiesConfig
appender.console.type = Console
appender.console.target = SYSTEM_ERR
appender.console.name = Console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
rootLogger = INFO,Console
logger.hadoop.name = org.apache.hadoop
logger.hadoop.level = WARN
logger.zookeeper.name = org.apache.zookeeper
logger.zookeeper.level = ERROR
logger.hbase.name = org.apache.hadoop.hbase
logger.hbase.level = DEBUG
# These settings are workarounds against spurious logs from the minicluster. See HBASE-4709
logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig
logger.MetricsConfig.level = WARN
logger.MetricsSinkAdapter.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
logger.MetricsSinkAdapter.level = WARN
logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl
logger.MetricsSystemImpl.level = WARN
logger.MBeans.name = org.apache.hadoop.metrics2.util.MBeans
logger.MBeans.level = WARN
logger.directory.name = org.apache.directory
logger.directory.level = WARN
logger.directory.additivity = false
logger.netty.name = org.apache.hbase.thirdparty.io.netty.channel
logger.netty.level = DEBUG
# For testing where we want to capture the log message of these special loggers
logger.FailedServers.name = org.apache.hadoop.hbase.ipc.FailedServers
logger.FailedServers.level = DEBUG
logger.RSRpcServices.name = org.apache.hadoop.hbase.regionserver.RSRpcServices
logger.RSRpcServices.level = DEBUG
logger.TestJul2Slf4j.name = org.apache.hadoop.hbase.logging.TestJul2Slf4j
logger.TestJul2Slf4j.level = DEBUG
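For illustration, a JVM can be pointed at a file like this one either with -Dlog4j2.configurationFile on the command line or programmatically, as long as the property is set before log4j2 initializes; a minimal sketch (the path is an example only):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class ConfigFileDemo {
  public static void main(String[] args) {
    // Must run before the first LogManager call, or a configuration is already chosen.
    System.setProperty("log4j2.configurationFile", "file:/etc/hbase/conf/log4j2.properties");
    Logger log = LogManager.getLogger(ConfigFileDemo.class);
    log.info("configured from the properties file named above");
  }
}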


@ -306,13 +306,23 @@
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>org.slf4j</groupId>
-  <artifactId>slf4j-reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>ch.qos.reload4j</groupId>
-  <artifactId>reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-core</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-slf4j-impl</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-1.2-api</artifactId>
   <scope>test</scope>
 </dependency>
 </dependencies>


@ -24,24 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.atomic.AtomicReference;
 import javax.crypto.spec.SecretKeySpec;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.zookeeper.ZooKeeper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
@ -51,6 +43,7 @@ import org.apache.hadoop.hbase.io.crypto.Cipher;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
@ -60,6 +53,10 @@ import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.ZooKeeper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.AlreadySelectedException;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
@ -582,7 +579,7 @@ public class LoadTestTool extends AbstractHBaseTool {
   @Override
   protected int doWork() throws IOException {
     if (!isVerbose) {
-      LogManager.getLogger(ZooKeeper.class.getName()).setLevel(Level.WARN);
+      Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN");
     }
     if (numTables > 1) {
       return parallelLoadTables();


@ -133,13 +133,18 @@
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>org.slf4j</groupId>
-  <artifactId>slf4j-reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>ch.qos.reload4j</groupId>
-  <artifactId>reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-core</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-slf4j-impl</artifactId>
   <scope>test</scope>
 </dependency>
 </dependencies>


@ -141,13 +141,18 @@
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>org.slf4j</groupId>
-  <artifactId>slf4j-reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>ch.qos.reload4j</groupId>
-  <artifactId>reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-core</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-slf4j-impl</artifactId>
   <scope>test</scope>
 </dependency>
 </dependencies>


@ -130,13 +130,18 @@
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>org.slf4j</groupId>
-  <artifactId>slf4j-reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>ch.qos.reload4j</groupId>
-  <artifactId>reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-core</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-slf4j-impl</artifactId>
   <scope>test</scope>
 </dependency>
 </dependencies>


@ -162,10 +162,9 @@
 <exclude>com.google.j2objc:j2objc-annotations</exclude>
 <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
 <exclude>junit:junit</exclude>
-<exclude>log4j:log4j</exclude>
-<exclude>ch.qos.reload4j:*</exclude>
 <exclude>commons-logging:commons-logging</exclude>
-<exclude>org.slf4j:slf4j-api</exclude>
+<exclude>org.slf4j:*</exclude>
+<exclude>org.apache.logging.log4j:*</exclude>
 <exclude>org.apache.yetus:audience-annotations</exclude>
 <exclude>com.github.stephenc.fingbugs:*</exclude>
 <exclude>com.github.spotbugs:*</exclude>


@ -136,13 +136,18 @@
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>org.slf4j</groupId>
-  <artifactId>slf4j-reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>ch.qos.reload4j</groupId>
-  <artifactId>reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-core</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-slf4j-impl</artifactId>
   <scope>test</scope>
 </dependency>
 </dependencies>


@ -372,13 +372,23 @@
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>org.slf4j</groupId>
-  <artifactId>slf4j-reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>ch.qos.reload4j</groupId>
-  <artifactId>reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-core</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-slf4j-impl</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-1.2-api</artifactId>
   <scope>test</scope>
 </dependency>
 </dependencies>


@ -181,13 +181,23 @@
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>org.slf4j</groupId>
-  <artifactId>slf4j-reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>ch.qos.reload4j</groupId>
-  <artifactId>reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-core</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-slf4j-impl</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-1.2-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>


@ -533,13 +533,23 @@
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>org.slf4j</groupId>
-  <artifactId>slf4j-reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-api</artifactId>
   <scope>test</scope>
 </dependency>
 <dependency>
-  <groupId>ch.qos.reload4j</groupId>
-  <artifactId>reload4j</artifactId>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-core</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-slf4j-impl</artifactId>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.apache.logging.log4j</groupId>
+  <artifactId>log4j-1.2-api</artifactId>
   <scope>test</scope>
 </dependency>
 </dependencies>


@ -696,7 +696,6 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
  * This is used before starting HDFS and map-reduce mini-clusters Run something like the below to
  * check for the likes of '/tmp' references -- i.e. references outside of the test data dir -- in
  * the conf.
- *
  * <pre>
  * Configuration conf = TEST_UTIL.getConfiguration();
  * for (Iterator&lt;Map.Entry&lt;String, String&gt;&gt; i = conf.iterator(); i.hasNext();) {


@ -28,11 +28,10 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@ -60,10 +59,13 @@ public class TestAsyncTableBatchRetryImmediately {
   private static AsyncConnection CONN;
+  private static String LOG_LEVEL;
   @BeforeClass
   public static void setUp() throws Exception {
     // disable the debug log to avoid flooding the output
-    LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
     UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, 1024);
     UTIL.startMiniCluster(1);
     Table table = UTIL.createTable(TABLE_NAME, FAMILY);
@ -78,6 +80,9 @@ public class TestAsyncTableBatchRetryImmediately {
   @AfterClass
   public static void tearDown() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     CONN.close();
     UTIL.shutdownMiniCluster();
   }
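The save-and-restore idiom above generalizes to any noisy logger; a minimal sketch built on the Log4jUtils helper from this patch (the logger name is illustrative):

// Assumes: import org.apache.hadoop.hbase.logging.Log4jUtils;
String saved = Log4jUtils.getEffectiveLevel("org.example.ChattyClass");
Log4jUtils.setLogLevel("org.example.ChattyClass", "INFO");
try {
  // ... run the noisy operation ...
} finally {
  if (saved != null) {
    Log4jUtils.setLogLevel("org.example.ChattyClass", saved); // put the old level back
  }
}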


@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
@ -64,15 +65,18 @@ public class TestMultiRespectsLimits {
     CompatibilityFactory.getInstance(MetricsAssertHelper.class);
   private final static byte[] FAMILY = Bytes.toBytes("D");
   public static final int MAX_SIZE = 100;
+  private static String LOG_LEVEL;
   @Rule
   public TestName name = new TestName();
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-    TEST_UTIL.getConfiguration().setLong(
-      HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
-      MAX_SIZE);
+    // disable the debug log to avoid flooding the output
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
+    TEST_UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
+      MAX_SIZE);
     // Only start on regionserver so that all regions are on the same server.
     TEST_UTIL.startMiniCluster(1);
@ -80,6 +84,9 @@ public class TestMultiRespectsLimits {
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     TEST_UTIL.shutdownMiniCluster();
   }


@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.RPCTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.After;
@ -82,10 +83,8 @@ public class TestProtoBufRpc {
     this.conf = HBaseConfiguration.create();
     this.conf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY,
       rpcServerImpl);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer")
-      .setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace")
-      .setLevel(org.apache.log4j.Level.TRACE);
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer", "ERROR");
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer.trace", "TRACE");
     // Create server side implementation
     // Get RPC server for server side implementation
     this.server = RpcServerFactory.createRpcServer(null, "testrpc",


@ -35,17 +35,19 @@ import org.mockito.Mockito;
 public class TestRpcServerTraceLogging {
   @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule
-    .forClass(TestRpcServerTraceLogging.class);
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestRpcServerTraceLogging.class);
-  static org.apache.log4j.Logger rpcServerLog = org.apache.log4j.Logger.getLogger(RpcServer.class);
+  private static final org.apache.logging.log4j.core.Logger rpcServerLog =
+    (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RpcServer.class);
   static final String TRACE_LOG_MSG =
-    "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }"
-      + " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } "
-      + "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } "
-      + "number_of_rows: 2147483647 close_scanner: false client_handles_partials: "
-      + "true client_handles_heartbeats: true track_scan_metrics: false";
+    "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }" +
+      " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } " +
+      "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } " +
+      "number_of_rows: 2147483647 close_scanner: false client_handles_partials: " +
+      "true client_handles_heartbeats: true track_scan_metrics: false";
   static final int TRACE_LOG_LENGTH = TRACE_LOG_MSG.length();
@ -62,7 +64,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOff() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.DEBUG);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.DEBUG);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
     assertEquals(150 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@ -72,7 +74,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOn() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
     assertEquals(250 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@ -82,7 +84,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOnLargeMax() {
     conf.setInt("hbase.ipc.trace.log.max.length", 2000);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
     assertEquals(TRACE_LOG_LENGTH, truncatedString.length());
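The cast in rpcServerLog above is the usual way to reach setLevel() in log4j2, since the public-API Logger does not expose it; a minimal sketch under the assumption that log4j-core backs LogManager (the logger name is an example):

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;

public class SetLevelDemo {
  public static void main(String[] args) {
    // The org.apache.logging.log4j.Logger interface has no setLevel();
    // the core implementation class does.
    org.apache.logging.log4j.core.Logger logger =
      (org.apache.logging.log4j.core.Logger) LogManager.getLogger("org.example.Demo");
    logger.setLevel(Level.TRACE);
    System.out.println(logger.getLevel()); // TRACE
  }
}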


@ -7055,7 +7055,7 @@ public class TestHRegion {
     // using small heart beat cells
     conf.setLong(StoreScanner.HBASE_CELLS_SCANNED_PER_HEARTBEAT_CHECK, 2);
-    region = HBaseTestingUtil
+    region = HBaseTestingUtility
       .createRegionAndWAL(RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build(),
         TEST_UTIL.getDataTestDir(), conf, tableDescriptor);
     assertNotNull(region);


@ -20,14 +20,16 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.reset;
-import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@ -36,10 +38,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@ -47,8 +45,9 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
@ -80,7 +79,7 @@ public class TestMultiLogThreshold {
   private HRegionServer rs;
   private RSRpcServices services;
-  private Appender appender;
+  private org.apache.logging.log4j.core.Appender appender;
   @Parameterized.Parameter
   public static boolean rejectLargeBatchOp;
@ -90,6 +89,21 @@
     return Arrays.asList(new Object[] { false }, new Object[] { true });
   }
+  private final class LevelAndMessage {
+    final org.apache.logging.log4j.Level level;
+    final String msg;
+    public LevelAndMessage(org.apache.logging.log4j.Level level, String msg) {
+      this.level = level;
+      this.msg = msg;
+    }
+  }
+  // log4j2 will reuse the LogEvent so we need to copy the level and message out.
+  private BlockingDeque<LevelAndMessage> logs = new LinkedBlockingDeque<>();
   @Before
   public void setupTest() throws Exception {
     util = new HBaseTestingUtility();
@ -100,13 +114,28 @@
     util.startMiniCluster();
     util.createTable(NAME, TEST_FAM);
     rs = util.getRSForFirstRegionInTable(NAME);
-    appender = mock(Appender.class);
-    LogManager.getLogger(RSRpcServices.class).addAppender(appender);
+    appender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(appender.getName()).thenReturn("mockAppender");
+    when(appender.isStarted()).thenReturn(true);
+    doAnswer(new Answer<Void>() {
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        org.apache.logging.log4j.core.LogEvent logEvent =
+          invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+        logs.add(
+          new LevelAndMessage(logEvent.getLevel(), logEvent.getMessage().getFormattedMessage()));
+        return null;
+      }
+    }).when(appender).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).addAppender(appender);
   }
   @After
   public void tearDown() throws Exception {
-    LogManager.getLogger(RSRpcServices.class).removeAppender(appender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).removeAppender(appender);
     util.shutdownMiniCluster();
   }
@ -149,17 +178,16 @@
   }
   private void assertLogBatchWarnings(boolean expected) {
-    ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
-    verify(appender, atLeastOnce()).doAppend(captor.capture());
+    assertFalse(logs.isEmpty());
     boolean actual = false;
-    for (LoggingEvent event : captor.getAllValues()) {
-      if (event.getLevel() == Level.WARN &&
-        event.getRenderedMessage().contains("Large batch operation detected")) {
+    for (LevelAndMessage event : logs) {
+      if (event.level == org.apache.logging.log4j.Level.WARN &&
+        event.msg.contains("Large batch operation detected")) {
        actual = true;
        break;
      }
    }
-    reset(appender);
+    logs.clear();
    assertEquals(expected, actual);
  }
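The comment about log4j2 reusing LogEvent objects is the crux of this rewrite. An alternative to copying level and message by hand is LogEvent.toImmutable(); a hedged sketch of a capturing appender built on it (class and field names are illustrative):

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;

// Sketch only: collect events for later assertions.
public class CapturingAppender extends AbstractAppender {
  private final Queue<LogEvent> events = new ConcurrentLinkedQueue<>();

  protected CapturingAppender(String name) {
    super(name, null, null, true, null); // no filter, no layout, no properties
  }

  @Override
  public void append(LogEvent event) {
    // Snapshot the possibly-reused mutable event before keeping a reference to it.
    events.add(event.toImmutable());
  }
}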


@ -42,11 +42,6 @@ import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.WriterAppender;
 import org.apache.zookeeper.KeeperException;
 import org.junit.After;
 import org.junit.Before;
@ -56,6 +51,8 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 @Category(LargeTests.class)
 public class TestRegionServerReportForDuty {
@ -91,26 +88,15 @@
     testUtil.shutdownMiniDFSCluster();
   }
-  /**
-   * LogCapturer is similar to {@link org.apache.hadoop.test.GenericTestUtils.LogCapturer}
-   * except that this implementation has a default appender to the root logger.
-   * Hadoop 2.8+ supports the default appender in the LogCapture it ships and this can be replaced.
-   * TODO: This class can be removed after we upgrade Hadoop dependency.
-   */
-  static class LogCapturer {
+  private static class LogCapturer {
     private StringWriter sw = new StringWriter();
-    private WriterAppender appender;
-    private org.apache.log4j.Logger logger;
-    LogCapturer(org.apache.log4j.Logger logger) {
+    private org.apache.logging.log4j.core.appender.WriterAppender appender;
+    private org.apache.logging.log4j.core.Logger logger;
+    LogCapturer(org.apache.logging.log4j.core.Logger logger) {
       this.logger = logger;
-      Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout");
-      if (defaultAppender == null) {
-        defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console");
-      }
-      final Layout layout = (defaultAppender == null) ? new PatternLayout() :
-        defaultAppender.getLayout();
-      this.appender = new WriterAppender(layout, sw);
+      this.appender = org.apache.logging.log4j.core.appender.WriterAppender.newBuilder()
+        .setName("test").setTarget(sw).build();
       this.logger.addAppender(this.appender);
     }
@ -146,7 +132,9 @@
     master = cluster.addMaster();
     master.start();
-    LogCapturer capturer = new LogCapturer(org.apache.log4j.Logger.getLogger(HRegionServer.class));
+    LogCapturer capturer =
+      new LogCapturer((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+        .getLogger(HRegionServer.class));
     // Set sleep interval relatively low so that exponential backoff is more demanding.
     int msginterval = 100;
     cluster.getConfiguration().setInt("hbase.regionserver.msginterval", msginterval);
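The WriterAppender capture trick above works on its own as well; a minimal sketch assuming log4j-core (the sketch starts the appender explicitly and logs at ERROR so it clears the default root level; the logger name is an example):

import java.io.StringWriter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.appender.WriterAppender;

public class CaptureDemo {
  public static void main(String[] args) {
    StringWriter sw = new StringWriter();
    WriterAppender appender =
      WriterAppender.newBuilder().setName("capture").setTarget(sw).build();
    appender.start(); // appenders must be started before they accept events
    Logger logger = (Logger) LogManager.getLogger("org.example.Demo");
    logger.addAppender(appender);
    logger.error("hello");
    logger.removeAppender(appender);
    System.out.println(sw.toString().contains("hello")); // true
  }
}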


@ -26,24 +26,36 @@ import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
+/**
+ * This is not a unit test. It is not run as part of the general unit test suite. It is for
+ * comparing compaction policies. You must run it explicitly;
+ * e.g. mvn test -Dtest=PerfTestCompactionPolicies
+ */
 @Category({RegionServerTests.class, MediumTests.class})
 @RunWith(Parameterized.class)
 public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(PerfTestCompactionPolicies.class);
   private final RatioBasedCompactionPolicy cp;
   private final StoreFileListGenerator generator;
   private final HStore store;
@ -119,12 +131,9 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
     this.ratio = inRatio;
     // Hide lots of logging so the system out is usable as a tab delimited file.
-    org.apache.log4j.Logger.getLogger(CompactionConfiguration.class).
-      setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class).
-      setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR);
+    Log4jUtils.setLogLevel(CompactionConfiguration.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(RatioBasedCompactionPolicy.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(cpClass.getName(), "ERROR");
     Configuration configuration = HBaseConfiguration.create();
@ -196,6 +205,7 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
     HStore s = mock(HStore.class);
     when(s.getStoreFileTtl()).thenReturn(Long.MAX_VALUE);
     when(s.getBlockingFileCount()).thenReturn(7L);
+    when(s.getRegionInfo()).thenReturn(RegionInfoBuilder.FIRST_META_REGIONINFO);
     return s;
   }


@@ -27,15 +27,17 @@ import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -50,9 +52,6 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
@@ -60,19 +59,14 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.ArgumentMatcher;

@Category({ LargeTests.class })
public class TestCanaryTool {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestCanaryTool.class);

  private HBaseTestingUtility testingUtility;
  private static final byte[] FAMILY = Bytes.toBytes("f");
@@ -81,22 +75,26 @@ public class TestCanaryTool {
  @Rule
  public TestName name = new TestName();

  private org.apache.logging.log4j.core.Appender mockAppender;

  @Before
  public void setUp() throws Exception {
    testingUtility = new HBaseTestingUtility();
    testingUtility.startMiniCluster();
    mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
    when(mockAppender.getName()).thenReturn("mockAppender");
    when(mockAppender.isStarted()).thenReturn(true);
    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
      .getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
  }

  @After
  public void tearDown() throws Exception {
    testingUtility.shutdownMiniCluster();
    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
      .getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender);
  }
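The test no longer relies on MockitoJUnitRunner injecting a log4j 1.x Appender into the root logger; it now builds a log4j2 core Appender mock by hand and hangs it off the "org.apache.hadoop.hbase" logger. A self-contained sketch of that pattern, reusing the names from the test above (the MockAppenders wrapper itself is illustrative, not part of the patch):

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.core.Appender;
    import org.apache.logging.log4j.core.Logger;

    public final class MockAppenders {
      private MockAppenders() {
      }

      public static Appender attach(String loggerName) {
        Appender mockAppender = mock(Appender.class);
        when(mockAppender.getName()).thenReturn("mockAppender");
        // log4j2 only dispatches events to appenders that report themselves as started.
        when(mockAppender.isStarted()).thenReturn(true);
        // Casting to the core Logger exposes addAppender(), which the log4j2 API module hides.
        ((Logger) LogManager.getLogger(loggerName)).addAppender(mockAppender);
        return mockAppender;
      }

      public static void detach(String loggerName, Appender mockAppender) {
        ((Logger) LogManager.getLogger(loggerName)).removeAppender(mockAppender);
      }
    }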
  @Test
  public void testBasicZookeeperCanaryWorks() throws Exception {
    final String[] args = { "-t", "10000", "-zookeeper" };
@@ -105,7 +103,8 @@ public class TestCanaryTool {
  @Test
  public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception {
    final String[] args =
      { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" };
    testZookeeperCanaryWithArgs(args);
  }
@@ -114,7 +113,7 @@ public class TestCanaryTool {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
    // insert some test rows
    for (int i = 0; i < 1000; i++) {
      byte[] iBytes = Bytes.toBytes(i);
      Put p = new Put(iBytes);
      p.addColumn(FAMILY, COLUMN, iBytes);
@@ -155,7 +154,7 @@ public class TestCanaryTool {
    // the test table has two column family. If readAllCF set true,
    // we expect read count is double of region count
    int expectedReadCount =
      readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
    assertEquals("canary region success count should equal total expected read count",
      expectedReadCount, sink.getReadSuccessCount());
    Map<String, List<CanaryTool.RegionTaskResult>> regionMap = sink.getRegionMap();
@@ -183,7 +182,7 @@ public class TestCanaryTool {
    TableName tableName = TableName.valueOf("testCanaryRegionTaskResult");
    Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
    // insert some test rows
    for (int i = 0; i < 1000; i++) {
      byte[] iBytes = Bytes.toBytes(i);
      Put p = new Put(iBytes);
      p.addColumn(FAMILY, COLUMN, iBytes);
@@ -212,7 +211,7 @@ public class TestCanaryTool {
    assertFalse("verify region map has size > 0", regionMap.isEmpty());
    for (String regionName : regionMap.keySet()) {
      for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) {
        assertNotNull("verify getRegionNameAsString()", regionName);
        assertNotNull("verify getRegionInfo()", res.getRegionInfo());
        assertNotNull("verify getTableName()", res.getTableName());
@@ -235,24 +234,25 @@ public class TestCanaryTool {
  // Ignore this test. It fails w/ the below on some mac os x.
  // [ERROR] Failures:
  // [ERROR] TestCanaryTool.testReadTableTimeouts:216
  // Argument(s) are different! Wanted:
  // mockAppender.doAppend(
  // <custom argument matcher>
  // );
  // -> at org.apache.hadoop.hbase.tool.TestCanaryTool
  // .testReadTableTimeouts(TestCanaryTool.java:216)
  // Actual invocations have different arguments:
  // mockAppender.doAppend(
  // org.apache.log4j.spi.LoggingEvent@2055cfc1
  // );
  // )
  // )
  //
  @org.junit.Ignore
  @Test
  public void testReadTableTimeouts() throws Exception {
    final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"),
      TableName.valueOf(name.getMethodName() + "2") };
    // Create 2 test tables.
    for (int j = 0; j < 2; j++) {
      Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY });
@@ -269,8 +269,8 @@ public class TestCanaryTool {
    CanaryTool canary = new CanaryTool(executor, sink);
    String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," +
      tableNames[1].getNameAsString() + "=0";
    String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
      name.getMethodName() + "2" };
    assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
    verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class));
    for (int i = 0; i < 2; i++) {
@@ -280,18 +280,21 @@ public class TestCanaryTool {
      sink.getReadLatencyMap().get(tableNames[i].getNameAsString()));
    }
    // One table's timeout is set for 0 ms and thus, should lead to an error.
    verify(mockAppender, times(1))
      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
        @Override
        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
          return argument.getMessage().getFormattedMessage()
            .contains("exceeded the configured read timeout.");
        }
      }));
    verify(mockAppender, times(2))
      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
        @Override
        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
          return argument.getMessage().getFormattedMessage().contains("Configured read timeout");
        }
      }));
  }
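Note the API shift inside the matchers: log4j 1.x exposed the text via LoggingEvent.getRenderedMessage(), while log4j2 goes through LogEvent.getMessage().getFormattedMessage(), and verification now targets Appender.append() rather than doAppend(). The repeated anonymous matchers could be collapsed into a small helper; a hypothetical sketch (containsMessage is not part of the patch):

    import org.apache.logging.log4j.core.LogEvent;
    import org.mockito.ArgumentMatcher;

    final class LogEventMatchers {
      private LogEventMatchers() {
      }

      /** Matches any LogEvent whose formatted message contains the given fragment. */
      static ArgumentMatcher<LogEvent> containsMessage(String fragment) {
        return event -> event.getMessage().getFormattedMessage().contains(fragment);
      }
    }

With it, the first verification above would read: verify(mockAppender, times(1)).append(argThat(containsMessage("exceeded the configured read timeout.")));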
  @Test
@@ -299,43 +302,47 @@ public class TestCanaryTool {
    ExecutorService executor = new ScheduledThreadPoolExecutor(1);
    CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink());
    CanaryTool canary = new CanaryTool(executor, sink);
    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) };
    assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
    assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
    assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
    verify(mockAppender, times(1))
      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
        @Override
        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
          return argument.getMessage().getFormattedMessage().contains("Configured write timeout");
        }
      }));
  }

  // no table created, so there should be no regions
  @Test
  public void testRegionserverNoRegions() throws Exception {
    runRegionserverCanary();
    verify(mockAppender)
      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
        @Override
        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
          return argument.getMessage().getFormattedMessage()
            .contains("Regionserver not serving any regions");
        }
      }));
  }
  // by creating a table, there shouldn't be any region servers not serving any regions
  @Test
  public void testRegionserverWithRegions() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    testingUtility.createTable(tableName, new byte[][] { FAMILY });
    runRegionserverCanary();
    verify(mockAppender, never())
      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
        @Override
        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
          return argument.getMessage().getFormattedMessage()
            .contains("Regionserver not serving any regions");
        }
      }));
  }

  @Test
@@ -343,7 +350,7 @@ public class TestCanaryTool {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
    // insert some test rows
    for (int i = 0; i < 1000; i++) {
      byte[] iBytes = Bytes.toBytes(i);
      Put p = new Put(iBytes);
      p.addColumn(FAMILY, COLUMN, iBytes);
@@ -357,23 +364,20 @@ public class TestCanaryTool {
      new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration());
    conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true);
    assertEquals(0, ToolRunner.run(conf, canary, args));
    verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
      isA(ColumnFamilyDescriptor.class), anyLong());
    assertEquals("verify no read error count", 0, canary.getReadFailures().size());
  }

  private void runRegionserverCanary() throws Exception {
    ExecutorService executor = new ScheduledThreadPoolExecutor(1);
    CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink());
    String[] args = { "-t", "10000", "-regionserver" };
    assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
    assertEquals("verify no read error count", 0, canary.getReadFailures().size());
  }

  private void testZookeeperCanaryWithArgs(String[] args) throws Exception {
    String hostPort = testingUtility.getZkCluster().getAddress().toString();
    testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort + "/hbase");
    ExecutorService executor = new ScheduledThreadPoolExecutor(2);
@@ -381,8 +385,8 @@ public class TestCanaryTool {
    CanaryTool canary = new CanaryTool(executor, sink);
    assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
    String baseZnode = testingUtility.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT,
      HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
    verify(sink, atLeastOnce()).publishReadTiming(eq(baseZnode), eq(hostPort), anyLong());
  }
}
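All of these tests drive the canary the way the hbase canary command does: CanaryTool is a Hadoop Tool, so ToolRunner parses generic options and passes the remaining arguments through. A rough sketch of a standalone invocation, assuming CanaryTool's no-argument constructor as used by its own main():

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.tool.CanaryTool;
    import org.apache.hadoop.util.ToolRunner;

    public class RunZookeeperCanary {
      public static void main(String[] ignored) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // -t is the overall timeout in milliseconds; -zookeeper selects the ZooKeeper canary.
        int exit = ToolRunner.run(conf, new CanaryTool(), new String[] { "-t", "10000", "-zookeeper" });
        System.exit(exit);
      }
    }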


@@ -46,12 +46,10 @@
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-shaded-mapreduce</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
    </dependency>
    <!-- parent pom defines these for children. :( :( :( -->
    <dependency>
@@ -60,8 +58,18 @@
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-api</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
      <scope>provided</scope>
    </dependency>
    <!-- Test dependencies -->
@@ -108,8 +116,7 @@
          <excludes>
            <!-- We leave logging stuff alone -->
            <exclude>org.slf4j:*</exclude>
            <exclude>org.apache.logging.log4j:*</exclude>
            <exclude>commons-logging:*</exclude>
            <!-- annotations that never change -->
            <exclude>com.google.code.findbugs:*</exclude>
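The exclusion swap mirrors the dependency swap above: the invariant is that nothing from the logging stack gets relocated into the shaded jars, so with reload4j gone the org.apache.logging.log4j artifacts are the ones left unshaded for the downstream application to supply. Code that logs through the slf4j facade is untouched by the backend change; a minimal sketch, assuming slf4j-api plus some binding (for example log4j-slf4j-impl) on the runtime classpath, with ShadedClientApp an illustrative name:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ShadedClientApp {
      // Binds to whichever slf4j backend the application ships; the shaded jar stays neutral.
      private static final Logger LOG = LoggerFactory.getLogger(ShadedClientApp.class);

      public static void main(String[] args) {
        LOG.info("logging backend chosen by the app, not by the shaded HBase client");
      }
    }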


@@ -1,6 +1,6 @@
<project xmlns="https://maven.apache.org/POM/4.0.0"
         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <!--
  /**
   * Licensed to the Apache Software Foundation (ASF) under one
@@ -20,159 +20,161 @@
   * limitations under the License.
   */
  -->
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <artifactId>hbase-shaded</artifactId>
    <groupId>org.apache.hbase</groupId>
    <version>2.5.0-SNAPSHOT</version>
    <relativePath>..</relativePath>
  </parent>
  <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
  <name>Apache HBase - Shaded - Client</name>
  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-site-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <plugin>
        <!--Make it so assembly:single does nothing in here-->
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <skipAssembly>true</skipAssembly>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
      </plugin>
    </plugins>
  </build>
  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-auth</artifactId>
      <scope>provided</scope>
    </dependency>
  </dependencies>
  <profiles>
    <!-- These hadoop profiles should be derived from those in the hbase-client
         module. Essentially, you must list the same hadoop-* dependencies
         so provided dependencies will not be transitively included.
    -->
    <profile>
      <id>hadoop-2.0</id>
      <activation>
        <property>
          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
          <!--h2-->
          <name>!hadoop.profile</name>
        </property>
      </activation>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-auth</artifactId>
          <scope>provided</scope>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-common</artifactId>
          <scope>provided</scope>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-jaxrs</artifactId>
          <version>1.9.13</version>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-mapper-asl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-core-asl</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-xc</artifactId>
          <version>1.9.13</version>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-mapper-asl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-core-asl</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
      </dependencies>
    </profile>
    <!--
      profile for building against Hadoop 3.0.x. Activate using:
        mvn -Dhadoop.profile=3.0
    -->
    <profile>
      <id>hadoop-3.0</id>
      <activation>
        <property>
          <name>hadoop.profile</name>
          <value>3.0</value>
        </property>
      </activation>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-auth</artifactId>
          <scope>provided</scope>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-common</artifactId>
          <scope>provided</scope>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-jaxrs</artifactId>
          <version>1.9.13</version>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-mapper-asl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-core-asl</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-xc</artifactId>
          <version>1.9.13</version>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-mapper-asl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-core-asl</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
      </dependencies>
    </profile>
  </profiles>
</project>


@@ -1,6 +1,6 @@
<project xmlns="https://maven.apache.org/POM/4.0.0"
         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <!--
  /**
   * Licensed to the Apache Software Foundation (ASF) under one
@@ -20,80 +20,78 @@
   * limitations under the License.
   */
  -->
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <artifactId>hbase-shaded</artifactId>
    <groupId>org.apache.hbase</groupId>
    <version>2.5.0-SNAPSHOT</version>
    <relativePath>..</relativePath>
  </parent>
  <artifactId>hbase-shaded-client</artifactId>
  <name>Apache HBase - Shaded - Client (with Hadoop bundled)</name>
  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-site-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <plugin>
        <!--Make it so assembly:single does nothing in here-->
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <skipAssembly>true</skipAssembly>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <executions>
          <execution>
            <id>aggregate-into-a-jar-with-relocated-third-parties</id>
            <configuration>
              <artifactSet>
                <excludes>
                  <!-- exclude J2EE modules that come in for JDK11+ (since
                       hadoop-3.2.0) or modules that come in for JDK8+ but
                       need not be included -->
                  <exclude>javax.annotation:javax.annotation-api</exclude>
                  <exclude>javax.activation:javax.activation-api</exclude>
                  <exclude>jakarta.activation:jakarta.activation-api</exclude> <!-- Hadoop 3.3.1 -->
                  <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
                  <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
                  <exclude>jakarta.validation:jakarta.validation-api</exclude>
                  <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
                  <!--
                    Tell the shade plugin that in this case we want to include hadoop
                    by leaving out the exclude.
                  -->
                  <!-- The rest of these should be kept in sync with the parent pom -->
                  <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
                  <exclude>org.slf4j:*</exclude>
                  <exclude>com.google.code.findbugs:*</exclude>
                  <exclude>com.github.stephenc.findbugs:*</exclude>
                  <exclude>com.github.spotbugs:*</exclude>
                  <exclude>org.apache.htrace:*</exclude>
                  <exclude>org.apache.yetus:*</exclude>
                  <exclude>org.apache.logging.log4j:*</exclude>
                  <exclude>commons-logging:*</exclude>
                  <exclude>org.javassist:*</exclude>
                  <exclude>io.opentelemetry:*</exclude>
                </excludes>
              </artifactSet>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
  <dependencies>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-client</artifactId>
    </dependency>
  </dependencies>
</project>


@@ -1,6 +1,6 @@
<project xmlns="https://maven.apache.org/POM/4.0.0"
         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <!--
  /**
   * Licensed to the Apache Software Foundation (ASF) under one
@@ -20,378 +20,288 @@
   * limitations under the License.
   */
  -->
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <artifactId>hbase-shaded</artifactId>
    <groupId>org.apache.hbase</groupId>
    <version>2.5.0-SNAPSHOT</version>
    <relativePath>..</relativePath>
  </parent>
  <artifactId>hbase-shaded-mapreduce</artifactId>
  <name>Apache HBase - Shaded - MapReduce</name>
  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-site-plugin</artifactId>
        <configuration>
          <skip>true</skip>
        </configuration>
      </plugin>
      <plugin>
        <!--Make it so assembly:single does nothing in here-->
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <skipAssembly>true</skipAssembly>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <configuration>
          <archive>
            <manifest>
              <!--Include the Driver class as the 'main'.
                  Executing the jar will then show a list of the basic MR jobs.
              -->
              <mainClass>org/apache/hadoop/hbase/mapreduce/Driver</mainClass>
            </manifest>
          </archive>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
      </plugin>
    </plugins>
  </build>
  <dependencies>
    <!--
      We want to ensure needed hadoop bits are at provided scope for our shaded
      artifact, so we list them below in hadoop specific profiles.
    -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>provided</scope>
    </dependency>
  </dependencies>
  <profiles>
    <!-- These hadoop profiles should be derived from those in the hbase-mapreduce
         module. Essentially, you must list the same hadoop-* dependencies
         since provided dependencies are not transitively included.
    -->
    <!-- profile against Hadoop 2.x: This is the default. -->
    <profile>
      <id>hadoop-2.0</id>
      <activation>
        <property>
          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
          <!--h2-->
          <name>!hadoop.profile</name>
        </property>
      </activation>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-common</artifactId>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>net.java.dev.jets3t</groupId>
              <artifactId>jets3t</artifactId>
            </exclusion>
            <exclusion>
              <groupId>javax.servlet.jsp</groupId>
              <artifactId>jsp-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.mortbay.jetty</groupId>
              <artifactId>jetty</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.sun.jersey</groupId>
              <artifactId>jersey-server</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.sun.jersey</groupId>
              <artifactId>jersey-core</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.sun.jersey</groupId>
              <artifactId>jersey-json</artifactId>
            </exclusion>
            <exclusion>
              <groupId>javax.servlet</groupId>
              <artifactId>servlet-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>tomcat</groupId>
              <artifactId>jasper-compiler</artifactId>
            </exclusion>
            <exclusion>
              <groupId>tomcat</groupId>
              <artifactId>jasper-runtime</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.google.code.findbugs</groupId>
              <artifactId>jsr305</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-hdfs</artifactId>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>javax.servlet.jsp</groupId>
              <artifactId>jsp-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>javax.servlet</groupId>
              <artifactId>servlet-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>io.netty</groupId>
              <artifactId>netty</artifactId>
            </exclusion>
            <exclusion>
              <groupId>stax</groupId>
              <artifactId>stax-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>xerces</groupId>
              <artifactId>xercesImpl</artifactId>
            </exclusion>
          </exclusions>
          <version>${hadoop-two.version}</version>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-mapreduce-client-core</artifactId>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>com.google.guava</groupId>
              <artifactId>guava</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-jaxrs</artifactId>
          <version>1.9.13</version>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-mapper-asl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-core-asl</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-xc</artifactId>
          <version>1.9.13</version>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-mapper-asl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-core-asl</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-auth</artifactId>
          <scope>provided</scope>
        </dependency>
      </dependencies>
    </profile>
    <!--
      profile for building against Hadoop 3.0.x. Activate using:
        mvn -Dhadoop.profile=3.0
    -->
    <profile>
      <id>hadoop-3.0</id>
      <activation>
        <property>
          <name>hadoop.profile</name>
          <value>3.0</value>
        </property>
      </activation>
      <properties>
        <hadoop.version>${hadoop-three.version}</hadoop.version>
      </properties>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-common</artifactId>
          <scope>provided</scope>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-hdfs</artifactId>
          <scope>provided</scope>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-auth</artifactId>
          <scope>provided</scope>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-mapreduce-client-core</artifactId>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>com.google.guava</groupId>
              <artifactId>guava</artifactId>
            </exclusion>
            <exclusion>
              <groupId>javax.xml.bind</groupId>
              <artifactId>jaxb-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>javax.ws.rs</groupId>
              <artifactId>jsr311-api</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-jaxrs</artifactId>
          <version>1.9.13</version>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-mapper-asl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-core-asl</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.codehaus.jackson</groupId>
          <artifactId>jackson-xc</artifactId>
          <version>1.9.13</version>
          <scope>provided</scope>
          <exclusions>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-mapper-asl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.codehaus.jackson</groupId>
              <artifactId>jackson-core-asl</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
      </dependencies>
    </profile>
  </profiles>
</project>


@@ -1,25 +1,25 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <!--
  /**
   * Licensed to the Apache Software Foundation (ASF) under one
   * or more contributor license agreements. See the NOTICE file
   * distributed with this work for additional information
   * regarding copyright ownership. The ASF licenses this file
   * to you under the Apache License, Version 2.0 (the
   * "License"); you may not use this file except in compliance
   * with the License. You may obtain a copy of the License at
   *
   *     http://www.apache.org/licenses/LICENSE-2.0
   *
   * Unless required by applicable law or agreed to in writing, software
   * distributed under the License is distributed on an "AS IS" BASIS,
   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   * See the License for the specific language governing permissions and
   * limitations under the License.
   */
  -->
  <modelVersion>4.0.0</modelVersion>
  <parent>
@@ -56,8 +56,23 @@
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-api</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-1.2-api</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>

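Alongside log4j-api, log4j-core, and log4j-slf4j-impl, the test scope above now pulls in log4j-1.2-api: the bridge that serves the old org.apache.log4j package names on top of log4j2, for third-party code still written against the log4j 1.x API. A minimal sketch of what the bridge makes work once the real log4j 1.x jar is off the classpath (LegacyLogging is illustrative only):

    // Compiles against the bridge's org.apache.log4j, not against log4j 1.x.
    import org.apache.log4j.Logger;

    public class LegacyLogging {
      private static final Logger LOG = Logger.getLogger(LegacyLogging.class);

      public static void main(String[] args) {
        // Routed through log4j-1.2-api into the log4j2 core provided above.
        LOG.info("legacy API, modern backend");
      }
    }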

@@ -1,183 +1,179 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <!--
   /**
    * Licensed to the Apache Software Foundation (ASF) under one
    * or more contributor license agreements. See the NOTICE file
    * distributed with this work for additional information
    * regarding copyright ownership. The ASF licenses this file
    * to you under the Apache License, Version 2.0 (the
    * "License"); you may not use this file except in compliance
    * with the License. You may obtain a copy of the License at
    *
    *     http://www.apache.org/licenses/LICENSE-2.0
    *
    * Unless required by applicable law or agreed to in writing, software
    * distributed under the License is distributed on an "AS IS" BASIS,
    * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    * See the License for the specific language governing permissions and
    * limitations under the License.
    */
   -->
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <artifactId>hbase-shaded</artifactId>
     <groupId>org.apache.hbase</groupId>
     <version>2.5.0-SNAPSHOT</version>
     <relativePath>..</relativePath>
   </parent>
-
   <artifactId>hbase-shaded-testing-util</artifactId>
   <name>Apache HBase - Shaded - Testing Util</name>
-
   <dependencies>
     <!-- test-jar dependencies -->
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-app</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
       <exclusions>
         <exclusion>
           <groupId>javax.xml.bind</groupId>
           <artifactId>jaxb-api</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-asyncfs</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-zookeeper</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop2-compat</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-jaxrs</artifactId>
       <version>1.9.13</version>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-testing-util</artifactId>
-      <version>${project.version}</version>
       <scope>compile</scope>
       <exclusions>
         <exclusion>
           <groupId>javax.xml.bind</groupId>
           <artifactId>jaxb-api</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
   </dependencies>
   <build>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <configuration>
           <skip>true</skip>
         </configuration>
       </plugin>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>
         <configuration>
           <skipAssembly>true</skipAssembly>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-shade-plugin</artifactId>
         <executions>
           <execution>
             <id>aggregate-into-a-jar-with-relocated-third-parties</id>
             <configuration>
               <artifactSet>
                 <excludes>
                   <!-- exclude J2EE modules that come in for JDK11+ (since
                     hadoop-3.2.0) or modules that come in for JDK8+ but
                     need not be included -->
                   <exclude>javax.annotation:javax.annotation-api</exclude>
                   <exclude>javax.activation:javax.activation-api</exclude>
                   <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
                   <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
                   <exclude>jakarta.validation:jakarta.validation-api</exclude>
                   <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
                   <!--
                     Tell the shade plugin that in this case we want to include hadoop
                     by leaving out the exclude.
                   -->
                   <!-- The rest of these should be kept in sync with the parent pom -->
                   <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
                   <exclude>org.slf4j:*</exclude>
                   <exclude>com.google.code.findbugs:*</exclude>
                   <exclude>com.github.stephenc.findbugs:*</exclude>
                   <exclude>com.github.spotbugs:*</exclude>
                   <exclude>org.apache.htrace:*</exclude>
                   <exclude>org.apache.yetus:*</exclude>
-                  <exclude>log4j:*</exclude>
-                  <exclude>ch.qos.reload4j:*</exclude>
+                  <exclude>org.apache.logging.log4j:*</exclude>
                   <exclude>commons-logging:*</exclude>
                   <exclude>org.javassist:*</exclude>
                   <exclude>io.opentelemetry:*</exclude>
                 </excludes>
               </artifactSet>
             </configuration>
           </execution>
         </executions>
       </plugin>
     </plugins>
   </build>
 </project>
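The shaded testing-util jar deliberately leaves logging unshaded (note the org.slf4j:*, org.apache.logging.log4j:* and commons-logging:* excludes above), so a consumer has to bring its own binding. A hedged sketch of what a downstream test POM might add (the versions here are illustrative assumptions, not pinned by this pom):

    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-shaded-testing-util</artifactId>
      <version>2.5.0</version><!-- illustrative version -->
      <scope>test</scope>
    </dependency>
    <!-- the Log4j2 binding is intentionally not inside the shaded jar -->
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
      <version>2.17.2</version><!-- illustrative; matches this commit's log4j2.version -->
      <scope>test</scope>
    </dependency>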


@@ -40,7 +40,6 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-shaded-client</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <!-- parent pom defines these for children. :( :( :( -->
     <dependency>
@@ -49,8 +48,18 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>provided</scope>
     </dependency>
     <!-- Test dependencies -->
@@ -97,8 +106,7 @@
               <excludes>
                 <!-- We leave logging stuff alone -->
                 <exclude>org.slf4j:*</exclude>
-                <exclude>log4j:*</exclude>
-                <exclude>ch.qos.reload4j:*</exclude>
+                <exclude>org.apache.logging.log4j:*</exclude>
                 <exclude>commons-logging:*</exclude>
                 <!-- annotations that never change -->
                 <exclude>com.google.code.findbugs:*</exclude>
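In this module the Log4j2 artifacts sit at provided scope and the shade configuration "leaves logging stuff alone", so nothing logging-related ships in the jar. An application consuming such an artifact would typically supply the backend itself at runtime; a sketch under that assumption (scope and versions are illustrative, not mandated by this pom):

    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
      <version>2.17.2</version><!-- illustrative -->
      <scope>runtime</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
      <version>2.17.2</version><!-- illustrative -->
      <scope>runtime</scope>
    </dependency>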

File diff suppressed because it is too large.


@@ -147,13 +147,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>


@@ -1,5 +1,7 @@
 <?xml version="1.0"?>
-<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="https://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
 <!--
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -19,288 +21,299 @@
  * limitations under the License.
  */
 -->
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <artifactId>hbase-build-configuration</artifactId>
     <groupId>org.apache.hbase</groupId>
     <version>2.5.0-SNAPSHOT</version>
     <relativePath>../hbase-build-configuration</relativePath>
   </parent>
   <artifactId>hbase-testing-util</artifactId>
   <name>Apache HBase - Testing Util</name>
   <description>HBase Testing Utilities.</description>
   <dependencies>
     <!-- Intra-project dependencies -->
     <!-- we do not want to introduce this to downstream users so still set the scope to test -->
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-logging</artifactId>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
       <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-annotations</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
       <exclusions>
         <exclusion>
           <groupId>jdk.tools</groupId>
           <artifactId>jdk.tools</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-protocol</artifactId>
       <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-client</artifactId>
       <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-zookeeper</artifactId>
       <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-zookeeper</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
       <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-asyncfs</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
       <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>${compat.module}</artifactId>
       <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>${compat.module}</artifactId>
       <type>test-jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>jcl-over-slf4j</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>jul-to-slf4j</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <profiles>
     <!-- Profiles for building against different hadoop versions -->
     <!-- There are a lot of common dependencies used here, should investigate
       if we can combine these profiles somehow -->
     <!-- profile for building against Hadoop 2.x. This is the default -->
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
           <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
-          <!--h2--><name>!hadoop.profile</name>
+          <!--h2-->
+          <name>!hadoop.profile</name>
         </property>
       </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
           <scope>compile</scope>
           <exclusions>
             <exclusion>
               <groupId>javax.xml.bind</groupId>
               <artifactId>jaxb-api</artifactId>
             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-auth</artifactId>
           <scope>compile</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-client</artifactId>
           <scope>compile</scope>
           <exclusions>
             <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>
             <exclusion>
               <groupId>javax.xml.bind</groupId>
               <artifactId>jaxb-api</artifactId>
             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-core</artifactId>
           <scope>compile</scope>
           <exclusions>
             <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>
             <exclusion>
               <groupId>javax.xml.bind</groupId>
               <artifactId>jaxb-api</artifactId>
             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
           <scope>compile</scope>
           <exclusions>
             <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-hdfs</artifactId>
           <scope>compile</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-hdfs</artifactId>
           <type>test-jar</type>
           <scope>compile</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minicluster</artifactId>
           <scope>compile</scope>
           <exclusions>
             <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>
             <exclusion>
               <groupId>org.apache.zookeeper</groupId>
               <artifactId>zookeeper</artifactId>
             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minikdc</artifactId>
         </dependency>
       </dependencies>
     </profile>
     <!--
       profile for building against Hadoop 3.0.x. Activate using:
         mvn -Dhadoop.profile=3.0
     -->
     <profile>
       <id>hadoop-3.0</id>
       <activation>
         <property>
           <name>hadoop.profile</name>
           <value>3.0</value>
         </property>
       </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
           <exclusions>
             <exclusion>
               <groupId>javax.xml.bind</groupId>
               <artifactId>jaxb-api</artifactId>
             </exclusion>
             <exclusion>
               <groupId>javax.ws.rs</groupId>
               <artifactId>jsr311-api</artifactId>
             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minicluster</artifactId>
           <scope>compile</scope>
           <exclusions>
             <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>
             <exclusion>
               <groupId>javax.ws.rs</groupId>
               <artifactId>jsr311-api</artifactId>
             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minikdc</artifactId>
         </dependency>
       </dependencies>
     </profile>
   </profiles>
 </project>


@@ -255,13 +255,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>


@@ -174,13 +174,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>

pom.xml (122 changed lines)

@@ -688,7 +688,7 @@
             </goals>
             <configuration>
               <excludes>
-                <exclude>log4j.properties</exclude>
+                <exclude>log4j2.xml</exclude>
               </excludes>
             </configuration>
           </execution>
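The packaging filter now strips log4j2.xml where it used to strip log4j.properties, reflecting the configuration-file switch that comes with Log4j2. For orientation, a minimal log4j2.xml of the kind this exclude guards against shipping might look like this (illustrative content, not the configuration file HBase actually ships):

    <?xml version="1.0" encoding="UTF-8"?>
    <Configuration status="WARN">
      <Appenders>
        <Console name="Console" target="SYSTEM_OUT">
          <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %m%n"/>
        </Console>
      </Appenders>
      <Loggers>
        <Root level="INFO">
          <AppenderRef ref="Console"/>
        </Root>
      </Loggers>
    </Configuration>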
@@ -1052,7 +1052,29 @@
             </configuration>
           </execution>
           <execution>
-            <id>banned-log4j</id>
+            <id>banned-other-logging-framework</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <excludes>
+                    <exclude>log4j:*</exclude>
+                    <exclude>org.slf4j:slf4j-log4j12</exclude>
+                    <exclude>ch.qos.reload4j:*</exclude>
+                    <exclude>org.slf4j:slf4j-reload4j</exclude>
+                    <exclude>ch.qos.logback:*</exclude>
+                  </excludes>
+                  <message>
+                    We do not allow other logging frameworks as now we use log4j2
+                  </message>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+          <execution>
+            <id>banned-slf4j-log4j12</id>
             <goals>
               <goal>enforce</goal>
             </goals>
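Because log4j 1.x, reload4j, slf4j-log4j12, slf4j-reload4j and logback are all banned here, any of them arriving transitively has to be excluded at the point of use; the hadoop-mapreduce-client-app entry added further down in this pom does exactly that. The general shape, sketched against a hypothetical dependency (some.group:some-artifact is a placeholder, not a real coordinate):

    <dependency>
      <groupId>some.group</groupId><!-- hypothetical dependency that drags in log4j 1.x -->
      <artifactId>some-artifact</artifactId>
      <exclusions>
        <!-- keep the banned log4j 1.x jars off the classpath;
             log4j-1.2-api handles any code still calling the old API -->
        <exclusion>
          <groupId>log4j</groupId>
          <artifactId>log4j</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
      </exclusions>
    </dependency>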
@@ -1060,11 +1082,10 @@
               <rules>
                 <bannedDependencies>
                   <excludes>
-                    <exclude>log4j:**</exclude>
                     <exclude>org.slf4j:slf4j-log4j12</exclude>
                   </excludes>
                   <message>
-                    Use reload4j instead
+                    We do not allow slf4j-log4j12 dependency as now we use log4j-slf4j-impl
                   </message>
                 </bannedDependencies>
               </rules>
@@ -1119,16 +1140,18 @@
                 <reason>Use SLF4j for logging</reason>
                 <bannedImports>
                   <bannedImport>org.apache.commons.logging.**</bannedImport>
+                  <bannedImport>org.apache.log4j.**</bannedImport>
+                  <bannedImport>org.apache.logging.log4j.**</bannedImport>
                 </bannedImports>
               </restrictImports>
               <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
                 <includeTestCode>false</includeTestCode>
                 <commentLineBufferSize>512</commentLineBufferSize>
                 <reason>
-                  Do not use log4j directly in code, see Log4jUtils in hbase-logging for more details.
+                  Do not use log4j2 directly in code, see Log4jUtils in hbase-logging for more details.
                 </reason>
                 <bannedImports>
-                  <bannedImport>org.apache.log4j.**</bannedImport>
+                  <bannedImport>org.apache.logging.log4j.**</bannedImport>
                 </bannedImports>
               </restrictImports>
               <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
@@ -1533,7 +1556,7 @@
     <hamcrest.version>1.3</hamcrest.version>
     <opentelemetry.version>1.0.1</opentelemetry.version>
     <opentelemetry-javaagent.version>1.0.1</opentelemetry-javaagent.version>
-    <reload4j.version>1.2.19</reload4j.version>
+    <log4j2.version>2.17.2</log4j2.version>
     <mockito-core.version>2.28.2</mockito-core.version>
     <!--Internally we use a different version of protobuf. See hbase-protocol-shaded-->
     <external.protobuf.groupid>com.google.protobuf</external.protobuf.groupid>
@@ -1993,8 +2016,8 @@
     </dependency>
     <!--
       Logging dependencies. In general, we use slf4j as the log facade in HBase, so all sub
-      modules should depend on slf4j-api at compile scope, and then depend on slf4j-log4j12
-      and log4j at test scope(and in hbase-assembly when shipping the binary) to redirect the
+      modules should depend on slf4j-api at compile scope, and then depend on log4j-slf4j-impl
+      and log4j2 at test scope(and in hbase-assembly when shipping the binary) to redirect the
       log message to log4j. Do not introduce logging dependencies other than slf4j-api at compile
       scope as it will mess up the logging framework for downstream users.
       Here we also depend on jcl-over-slf4j and jul-to-slf4j, as some of the libraries we depend
@@ -2006,16 +2029,12 @@
       <artifactId>jettison</artifactId>
       <version>${jettison.version}</version>
     </dependency>
+    <!-- Logging -->
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
       <version>${slf4j.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
-      <version>${slf4j.version}</version>
-    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>jcl-over-slf4j</artifactId>
@@ -2027,9 +2046,24 @@
       <version>${slf4j.version}</version>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
-      <version>${reload4j.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <!-- Avro dependencies we mostly get transitively, manual version coallescing -->
     <dependency>
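Together with the jcl-over-slf4j and jul-to-slf4j bridges managed above, these entries let a packaging module route every logging API (SLF4J, commons-logging, java.util.logging, log4j 1.x) into Log4j2. A sketch of the complete runtime stack such a module might declare (versions omitted since dependencyManagement supplies them; the selection itself is an assumption for illustration):

    <!-- facade -->
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
    </dependency>
    <!-- bridges: commons-logging and java.util.logging into SLF4J -->
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>jcl-over-slf4j</artifactId>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>jul-to-slf4j</artifactId>
    </dependency>
    <!-- backend, SLF4J binding, and log4j 1.x bridge -->
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-1.2-api</artifactId>
    </dependency>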
@@ -2037,8 +2071,6 @@
       <artifactId>avro</artifactId>
       <version>${avro.version}</version>
     </dependency>
-    <!--This is not used by hbase directly. Used by thrift,
-      dropwizard and zk.-->
     <dependency>
       <groupId>com.github.ben-manes.caffeine</groupId>
       <artifactId>caffeine</artifactId>
@@ -3388,6 +3420,46 @@
           </exclusion>
         </exclusions>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-app</artifactId>
+        <version>${hadoop-three.version}</version>
+        <type>test-jar</type>
+        <exclusions>
+          <exclusion>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-mapper-asl</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-core-asl</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-jaxrs</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-xc</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.xml.bind</groupId>
+            <artifactId>jaxb-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.ws.rs</groupId>
+            <artifactId>jsr311-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
@@ -3414,10 +3486,6 @@
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-log4j12</artifactId>
           </exclusion>
-          <exclusion>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -3447,10 +3515,6 @@
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-log4j12</artifactId>
           </exclusion>
-          <exclusion>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -3863,10 +3927,6 @@
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-log4j12</artifactId>
           </exclusion>
-          <exclusion>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-          </exclusion>
         </exclusions>
       </dependency>
       <dependency>