true, meaning
+ * that by default a FileAppender will append to an existing file and not truncate it.
+ *
+ * This option is meaningful only if the FileAppender opens the file.
+ */
+ protected boolean fileAppend = true;
+
+ /**
+ * The name of the log file.
+ */
+ protected String fileName = null;
+
+ /**
+ * Do we do bufferedIO?
+ */
+ protected boolean bufferedIO = false;
+
+ /**
+ * Determines the size of the IO buffer. Default is 8K.
+ */
+ protected int bufferSize = 8 * 1024;
+
+ /**
+ * The default constructor does not do anything.
+ */
+ public FileAppender() {
+ }
+
+ /**
+ * Instantiate a FileAppender and open the file designated by fileName.
+ * The opened filename will become the output destination for this appender.
+ *
+ * If the append parameter is true, the file will be appended to. Otherwise, the file
+ * designated by fileName will be truncated before being opened.
+ *
+ * If the bufferedIO parameter is true, then buffered IO will be used to
+ * write to the output file.
+ */
+ public FileAppender(Layout layout, String fileName, boolean append, boolean bufferedIO,
+ int bufferSize) throws IOException {
+ this.layout = layout;
+ this.setFile(fileName, append, bufferedIO, bufferSize);
+ }
+
+ /**
+ * Instantiate a FileAppender and open the file designated by fileName. The opened
+ * filename will become the output destination for this appender.
+ *
+ * If the append parameter is true, the file will be appended to. Otherwise, the file
+ * designated by fileName will be truncated before being opened.
+ */
+ public FileAppender(Layout layout, String fileName, boolean append) throws IOException {
+ this.layout = layout;
+ this.setFile(fileName, append, false, bufferSize);
+ }
+
+ /**
+ * Instantiate a FileAppender and open the file designated by fileName. The opened
+ * filename will become the output destination for this appender.
+ *
+ * The file will be appended to.
+ */
+ public FileAppender(Layout layout, String fileName) throws IOException {
+ this(layout, fileName, true);
+ }
+
+ /**
+ * The File property takes a string value which should be the name of the file to append to.
+ *
+ * Note that the special values "System.out" or "System.err" are no longer honored.
+ *
+ * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
+ * options are set.
+ */
+ public void setFile(String file) {
+ // Trim spaces from both ends. The user probably does not want
+ // trailing spaces in file names.
+ String val = file.trim();
+ fileName = val;
+ }
+
+ /**
+ * Returns the value of the Append option.
+ */
+ public boolean getAppend() {
+ return fileAppend;
+ }
+
+ /** Returns the value of the File option. */
+ public String getFile() {
+ return fileName;
+ }
+
+ /**
+ * If the value of File is not null, then {@link #setFile} is called with the
+ * values of File and Append properties.
+ * @since 0.8.1
+ */
+ @Override
+ public void activateOptions() {
+ if (fileName != null) {
+ try {
+ setFile(fileName, fileAppend, bufferedIO, bufferSize);
+ } catch (java.io.IOException e) {
+ errorHandler.error("setFile(" + fileName + "," + fileAppend + ") call failed.", e,
+ org.apache.log4j.spi.ErrorCode.FILE_OPEN_FAILURE);
+ }
+ }
+ }
+
+ /**
+ * Closes the previously opened file.
+ */
+ protected void closeFile() {
+ if (this.qw != null) {
+ try {
+ this.qw.close();
+ } catch (java.io.IOException e) {
+ if (e instanceof InterruptedIOException) {
+ Thread.currentThread().interrupt();
+ }
+ // Exceptionally, it does not make sense to delegate to an
+ // ErrorHandler here, since a closed appender is basically dead.
+ }
+ }
+ }
+
+ /**
+ * Get the value of the BufferedIO option.
+ *
+ * BufferedIO will significantly increase performance on heavily loaded systems.
+ */
+ public boolean getBufferedIO() {
+ return this.bufferedIO;
+ }
+
+ /**
+ * Get the size of the IO buffer.
+ */
+ public int getBufferSize() {
+ return this.bufferSize;
+ }
+
+ /**
+ * The Append option takes a boolean value. It is set to true by default. If
+ * true, then File will be opened in append mode by {@link #setFile setFile} (see
+ * above). Otherwise, {@link #setFile setFile} will open File in truncate mode.
+ *
+ * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the
+ * options are set.
+ */
+ public void setAppend(boolean flag) {
+ fileAppend = flag;
+ }
+
+ /**
+ * The BufferedIO option takes a boolean value. It is set to false by default.
+ * If true, then File will be opened and the resulting {@link java.io.Writer} wrapped
+ * around a {@link BufferedWriter}. BufferedIO will significantly increase performance
+ * on heavily loaded systems.
+ */
+ public void setBufferedIO(boolean bufferedIO) {
+ this.bufferedIO = bufferedIO;
+ if (bufferedIO) {
+ immediateFlush = false;
+ }
+ }
+
+ /**
+ * Set the size of the IO buffer.
+ */
+ public void setBufferSize(int bufferSize) {
+ this.bufferSize = bufferSize;
+ }
+
+ /**
+ *
+ * Sets and opens the file where the log output will go. The specified file must be
+ * writable.
+ *
+ * If there was already an opened file, then the previous file is closed first.
+ *
+ * Do not use this method directly. To configure a FileAppender or one of its subclasses, set
+ * its properties one by one and then call activateOptions.
+ * @param fileName The path to the log file.
+ * @param append If true, will append to fileName; otherwise, fileName will be truncated.
+ * @param bufferedIO If true, wrap the writer in a {@link BufferedWriter}.
+ * @param bufferSize The size of the IO buffer, in bytes.
+ */
+ public synchronized void setFile(String fileName, boolean append, boolean bufferedIO,
+ int bufferSize) throws IOException {
+
+ // It does not make sense to have immediate flush and bufferedIO.
+ if (bufferedIO) {
+ setImmediateFlush(false);
+ }
+
+ reset();
+ FileOutputStream ostream = null;
+ try {
+ //
+ // attempt to create file
+ //
+ ostream = new FileOutputStream(fileName, append);
+ } catch (FileNotFoundException ex) {
+ //
+ // if parent directory does not exist then
+ // attempt to create it and try to create file
+ // see bug 9150
+ //
+ String parentName = new File(fileName).getParent();
+ if (parentName != null) {
+ File parentDir = new File(parentName);
+ if (!parentDir.exists() && parentDir.mkdirs()) {
+ ostream = new FileOutputStream(fileName, append);
+ } else {
+ throw ex;
+ }
+ } else {
+ throw ex;
+ }
+ }
+ Writer fw = createWriter(ostream);
+ if (bufferedIO) {
+ fw = new BufferedWriter(fw, bufferSize);
+ }
+ this.setQWForFiles(fw);
+ this.fileName = fileName;
+ this.fileAppend = append;
+ this.bufferedIO = bufferedIO;
+ this.bufferSize = bufferSize;
+ writeHeader();
+ }
+
+ /**
+ * Sets the quiet writer being used. This method is overridden by {@code RollingFileAppender}.
+ */
+ protected void setQWForFiles(Writer writer) {
+ this.qw = new org.apache.log4j.helpers.QuietWriter(writer, errorHandler);
+ }
+
+ /**
+ * Close any previously opened file and call the parent's reset.
+ */
+ @Override
+ protected void reset() {
+ closeFile();
+ this.fileName = null;
+ super.reset();
+ }
+}
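
For orientation, the configuration lifecycle the javadoc above describes -- setters only record values, and activateOptions() performs the actual open -- looks like this in use. A sketch only (the layout pattern and path are illustrative; setLayout comes from the log4j 1.x WriterAppender/AppenderSkeleton hierarchy this class is modeled on):

// Illustrative only -- not part of the patch.
public class FileAppenderUsage {
  public static void main(String[] args) {
    FileAppender appender = new FileAppender();
    appender.setLayout(new org.apache.log4j.PatternLayout("%d{ISO8601} %-5p [%t] %m%n"));
    appender.setFile("  logs/app.log "); // trimmed by setFile(String); nothing is opened yet
    appender.setAppend(true);            // honored only when the file is actually opened
    appender.setBufferedIO(true);        // also turns immediateFlush off
    appender.setBufferSize(8 * 1024);
    appender.activateOptions();          // only now does setFile(name, append, bufferedIO, bufferSize) open the file
  }
}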
diff --git a/hbase-logging/src/test/resources/log4j.properties b/hbase-logging/src/test/resources/log4j.properties
deleted file mode 100644
index c322699ced2..00000000000
--- a/hbase-logging/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-
-log4j.logger.org.apache.hadoop=WARN
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase=DEBUG
-
-#These settings are workarounds against spurious logs from the minicluster.
-#See HBASE-4709
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
diff --git a/hbase-logging/src/test/resources/log4j2.xml b/hbase-logging/src/test/resources/log4j2.xml
new file mode 100644
index 00000000000..643fae61c7c
--- /dev/null
+++ b/hbase-logging/src/test/resources/log4j2.xml
@@ -0,0 +1,45 @@
+
+
+
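
The 45 added lines of the new log4j2.xml were lost above. As an illustrative sketch only -- not the actual file; the appender name, pattern, and logger levels are assumptions modeled on the log4j.properties deleted above -- a log4j2 console configuration of that shape looks like:

<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
  <Appenders>
    <!-- console target and pattern taken from the removed log4j.properties -->
    <Console name="Console" target="SYSTEM_ERR">
      <PatternLayout pattern="%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n"/>
    </Console>
  </Appenders>
  <Loggers>
    <!-- levels carried over from the removed properties file -->
    <Logger name="org.apache.hadoop" level="WARN"/>
    <Logger name="org.apache.zookeeper" level="ERROR"/>
    <Logger name="org.apache.hadoop.hbase" level="DEBUG"/>
    <Root level="INFO">
      <AppenderRef ref="Console"/>
    </Root>
  </Loggers>
</Configuration>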
 * Configuration conf = TEST_UTIL.getConfiguration();
 * for (Iterator&lt;Map.Entry&lt;String, String&gt;&gt; i = conf.iterator(); i.hasNext();) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
index 57cfbeca6e2..028b8fd8c30 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
@@ -29,11 +29,10 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -61,10 +60,13 @@ public class TestAsyncTableBatchRetryImmediately {
 
   private static AsyncConnection CONN;
 
+  private static String LOG_LEVEL;
+
   @BeforeClass
   public static void setUp() throws Exception {
     // disable the debug log to avoid flooding the output
-    LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
     UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, 1024);
     UTIL.startMiniCluster(1);
     Table table = UTIL.createTable(TABLE_NAME, FAMILY);
@@ -79,6 +81,9 @@ public class TestAsyncTableBatchRetryImmediately {
 
   @AfterClass
   public static void tearDown() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     CONN.close();
     UTIL.shutdownMiniCluster();
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
index eaf1f9deb4c..4818b6d9b96 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
@@ -33,14 +33,13 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -65,6 +64,7 @@ public class TestMultiRespectsLimits {
     CompatibilityFactory.getInstance(MetricsAssertHelper.class);
   private final static byte[] FAMILY = Bytes.toBytes("D");
   public static final int MAX_SIZE = 100;
+  private static String LOG_LEVEL;
 
   @Rule
   public TestName name = new TestName();
@@ -72,7 +72,8 @@ public class TestMultiRespectsLimits {
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
     // disable the debug log to avoid flooding the output
-    LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
     TEST_UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
       MAX_SIZE);
@@ -82,6 +83,9 @@ public class TestMultiRespectsLimits {
 
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     TEST_UTIL.shutdownMiniCluster();
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index 941d921481d..a45804a4515 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.RPCTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.After;
@@ -82,10 +83,8 @@ public class TestProtoBufRpc {
     this.conf = HBaseConfiguration.create();
     this.conf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY, rpcServerImpl);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer")
-      .setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace")
-      .setLevel(org.apache.log4j.Level.TRACE);
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer", "ERROR");
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer.trace", "TRACE");
     // Create server side implementation
     // Get RPC server for server side implementation
     this.server = RpcServerFactory.createRpcServer(null, "testrpc",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
index 2d66106a3d2..122517574f7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
@@ -35,17 +35,19 @@ import org.mockito.Mockito;
 public class TestRpcServerTraceLogging {
 
   @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule
-    .forClass(TestRpcServerTraceLogging.class);
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestRpcServerTraceLogging.class);
 
-  static org.apache.log4j.Logger rpcServerLog = org.apache.log4j.Logger.getLogger(RpcServer.class);
+  private static final org.apache.logging.log4j.core.Logger rpcServerLog =
+    (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RpcServer.class);
 
   static final String TRACE_LOG_MSG =
-    "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }"
-        + " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } "
-        + "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } "
-        + "number_of_rows: 2147483647 close_scanner: false client_handles_partials: "
-        + "true client_handles_heartbeats: true track_scan_metrics: false";
+    "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }"
+      + " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } "
+      + "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } "
+      + "number_of_rows: 2147483647 close_scanner: false client_handles_partials: "
+      + "true client_handles_heartbeats: true track_scan_metrics: false";
 
   static final int TRACE_LOG_LENGTH = TRACE_LOG_MSG.length();
@@ -62,7 +64,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOff() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.DEBUG);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.DEBUG);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(150 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -72,7 +74,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOn() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(250 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -82,7 +84,7 @@ public class TestRpcServerTraceLogging {
   @Test
   public void testLoggingWithTraceOnLargeMax() {
     conf.setInt("hbase.ipc.trace.log.max.length", 2000);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(TRACE_LOG_LENGTH, truncatedString.length());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
index 26de198a8d4..75ad7c186b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
@@ -20,14 +20,16 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.reset;
-import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -36,10 +38,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -47,8 +45,9 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
@@ -80,7 +79,7 @@ public class TestMultiLogThreshold {
   private HRegionServer rs;
   private RSRpcServices services;
-  private Appender appender;
+  private org.apache.logging.log4j.core.Appender appender;
 
   @Parameterized.Parameter
   public static boolean rejectLargeBatchOp;
@@ -90,6 +89,21 @@ public class TestMultiLogThreshold {
     return Arrays.asList(new Object[] { false }, new Object[] { true });
   }
 
+  private final class LevelAndMessage {
+    final org.apache.logging.log4j.Level level;
+
+    final String msg;
+
+    public LevelAndMessage(org.apache.logging.log4j.Level level, String msg) {
+      this.level = level;
+      this.msg = msg;
+    }
+
+  }
+
+  // log4j2 will reuse the LogEvent so we need to copy the level and message out.
+  private BlockingDeque<LevelAndMessage> logs = new LinkedBlockingDeque<>();
+
   @Before
   public void setupTest() throws Exception {
     util = new HBaseTestingUtility();
@@ -100,13 +114,28 @@ public class TestMultiLogThreshold {
     util.startMiniCluster();
     util.createTable(NAME, TEST_FAM);
     rs = util.getRSForFirstRegionInTable(NAME);
-    appender = mock(Appender.class);
-    LogManager.getLogger(RSRpcServices.class).addAppender(appender);
+    appender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(appender.getName()).thenReturn("mockAppender");
+    when(appender.isStarted()).thenReturn(true);
+    doAnswer(new Answer<Void>() {
+
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        org.apache.logging.log4j.core.LogEvent logEvent =
+          invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+        logs.add(
+          new LevelAndMessage(logEvent.getLevel(), logEvent.getMessage().getFormattedMessage()));
+        return null;
+      }
+    }).when(appender).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).addAppender(appender);
   }
 
   @After
   public void tearDown() throws Exception {
-    LogManager.getLogger(RSRpcServices.class).removeAppender(appender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).removeAppender(appender);
     util.shutdownMiniCluster();
   }
@@ -149,17 +178,16 @@ public class TestMultiLogThreshold {
   }
 
   private void assertLogBatchWarnings(boolean expected) {
-    ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
-    verify(appender, atLeastOnce()).doAppend(captor.capture());
+    assertFalse(logs.isEmpty());
     boolean actual = false;
-    for (LoggingEvent event : captor.getAllValues()) {
-      if (event.getLevel() == Level.WARN &&
-        event.getRenderedMessage().contains("Large batch operation detected")) {
+    for (LevelAndMessage event : logs) {
+      if (event.level == org.apache.logging.log4j.Level.WARN &&
+        event.msg.contains("Large batch operation detected")) {
         actual = true;
         break;
       }
     }
-    reset(appender);
+    logs.clear();
     assertEquals(expected, actual);
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
index da395512f7d..b3c2eb62296 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
@@ -42,11 +42,6 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
 import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.WriterAppender;
 import org.apache.zookeeper.KeeperException;
 import org.junit.After;
 import org.junit.Before;
@@ -56,6 +51,8 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 @Category(LargeTests.class)
 public class TestRegionServerReportForDuty {
@@ -91,26 +88,15 @@ public class TestRegionServerReportForDuty {
     testUtil.shutdownMiniDFSCluster();
   }
 
-  /**
-   * LogCapturer is similar to {@link org.apache.hadoop.test.GenericTestUtils.LogCapturer}
-   * except that this implementation has a default appender to the root logger.
-   * Hadoop 2.8+ supports the default appender in the LogCapture it ships and this can be replaced.
-   * TODO: This class can be removed after we upgrade Hadoop dependency.
-   */
-  static class LogCapturer {
+  private static class LogCapturer {
     private StringWriter sw = new StringWriter();
-    private WriterAppender appender;
-    private org.apache.log4j.Logger logger;
+    private org.apache.logging.log4j.core.appender.WriterAppender appender;
+    private org.apache.logging.log4j.core.Logger logger;
 
-    LogCapturer(org.apache.log4j.Logger logger) {
+    LogCapturer(org.apache.logging.log4j.core.Logger logger) {
       this.logger = logger;
-      Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout");
-      if (defaultAppender == null) {
-        defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console");
-      }
-      final Layout layout = (defaultAppender == null) ? new PatternLayout() :
-        defaultAppender.getLayout();
-      this.appender = new WriterAppender(layout, sw);
+      this.appender = org.apache.logging.log4j.core.appender.WriterAppender.newBuilder()
+        .setName("test").setTarget(sw).build();
       this.logger.addAppender(this.appender);
     }
 
@@ -146,7 +132,9 @@ public class TestRegionServerReportForDuty {
     master = cluster.addMaster();
     master.start();
 
-    LogCapturer capturer = new LogCapturer(org.apache.log4j.Logger.getLogger(HRegionServer.class));
+    LogCapturer capturer =
+      new LogCapturer((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+        .getLogger(HRegionServer.class));
     // Set sleep interval relatively low so that exponential backoff is more demanding.
     int msginterval = 100;
     cluster.getConfiguration().setInt("hbase.regionserver.msginterval", msginterval);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
index 314b96695b6..31b95ee14d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
@@ -26,11 +26,11 @@ import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
@@ -132,12 +132,9 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
     this.ratio = inRatio;
 
     // Hide lots of logging so the system out is usable as a tab delimited file.
-    org.apache.log4j.Logger.getLogger(CompactionConfiguration.class).
-      setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class).
-      setLevel(org.apache.log4j.Level.ERROR);
-
-    org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR);
+    Log4jUtils.setLogLevel(CompactionConfiguration.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(RatioBasedCompactionPolicy.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(cpClass.getName(), "ERROR");
 
     Configuration configuration = HBaseConfiguration.create();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
index d87fc3e2a85..6d5e81ab3fb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
@@ -27,15 +27,17 @@ import static org.mockito.ArgumentMatchers.argThat;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.ArgumentMatchers.isA;
 import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -50,9 +52,6 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Appender;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -60,19 +59,14 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
 import org.mockito.ArgumentMatcher;
-import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
 
-import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 
-@RunWith(MockitoJUnitRunner.class)
-@Category({LargeTests.class})
+@Category({ LargeTests.class })
 public class TestCanaryTool {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
-      HBaseClassTestRule.forClass(TestCanaryTool.class);
+    HBaseClassTestRule.forClass(TestCanaryTool.class);
 
   private HBaseTestingUtility testingUtility;
   private static final byte[] FAMILY = Bytes.toBytes("f");
@@ -81,22 +75,26 @@ public class TestCanaryTool {
   @Rule
   public TestName name = new TestName();
 
+  private org.apache.logging.log4j.core.Appender mockAppender;
+
   @Before
   public void setUp() throws Exception {
     testingUtility = new HBaseTestingUtility();
     testingUtility.startMiniCluster();
-    LogManager.getRootLogger().addAppender(mockAppender);
+    mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(mockAppender.getName()).thenReturn("mockAppender");
+    when(mockAppender.isStarted()).thenReturn(true);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
   }
 
   @After
   public void tearDown() throws Exception {
     testingUtility.shutdownMiniCluster();
-    LogManager.getRootLogger().removeAppender(mockAppender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender);
   }
 
-  @Mock
-  Appender mockAppender;
-
   @Test
   public void testBasicZookeeperCanaryWorks() throws Exception {
     final String[] args = { "-t", "10000", "-zookeeper" };
@@ -105,8 +103,8 @@ public class TestCanaryTool {
 
   @Test
   public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception {
-    final String[] args = { "-t", "10000", "-zookeeper", "-treatFailureAsError",
-      "-permittedZookeeperFailures", "1" };
+    final String[] args =
+      { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" };
     testZookeeperCanaryWithArgs(args);
   }
 
@@ -115,7 +113,7 @@ public class TestCanaryTool {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -156,7 +154,7 @@ public class TestCanaryTool {
     // the test table has two column family. If readAllCF set true,
     // we expect read count is double of region count
     int expectedReadCount =
-        readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
+      readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
     assertEquals("canary region success count should equal total expected read count",
       expectedReadCount, sink.getReadSuccessCount());
     Map<String, List<CanaryTool.RegionTaskResult>> regionMap = sink.getRegionMap();
@@ -184,7 +182,7 @@ public class TestCanaryTool {
     TableName tableName = TableName.valueOf("testCanaryRegionTaskResult");
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -197,23 +195,23 @@ public class TestCanaryTool {
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
 
     assertTrue("canary should expect to scan at least 1 region",
-        sink.getTotalExpectedRegions() > 0);
+      sink.getTotalExpectedRegions() > 0);
     assertTrue("there should be no read failures", sink.getReadFailureCount() == 0);
     assertTrue("there should be no write failures", sink.getWriteFailureCount() == 0);
     assertTrue("verify read success count > 0", sink.getReadSuccessCount() > 0);
     assertTrue("verify write success count > 0", sink.getWriteSuccessCount() > 0);
 
     verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
-        isA(ColumnFamilyDescriptor.class), anyLong());
+      isA(ColumnFamilyDescriptor.class), anyLong());
     verify(sink, atLeastOnce()).publishWriteTiming(isA(ServerName.class), isA(RegionInfo.class),
-        isA(ColumnFamilyDescriptor.class), anyLong());
+      isA(ColumnFamilyDescriptor.class), anyLong());
 
     assertEquals("canary region success count should equal total expected regions",
-        sink.getReadSuccessCount() + sink.getWriteSuccessCount(), sink.getTotalExpectedRegions());
+      sink.getReadSuccessCount() + sink.getWriteSuccessCount(), sink.getTotalExpectedRegions());
 
     Map<String, List<CanaryTool.RegionTaskResult>> regionMap = sink.getRegionMap();
     assertFalse("verify region map has size > 0", regionMap.isEmpty());
 
     for (String regionName : regionMap.keySet()) {
-      for (CanaryTool.RegionTaskResult res: regionMap.get(regionName)) {
+      for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) {
         assertNotNull("verify getRegionNameAsString()", regionName);
         assertNotNull("verify getRegionInfo()", res.getRegionInfo());
         assertNotNull("verify getTableName()", res.getTableName());
@@ -236,24 +234,25 @@ public class TestCanaryTool {
 
   // Ignore this test. It fails w/ the below on some mac os x.
   // [ERROR] Failures:
-  // [ERROR] TestCanaryTool.testReadTableTimeouts:216
+  // [ERROR]   TestCanaryTool.testReadTableTimeouts:216
   // Argument(s) are different! Wanted:
   // mockAppender.doAppend(
   //
-  // );
-  // -> at org.apache.hadoop.hbase.tool.TestCanaryTool
-  // .testReadTableTimeouts(TestCanaryTool.java:216)
-  // Actual invocations have different arguments:
-  // mockAppender.doAppend(
-  // org.apache.log4j.spi.LoggingEvent@2055cfc1
-  // );
-  // )
-  // )
+  // );
+  // -> at org.apache.hadoop.hbase.tool.TestCanaryTool
+  // .testReadTableTimeouts(TestCanaryTool.java:216)
+  // Actual invocations have different arguments:
+  // mockAppender.doAppend(
+  // org.apache.log4j.spi.LoggingEvent@2055cfc1
+  // );
+  // )
+  // )
   //
-  @org.junit.Ignore @Test
+  @org.junit.Ignore
+  @Test
   public void testReadTableTimeouts() throws Exception {
-    final TableName [] tableNames = new TableName[] {TableName.valueOf(name.getMethodName() + "1"),
-      TableName.valueOf(name.getMethodName() + "2")};
+    final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"),
+      TableName.valueOf(name.getMethodName() + "2") };
     // Create 2 test tables.
     for (int j = 0; j < 2; j++) {
       Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY });
@@ -270,8 +269,8 @@ public class TestCanaryTool {
     CanaryTool canary = new CanaryTool(executor, sink);
     String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," +
       tableNames[1].getNameAsString() + "=0";
-    String[] args = {"-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
-      name.getMethodName() + "2"};
+    String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
+      name.getMethodName() + "2" };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class));
     for (int i = 0; i < 2; i++) {
@@ -281,18 +280,21 @@ public class TestCanaryTool {
         sink.getReadLatencyMap().get(tableNames[i].getNameAsString()));
     }
     // One table's timeout is set for 0 ms and thus, should lead to an error.
-    verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("exceeded the configured read timeout.");
-      }
-    }));
-    verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Configured read timeout");
-      }
-    }));
+    verify(mockAppender, times(1))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("exceeded the configured read timeout.");
+        }
+      }));
+    verify(mockAppender, times(2))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage().contains("Configured read timeout");
+        }
+      }));
   }
 
   @Test
@@ -300,43 +302,47 @@ public class TestCanaryTool {
     ExecutorService executor = new ScheduledThreadPoolExecutor(1);
     CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink());
     CanaryTool canary = new CanaryTool(executor, sink);
-    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE)};
+    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
     assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
-    verify(mockAppender, times(1)).doAppend(argThat(
-        new ArgumentMatcher<LoggingEvent>() {
-          @Override
-          public boolean matches(LoggingEvent argument) {
-            return argument.getRenderedMessage().contains("Configured write timeout");
-          }
-        }));
+    verify(mockAppender, times(1))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage().contains("Configured write timeout");
+        }
+      }));
   }
 
-  //no table created, so there should be no regions
+  // no table created, so there should be no regions
   @Test
  public void testRegionserverNoRegions() throws Exception {
     runRegionserverCanary();
-    verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
-      }
-    }));
+    verify(mockAppender)
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("Regionserver not serving any regions");
+        }
+      }));
   }
 
-  //by creating a table, there shouldn't be any region servers not serving any regions
+  // by creating a table, there shouldn't be any region servers not serving any regions
   @Test
   public void testRegionserverWithRegions() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     testingUtility.createTable(tableName, new byte[][] { FAMILY });
     runRegionserverCanary();
-    verify(mockAppender, never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
-      }
-    }));
+    verify(mockAppender, never())
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("Regionserver not serving any regions");
        }
+      }));
   }
 
   @Test
@@ -344,7 +350,7 @@ public class TestCanaryTool {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -358,23 +364,20 @@ public class TestCanaryTool {
       new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration());
     conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true);
     assertEquals(0, ToolRunner.run(conf, canary, args));
-    verify(sink, atLeastOnce())
-        .publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
-          isA(ColumnFamilyDescriptor.class), anyLong());
+    verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
+      isA(ColumnFamilyDescriptor.class), anyLong());
     assertEquals("verify no read error count", 0, canary.getReadFailures().size());
   }
 
   private void runRegionserverCanary() throws Exception {
     ExecutorService executor = new ScheduledThreadPoolExecutor(1);
     CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink());
-    String[] args = { "-t", "10000", "-regionserver"};
+    String[] args = { "-t", "10000", "-regionserver" };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertEquals("verify no read error count", 0, canary.getReadFailures().size());
  }
 
   private void testZookeeperCanaryWithArgs(String[] args) throws Exception {
-    Integer port =
-      Iterables.getOnlyElement(testingUtility.getZkCluster().getClientPortList(), null);
     String hostPort = testingUtility.getZkCluster().getAddress().toString();
     testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort);
     ExecutorService executor = new ScheduledThreadPoolExecutor(2);
@@ -382,8 +385,8 @@ public class TestCanaryTool {
     CanaryTool canary = new CanaryTool(executor, sink);
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
 
-    String baseZnode = testingUtility.getConfiguration()
-        .get(HConstants.ZOOKEEPER_ZNODE_PARENT, HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
+    String baseZnode = testingUtility.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT,
+      HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
     verify(sink, atLeastOnce()).publishReadTiming(eq(baseZnode), eq(hostPort), anyLong());
   }
 }
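
All of the test diffs above repeat one capture pattern: instead of log4j 1.x's Appender.doAppend(LoggingEvent), a mocked org.apache.logging.log4j.core.Appender is attached to a core Logger and messages are read via LogEvent.getMessage().getFormattedMessage(). A condensed sketch of that pattern -- the logger name and message are illustrative, not from the patch:

// Illustrative only -- the shared shape of the test changes above.
public class MockAppenderSketch {
  public static void main(String[] args) {
    org.apache.logging.log4j.core.Logger logger =
      (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
        .getLogger("org.example");
    org.apache.logging.log4j.core.Appender appender =
      org.mockito.Mockito.mock(org.apache.logging.log4j.core.Appender.class);
    // log4j2 only routes events to appenders that report a name and a started state
    org.mockito.Mockito.when(appender.getName()).thenReturn("mockAppender");
    org.mockito.Mockito.when(appender.isStarted()).thenReturn(true);
    logger.addAppender(appender);
    try {
      logger.warn("Large batch operation detected");
      org.mockito.Mockito.verify(appender).append(org.mockito.ArgumentMatchers.argThat(
        event -> event.getMessage().getFormattedMessage().contains("Large batch operation")));
    } finally {
      logger.removeAppender(appender); // always detach, as the @After methods above do
    }
  }
}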
@@org.apache.logging.log4j +log4j-slf4j-impl provided - - diff --git a/hbase-shaded/hbase-shaded-client/pom.xml b/hbase-shaded/hbase-shaded-client/pom.xml index 8cfc3f29021..4c6ff0e834a 100644 --- a/hbase-shaded/hbase-shaded-client/pom.xml +++ b/hbase-shaded/hbase-shaded-client/pom.xml @@ -1,6 +1,6 @@4.0.0 -- -hbase-shaded -org.apache.hbase -3.0.0-SNAPSHOT -.. -hbase-shaded-client-byo-hadoop -Apache HBase - Shaded - Client -- -- -- -org.apache.maven.plugins -maven-site-plugin -- -true -- - -maven-assembly-plugin -- -true -- -org.apache.maven.plugins -maven-shade-plugin -- + xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + +- -org.apache.hbase -hbase-client -4.0.0 ++ +hbase-shaded +org.apache.hbase +3.0.0-SNAPSHOT +.. +hbase-shaded-client-byo-hadoop +Apache HBase - Shaded - Client ++ ++ ++ +org.apache.maven.plugins +maven-site-plugin ++ +true ++ + +maven-assembly-plugin ++ +true ++ +org.apache.maven.plugins +maven-shade-plugin ++ -+ +org.apache.hbase +hbase-client +- - - +- -hadoop-3.0 -- -- !hadoop.profile - -- -org.apache.hadoop -hadoop-auth -provided -- -org.apache.hadoop -hadoop-common -provided -- -org.codehaus.jackson -jackson-jaxrs -1.9.13 -provided -- -- -org.codehaus.jackson -jackson-mapper-asl -- -org.codehaus.jackson -jackson-core-asl -- -org.codehaus.jackson -jackson-xc -1.9.13 -provided -- -- -org.codehaus.jackson -jackson-mapper-asl -- -org.codehaus.jackson -jackson-core-asl -+ + + + +hadoop-3.0 ++ ++ +!hadoop.profile ++ ++ +org.apache.hadoop +hadoop-auth +provided ++ +org.apache.hadoop +hadoop-common +provided ++ +org.codehaus.jackson +jackson-jaxrs +1.9.13 +provided ++ ++ +org.codehaus.jackson +jackson-mapper-asl ++ +org.codehaus.jackson +jackson-core-asl ++ +org.codehaus.jackson +jackson-xc +1.9.13 +provided ++ ++ +org.codehaus.jackson +jackson-mapper-asl ++ +org.codehaus.jackson +jackson-core-asl ++ xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml b/hbase-shaded/hbase-shaded-mapreduce/pom.xml index 4fe7fe8a767..fb48adb5d97 100644 --- a/hbase-shaded/hbase-shaded-mapreduce/pom.xml +++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml @@ -1,6 +1,6 @@4.0.0 -- -hbase-shaded -org.apache.hbase -3.0.0-SNAPSHOT -.. -hbase-shaded-client -Apache HBase - Shaded - Client (with Hadoop bundled) -- -- -- -org.apache.maven.plugins -maven-site-plugin -- -true -- - -maven-assembly-plugin -- -true -- -org.apache.maven.plugins -maven-shade-plugin -- -- -aggregate-into-a-jar-with-relocated-third-parties -- -- -- - -javax.annotation:javax.annotation-api -javax.activation:javax.activation-api -jakarta.ws.rs:jakarta.ws.rs-api -jakarta.annotation:jakarta.annotation-api -jakarta.validation:jakarta.validation-api -org.glassfish.hk2.external:jakarta.inject - - -org.apache.hbase:hbase-resource-bundle -org.slf4j:* -com.google.code.findbugs:* -com.github.stephenc.findbugs:* -com.github.spotbugs:* -org.apache.htrace:* -org.apache.yetus:* -log4j:* -commons-logging:* -org.javassist:* -- - +- -org.apache.hbase -hbase-client -4.0.0 ++ +hbase-shaded +org.apache.hbase +3.0.0-SNAPSHOT +.. 
+hbase-shaded-client +Apache HBase - Shaded - Client (with Hadoop bundled) ++ ++ ++ +org.apache.maven.plugins +maven-site-plugin ++ +true ++ + +maven-assembly-plugin ++ +true ++ +org.apache.maven.plugins +maven-shade-plugin ++ ++ +aggregate-into-a-jar-with-relocated-third-parties ++ ++ ++ + +javax.annotation:javax.annotation-api +javax.activation:javax.activation-api +jakarta.ws.rs:jakarta.ws.rs-api +jakarta.annotation:jakarta.annotation-api +jakarta.validation:jakarta.validation-api +org.glassfish.hk2.external:jakarta.inject + + +org.apache.hbase:hbase-resource-bundle +org.slf4j:* +com.google.code.findbugs:* +com.github.stephenc.findbugs:* +com.github.spotbugs:* +org.apache.htrace:* +org.apache.yetus:* +org.apache.logging.log4j:* +commons-logging:* +org.javassist:* ++ + +org.apache.hbase +hbase-client ++ xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> - diff --git a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml index 4750c9c9679..e71a067d377 100644 --- a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml +++ b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml @@ -1,68 +1,86 @@4.0.0 -- -hbase-shaded -org.apache.hbase -3.0.0-SNAPSHOT -.. -hbase-shaded-mapreduce -Apache HBase - Shaded - MapReduce -- -- -- -org.apache.maven.plugins -maven-site-plugin -- -true -- - -maven-assembly-plugin -- -true -- -org.apache.maven.plugins -maven-jar-plugin -- -- -- - -org/apache/hadoop/hbase/mapreduce/Driver -- -org.apache.maven.plugins -maven-shade-plugin -- - - -- -org.apache.hbase -hbase-mapreduce -- - -- -javax.xml.bind -jaxb-api -- - -javax.ws.rs -jsr311-api -- -javax.ws.rs -javax.ws.rs-api -- -com.sun.jersey -jersey-server -- -com.sun.jersey -jersey-client -- -com.sun.jersey -jersey-core -- -com.sun.jersey -jersey-json -- - -com.sun.jersey.contribs -jersey-guice -- -javax.servlet -javax.servlet-api -- -org.eclipse.jetty -jetty-http -- -org.eclipse.jetty -jetty-security -- -org.eclipse.jetty -jetty-server -- -org.eclipse.jetty -jetty-servlet -- -org.eclipse.jetty -jetty-util -- -org.eclipse.jetty -jetty-util-ajax -- -org.glassfish -javax.el -- -org.eclipse.jetty -jetty-webapp -- -org.glassfish.jersey.core -jersey-server -- - -org.glassfish.jersey.containers -jersey-container-servlet-core -- -org.glassfish.web -javax.servlet.jsp -- -javax.servlet.jsp -javax.servlet.jsp-api -- + maven-assembly-plugin ++ + +true ++ +org.apache.maven.plugins +maven-jar-plugin ++ ++ ++ + +org/apache/hadoop/hbase/mapreduce/Driver ++ + + +org.apache.maven.plugins +maven-shade-plugin ++ + + ++ +org.apache.hbase +hbase-mapreduce ++ + ++ +javax.xml.bind +jaxb-api ++ + +javax.ws.rs +jsr311-api ++ +javax.ws.rs +javax.ws.rs-api ++ +com.sun.jersey +jersey-server ++ +com.sun.jersey +jersey-client ++ +com.sun.jersey +jersey-core ++ +com.sun.jersey +jersey-json ++ + +com.sun.jersey.contribs +jersey-guice ++ +javax.servlet +javax.servlet-api ++ +org.eclipse.jetty +jetty-http ++ +org.eclipse.jetty +jetty-security ++ +org.eclipse.jetty +jetty-server ++ +org.eclipse.jetty +jetty-servlet ++ +org.eclipse.jetty +jetty-util ++ +org.eclipse.jetty +jetty-util-ajax ++ +org.glassfish +javax.el ++ +org.eclipse.jetty +jetty-webapp ++ +org.glassfish.jersey.core +jersey-server ++ + - -org.glassfish.jersey.containers +jersey-container-servlet-core +- hadoop-3.0 -- -- !hadoop.profile - -${hadoop-three.version} -- - -org.apache.hadoop -hadoop-common -provided -- 
-org.apache.hadoop -hadoop-hdfs -provided -- -org.apache.hadoop -hadoop-auth -provided -- -org.apache.hadoop -hadoop-mapreduce-client-core -provided -- -- -com.google.guava -guava -- -javax.xml.bind -jaxb-api -- -javax.ws.rs -jsr311-api -+ + +org.glassfish.web +javax.servlet.jsp ++ +javax.servlet.jsp +javax.servlet.jsp-api ++ + + ++ -hadoop-3.0 ++ ++ +!hadoop.profile ++ +${hadoop-three.version} ++ -+ +org.apache.hadoop +hadoop-common +provided ++ +org.apache.hadoop +hadoop-hdfs +provided ++ +org.apache.hadoop +hadoop-auth +provided ++ +org.apache.hadoop +hadoop-mapreduce-client-core +provided ++ ++ +com.google.guava +guava ++ +javax.xml.bind +jaxb-api ++ +javax.ws.rs +jsr311-api ++ -org.codehaus.jackson +jackson-jaxrs +1.9.13 +provided ++ org.codehaus.jackson -jackson-jaxrs -1.9.13 -provided -- -- -org.codehaus.jackson -jackson-mapper-asl -- -org.codehaus.jackson -jackson-core-asl -+ -jackson-mapper-asl + +org.codehaus.jackson -jackson-xc -1.9.13 -provided -- -- -org.codehaus.jackson -jackson-mapper-asl -- -org.codehaus.jackson -jackson-core-asl -jackson-core-asl + + + ++ + + +org.codehaus.jackson +jackson-xc +1.9.13 +provided ++ ++ +org.codehaus.jackson +jackson-mapper-asl ++ +org.codehaus.jackson +jackson-core-asl +- - diff --git a/hbase-shaded/hbase-shaded-testing-util/pom.xml b/hbase-shaded/hbase-shaded-testing-util/pom.xml index dbdec084250..a83b5150005 100644 --- a/hbase-shaded/hbase-shaded-testing-util/pom.xml +++ b/hbase-shaded/hbase-shaded-testing-util/pom.xml @@ -1,234 +1,203 @@4.0.0 + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + +4.0.0 -- +org.apache.hbase -hbase-build-configuration -3.0.0-SNAPSHOT -../../hbase-build-configuration -+ -org.apache.hbase +hbase-build-configuration +3.0.0-SNAPSHOT +../../hbase-build-configuration +hbase-shaded-testing-util-tester -Apache HBase - Shaded - Testing Util Tester -Ensures that hbase-shaded-testing-util works with hbase-shaded-client. +hbase-shaded-testing-util-tester +Apache HBase - Shaded - Testing Util Tester +Ensures that hbase-shaded-testing-util works with hbase-shaded-client. -- +- - -junit -junit -test -- - -org.slf4j -slf4j-log4j12 -test -- -org.apache.hbase -hbase-shaded-client -${project.version} -- -org.apache.hbase -hbase-shaded-testing-util -${project.version} -test -- -org.codehaus.jackson -jackson-mapper-asl -1.9.13 -test -+ + +junit +junit +test ++ +org.apache.hbase +hbase-logging +test-jar +test ++ +org.apache.logging.log4j +log4j-api +test ++ +org.apache.logging.log4j +log4j-core +test ++ +org.apache.logging.log4j +log4j-slf4j-impl +test ++ +org.apache.logging.log4j +log4j-1.2-api +test ++ +org.apache.hbase +hbase-shaded-client ++ +org.apache.hbase +hbase-shaded-testing-util +${project.version} +test ++ +org.codehaus.jackson +jackson-mapper-asl +1.9.13 +test +- - diff --git a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml index 2d05ee2a037..1ebdf5929b3 100644 --- a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml +++ b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml @@ -40,7 +40,6 @@4.0.0 + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + +4.0.0 -- +hbase-shaded -org.apache.hbase -3.0.0-SNAPSHOT -.. -+ -hbase-shaded +org.apache.hbase +3.0.0-SNAPSHOT +.. 
+hbase-shaded-testing-util -Apache HBase - Shaded - Testing Util - -- - - -- -org.apache.hadoop -hadoop-common -${hadoop.version} -test-jar -compile -- -- -javax.servlet.jsp -jsp-api -- -org.codehaus.jackson -jackson-mapper-asl -- -org.codehaus.jackson -jackson-core-asl -- -org.codehaus.jackson -jackson-jaxrs -- -org.codehaus.jackson -jackson-xc -- -javax.xml.bind -jaxb-api -- -javax.ws.rs -jsr311-api -- -org.apache.hadoop -hadoop-hdfs -test-jar -compile -- -org.apache.hadoop -hadoop-mapreduce-client-app -${hadoop.version} -test-jar -compile -- -- -org.codehaus.jackson -jackson-mapper-asl -- -org.codehaus.jackson -jackson-core-asl -- -org.codehaus.jackson -jackson-jaxrs -- -org.codehaus.jackson -jackson-xc -- -javax.xml.bind -jaxb-api -- -javax.ws.rs -jsr311-api -- -org.apache.hadoop -hadoop-mapreduce-client-jobclient -${hadoop.version} -test-jar -compile -- -org.apache.hbase -hbase-common -test-jar -compile -- -org.apache.hbase -hbase-server -test-jar -compile -- -- -javax.xml.bind -jaxb-api -- -org.apache.hbase -hbase-asyncfs -test-jar -compile -- -org.apache.hbase -hbase-zookeeper -test-jar -compile -- -org.apache.hbase -hbase-hadoop-compat -test-jar -compile -- -org.codehaus.jackson -jackson-jaxrs -1.9.13 -compile -- -org.apache.hbase -hbase-testing-util -${project.version} -compile -- -- -javax.xml.bind -jaxb-api -- +- -- -org.apache.maven.plugins -maven-site-plugin -- -true -- - -maven-assembly-plugin -- -true -- -org.apache.maven.plugins -maven-shade-plugin -- -- -aggregate-into-a-jar-with-relocated-third-parties -- -- -- - -javax.annotation:javax.annotation-api -javax.activation:javax.activation-api -jakarta.ws.rs:jakarta.ws.rs-api -jakarta.annotation:jakarta.annotation-api -jakarta.validation:jakarta.validation-api -org.glassfish.hk2.external:jakarta.inject - - -org.apache.hbase:hbase-resource-bundle -org.slf4j:* -com.google.code.findbugs:* -com.github.stephenc.findbugs:* -com.github.spotbugs:* -org.apache.htrace:* -org.apache.yetus:* -log4j:* -commons-logging:* -org.javassist:* -hbase-shaded-testing-util +Apache HBase - Shaded - Testing Util ++ + ++ +org.apache.hadoop +hadoop-common +${hadoop.version} +test-jar +compile ++ ++ +javax.servlet.jsp +jsp-api ++ +org.codehaus.jackson +jackson-mapper-asl ++ +org.codehaus.jackson +jackson-core-asl ++ +org.codehaus.jackson +jackson-jaxrs ++ +org.codehaus.jackson +jackson-xc ++ +javax.xml.bind +jaxb-api ++ +javax.ws.rs +jsr311-api ++ +org.apache.hadoop +hadoop-hdfs +test-jar +compile ++ +org.apache.hadoop +hadoop-mapreduce-client-app +test-jar +compile ++ +org.apache.hadoop +hadoop-mapreduce-client-jobclient +test-jar +compile ++ +org.apache.hbase +hbase-common +test-jar +compile ++ +org.apache.hbase +hbase-server +test-jar +compile ++ ++ +javax.xml.bind +jaxb-api ++ +org.apache.hbase +hbase-asyncfs +test-jar +compile ++ +org.apache.hbase +hbase-zookeeper +test-jar +compile ++ +org.apache.hbase +hbase-hadoop-compat +test-jar +compile ++ +org.codehaus.jackson +jackson-jaxrs +1.9.13 +compile ++ +org.apache.hbase +hbase-testing-util +compile ++ ++ +javax.xml.bind +jaxb-api ++ + ++ +org.apache.maven.plugins +maven-site-plugin ++ +true ++ + +maven-assembly-plugin ++ +true ++ +org.apache.maven.plugins +maven-shade-plugin ++ ++ +aggregate-into-a-jar-with-relocated-third-parties ++ ++ ++ + +javax.annotation:javax.annotation-api +javax.activation:javax.activation-api +jakarta.ws.rs:jakarta.ws.rs-api +jakarta.annotation:jakarta.annotation-api +jakarta.validation:jakarta.validation-api +org.glassfish.hk2.external:jakarta.inject + + 
diff --git a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
index 2d05ee2a037..1ebdf5929b3 100644
--- a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
@@ -40,7 +40,6 @@
   (parent hbase-shaded, org.apache.hbase:3.0.0-SNAPSHOT)
-  org.apache.hbase:hbase-shaded-client:${project.version}
+  org.apache.hbase:hbase-shaded-client (version now managed)
@@ -49,8 +48,18 @@
-  log4j:log4j (provided)
+  org.apache.logging.log4j:log4j-api (provided)
+  org.apache.logging.log4j:log4j-core (provided)
+  org.apache.logging.log4j:log4j-slf4j-impl (provided)
diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml
index 88832ba92a3..7c78d189224 100644
--- a/hbase-shaded/pom.xml
+++ b/hbase-shaded/pom.xml
@@ -1,5 +1,7 @@ (whole file reindented; substantive changes only)
   parent hbase-build-configuration, org.apache.hbase:3.0.0-SNAPSHOT
   hbase-shaded - Apache HBase - Shaded - "Module of HBase with most deps shaded." (pom)
   modules: hbase-shaded-client-byo-hadoop, hbase-shaded-client, hbase-shaded-mapreduce,
     hbase-shaded-testing-util, hbase-shaded-testing-util-tester,
     hbase-shaded-check-invariants, hbase-shaded-with-hadoop-check-invariants
   properties: skip flags (true) plus
     <shaded.prefix>org.apache.hadoop.hbase.shaded</shaded.prefix>
   dependencies:
     org.apache.hbase:hbase-resource-bundle (optional)
-    log4j:log4j (optional)
-    org.slf4j:slf4j-log4j12 (optional)
+    org.apache.logging.log4j:log4j-api (optional)
+    org.apache.logging.log4j:log4j-core (optional)
+    org.apache.logging.log4j:log4j-slf4j-impl (optional)
   build: maven-assembly-plugin skipped; maven-remote-resources-plugin execution
     aggregate-licenses (goal process; properties ${build.year},
     ${license.debug.print.included}, ${license.bundles.dependencies},
     ${license.bundles.jquery}, ${license.bundles.logo}, ${license.bundles.bootstrap};
     resource bundle ${project.groupId}:hbase-resource-bundle:${project.version};
     supplemental-models.xml)
   maven-shade-plugin 3.2.4, execution aggregate-into-a-jar-with-relocated-third-parties
     (phase package, goal shade), artifactSet excludes:
     javax.annotation:javax.annotation-api, javax.activation:javax.activation-api,
     jakarta.ws.rs:jakarta.ws.rs-api, jakarta.annotation:jakarta.annotation-api,
     jakarta.validation:jakarta.validation-api, org.glassfish.hk2.external:jakarta.inject,
     org.apache.hadoop:*, org.apache.hbase:hbase-resource-bundle, org.slf4j:*,
     com.google.code.findbugs:*, com.github.stephenc.findbugs:*, com.github.spotbugs:*,
     org.apache.htrace:*, org.apache.yetus:*,
-    log4j:*,
+    org.apache.logging.log4j:*,
     commons-logging:*, org.javassist:*
   relocations (pattern -> ${shaded.prefix}.pattern), unchanged:
     com.cedarsoftware, com.codahale, com.ctc, com.dropwizard, com.fasterxml,
     com.github.benmanes.caffeine, com.google, com.jamesmurty, com.jcraft, com.lmax,
     com.microsoft, com.nimbusds, com.squareup, com.thoughtworks, com.zaxxer,
     org.xbill, org.jboss.netty, io.netty, okio, org.checkerframework, org.codehaus,
     org.eclipse, org.ehcache, org.jcodings, org.joni, org.mortbay, org.nustaq,
     org.terracotta, org.tukaani, org.xerial, org.znerd, org.aopalliance,
     org.fusesource, org.iq80, org.jamon, org.jets3t, contribs.mx, org.objectweb,
     org.apache.avro, org.apache.curator,
     org.apache.directory, org.apache.http, org.apache.jasper, org.apache.jute,
     org.apache.kerby, org.apache.taglibs, org.apache.zookeeper,
     org.apache.commons.validator, org.apache.commons.beanutils, org.apache.commons.cli,
     org.apache.commons.collections, org.apache.commons.configuration,
     org.apache.commons.crypto, org.apache.commons.csv, org.apache.commons.daemon,
     org.apache.commons.io, org.apache.commons.math, org.apache.commons.math3,
     org.apache.commons.net, org.apache.commons.lang, org.apache.commons.lang3,
     org.apache.commons.el, org.apache.commons.httpclient, org.apache.commons.compress,
     org.apache.commons.digester, org.apache.commons.codec, org.apache.commons.text,
     and the resource pattern net/ -> ${shaded.prefix}.net.
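Each entry in the relocation list is shorthand for a maven-shade-plugin
<relocation> element. The com.google entry, for example, expands to (a sketch
using the plugin's standard pattern/shadedPattern syntax):

    <relocation>
      <!-- rewrite com.google.* classes and references under the shaded prefix -->
      <pattern>com.google</pattern>
      <shadedPattern>${shaded.prefix}.com.google</shadedPattern>
    </relocation>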
   transformers, unchanged: license resources aggregated (LICENSE.txt, ASL2.0),
     overview.html excluded, manifest carries ${project.name}
   filters, unchanged:
     dnsjava:dnsjava excludes dig*, jnamed*, lookup*, update*
     org.eclipse.jetty.orbit:javax.servlet.jsp.jstl excludes META-INF/ECLIPSEF.SF,
       META-INF/ECLIPSEF.RSA
     commons-beanutils:commons-beanutils-core excludes
       org/apache/commons/collections/*.class
     org.apache.hadoop:hadoop-yarn-common excludes webapps/*, webapps/**/*
     *:* excludes *.proto, **/*.proto, LICENSE, NOTICE
     org.apache.commons:commons-math3 excludes assets/org/apache/commons/math3/**/*
     org.apache.hadoop:* excludes mapred-default.xml.orig
     org.eclipse.jetty:* excludes about.html, jetty-dir.css
     org.apache.kerby:* excludes krb5-template.conf, krb5_udp-template.conf,
       ccache.txt, keytab.txt
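The filter entries are likewise maven-shade-plugin <filter> elements that drop
named resources from a specific artifact. The dnsjava entry, for example, expands
to (a sketch in the plugin's standard filter syntax):

    <filter>
      <artifact>dnsjava:dnsjava</artifact>
      <excludes>
        <!-- strip dnsjava's bundled command-line tool classes -->
        <exclude>dig*</exclude>
        <exclude>jnamed*</exclude>
        <exclude>lookup*</exclude>
        <exclude>update*</exclude>
      </excludes>
    </filter>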
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index 63db97719c6..6695e76e961 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -142,13 +142,23 @@
-  org.slf4j:slf4j-log4j12 (test)
-  log4j:log4j (test)
+  org.apache.logging.log4j:log4j-api (test)
+  org.apache.logging.log4j:log4j-core (test)
+  org.apache.logging.log4j:log4j-slf4j-impl (test)
+  org.apache.logging.log4j:log4j-1.2-api (test)
diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml
index 6be2bfb6c39..2ec556023f6 100644
--- a/hbase-testing-util/pom.xml
+++ b/hbase-testing-util/pom.xml
@@ -1,5 +1,7 @@ (whole file reindented; substantive changes only)
   parent hbase-build-configuration; hbase-testing-util - Apache HBase - Testing Util -
   "HBase Testing Utilities."
   dependencies, unchanged: hbase-logging (test-jar, test), hbase-common (jar and
     test-jar, compile), hbase-annotations (test-jar, compile, excluding
     jdk.tools:jdk.tools), hbase-client, hbase-zookeeper (jar and test-jar),
     hbase-server (jar and test-jar), hbase-asyncfs (test-jar), hbase-hadoop-compat
     (jar and test-jar), org.slf4j:jcl-over-slf4j (test), org.slf4j:jul-to-slf4j (test)
-    org.slf4j:slf4j-log4j12 (test)
-    log4j:log4j (test)
+    org.apache.logging.log4j:log4j-api (test)
+    org.apache.logging.log4j:log4j-core (test)
+    org.apache.logging.log4j:log4j-slf4j-impl (test)
+    org.apache.logging.log4j:log4j-1.2-api (test)
   profile hadoop-3.0 (active when !hadoop.profile is set), unchanged:
     org.apache.hadoop:hadoop-common (excluding jaxb-api, jsr311-api),
     hadoop-minicluster (compile, excluding guava, jsr311-api), hadoop-minikdc
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index 6f8d7babd6a..fca2aa6217c 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -253,13 +253,23 @@
-  org.slf4j:slf4j-log4j12 (test)
-  log4j:log4j (test)
+  org.apache.logging.log4j:log4j-api (test)
+  org.apache.logging.log4j:log4j-core (test)
+  org.apache.logging.log4j:log4j-slf4j-impl (test)
+  org.apache.logging.log4j:log4j-1.2-api (test)
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 966ef5b1432..6270d0ebd9a 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -166,13 +166,18 @@
-  org.slf4j:slf4j-log4j12 (test)
-  log4j:log4j (test)
+  org.apache.logging.log4j:log4j-api (test)
+  org.apache.logging.log4j:log4j-core (test)
+  org.apache.logging.log4j:log4j-slf4j-impl (test)
diff --git a/pom.xml b/pom.xml
index a66ed58c81a..0f5c6fb8ef2 100755
--- a/pom.xml
+++ b/pom.xml
@@ -865,7 +865,7 @@
-  log4j.properties
+  log4j2.xml
@@ -1179,6 +1179,42 @@ (new maven-enforcer-plugin executions, before banned-jetty)
+  banned-log4j: goal enforce, bannedDependencies excluding log4j:log4j,
+    message "We do not allow log4j dependencies as now we use log4j2"
+  banned-slf4j-log4j12: goal enforce, bannedDependencies excluding
+    org.slf4j:slf4j-log4j12, message "We do not allow slf4j-log4j12 dependency
+    as now we use log4j-slf4j-impl"
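A sketch of the banned-log4j execution in maven-enforcer-plugin syntax; the id,
excluded coordinate, and message come from the hunk above, while the exact element
layout is an assumption:

    <execution>
      <id>banned-log4j</id>
      <goals>
        <goal>enforce</goal>
      </goals>
      <configuration>
        <rules>
          <bannedDependencies>
            <excludes>
              <!-- fail the build if log4j 1.x sneaks in transitively -->
              <exclude>log4j:log4j</exclude>
            </excludes>
            <message>We do not allow log4j dependencies as now we use log4j2</message>
          </bannedDependencies>
        </rules>
      </configuration>
    </execution>

The banned-slf4j-log4j12 execution follows the same shape, excluding
org.slf4j:slf4j-log4j12 instead.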
@@ -1267,16 +1303,18 @@ (restrict-imports checkstyle rules)
   "Use SLF4j for logging": banned import org.apache.commons.logging.**
+    now also bans org.apache.log4j.** and org.apache.logging.log4j.**
-  reason "Do not use log4j directly in code, see Log4jUtils in hbase-logging for more details."
-    banned import org.apache.log4j.**
+  reason "Do not use log4j2 directly in code, see Log4jUtils in hbase-logging for more details."
+    banned import org.apache.logging.log4j.**
@@ -1653,7 +1691,7 @@ (version properties)
-  <log4j.version>1.2.17</log4j.version>
+  <log4j2.version>2.14.1</log4j2.version>
@@ -2050,8 +2088,8 @@ (dependencyManagement; slf4j-api and jcl-over-slf4j kept at ${slf4j.version})
-  org.slf4j:slf4j-log4j12:${slf4j.version}
@@ -2084,9 +2118,24 @@
-  log4j:log4j:${log4j.version}
+  org.apache.logging.log4j:log4j-api:${log4j2.version}
+  org.apache.logging.log4j:log4j-core:${log4j2.version}
+  org.apache.logging.log4j:log4j-slf4j-impl:${log4j2.version}
+  org.apache.logging.log4j:log4j-1.2-api:${log4j2.version}
@@ -2094,8 +2143,6 @@ (avro/caffeine entries reflowed, no substantive change)
@@ -2771,6 +2818,46 @@ (new managed dependency, before hadoop-mapreduce-client-jobclient)
+  org.apache.hadoop:hadoop-mapreduce-client-app:${hadoop-three.version} (test-jar)
+    excluding jackson-mapper-asl, jackson-core-asl, jackson-jaxrs, jackson-xc,
+    javax.xml.bind:jaxb-api, javax.ws.rs:jsr311-api, org.slf4j:slf4j-log4j12,
+    log4j:log4j
@@ -2792,10 +2879,6 @@ and @@ -2821,10 +2904,6 @@
-  log4j:log4j exclusions dropped from managed Hadoop dependencies
     (org.slf4j:slf4j-log4j12 exclusions remain)
@@ -3051,6 +3130,14 @@ (after the com.google.code.findbugs:jsr305 exclusion)
+  org.slf4j:slf4j-log4j12 and log4j:log4j exclusions added
@@ -3126,10 +3213,6 @@
-  log4j:log4j exclusion dropped (org.slf4j:slf4j-log4j12 exclusion remains)
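The @@ -865 hunk above swaps the bundled test logging configuration from
log4j.properties to log4j2.xml. A minimal log4j2.xml doing the equivalent job
(console appender plus a root logger) might look like the following; the layout
pattern and log level here are assumptions for illustration, not the project's
actual file:

    <?xml version="1.0" encoding="UTF-8"?>
    <Configuration status="WARN">
      <Appenders>
        <!-- test logs go to stderr, as the old console appender did -->
        <Console name="Console" target="SYSTEM_ERR">
          <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %m%n"/>
        </Console>
      </Appenders>
      <Loggers>
        <Root level="info">
          <AppenderRef ref="Console"/>
        </Root>
      </Loggers>
    </Configuration>

With log4j-1.2-api on the test classpath, code that still calls the old
org.apache.log4j API is routed into this configuration as well.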